package at.ac.tuwien.dsg.pubsub.middleware.comp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import at.ac.tuwien.dsg.myx.fw.MyxJavaClassInitPropertiesInterfaceDescription;
import at.ac.tuwien.dsg.myx.monitor.AbstractMyxMonitoringRuntimeAdapter;
import at.ac.tuwien.dsg.myx.monitor.MyxProperties;
import at.ac.tuwien.dsg.myx.monitor.em.events.XADLElementType;
import at.ac.tuwien.dsg.myx.util.IdGenerator;
import at.ac.tuwien.dsg.myx.util.MyxUtils;
import at.ac.tuwien.dsg.pubsub.middleware.interfaces.IMyxRuntimeAdapter;
import at.ac.tuwien.dsg.pubsub.middleware.myx.DynamicArchitectureModelProperties;
import at.ac.tuwien.dsg.pubsub.middleware.myx.MessageDistributor;
import at.ac.tuwien.dsg.pubsub.middleware.myx.MyxInterfaceNames;
import edu.uci.isr.myx.fw.EMyxInterfaceDirection;
import edu.uci.isr.myx.fw.IMyxBrickDescription;
import edu.uci.isr.myx.fw.IMyxInterfaceDescription;
import edu.uci.isr.myx.fw.IMyxName;
import edu.uci.isr.myx.fw.IMyxWeld;
import edu.uci.isr.myx.fw.MyxBrickCreationException;
import edu.uci.isr.myx.fw.MyxBrickLoadException;
import edu.uci.isr.myx.fw.MyxJavaClassBrickDescription;
public class MyxRuntimeAdapter extends AbstractMyxMonitoringRuntimeAdapter implements IMyxRuntimeAdapter {
public static final IMyxName IN_IMYX_ADAPTER = MyxInterfaceNames.IMYX_ADAPTER;
public static final IMyxName[] PATH = null;
protected IMyxName messageDistributorRuntimeId = null;
protected Map<IMyxName, List<IMyxName>> component2Interfaces = new HashMap<>();
protected Map<IMyxName, List<IMyxWeld>> component2Welds = new HashMap<>();
@Override
public Object getServiceObject(IMyxName interfaceName) {
if (interfaceName.equals(IN_IMYX_ADAPTER)) {
return this;
}
return null;
}
/**
* Fetch and cache the runtime id of the {@link MessageDistributor}, failing
* if no such brick is registered in the runtime.
*/
protected void fetchMessageDistributorRuntimeId() {
if (messageDistributorRuntimeId == null) {
synchronized (this) {
for (IMyxName brickName : getMyxRuntime().getAllBrickNames(null)) {
if (brickName.getName().startsWith(
DynamicArchitectureModelProperties.MESSAGE_DISTRIBUTOR_BLUEPRINT_ID)) {
messageDistributorRuntimeId = brickName;
return;
}
}
throw new RuntimeException("No MessageDistributor found");
}
}
}
/**
* Remove a component from the architecture.
*
* @param brickName the runtime name of the brick to remove
*/
protected void removeComponent(IMyxName brickName) {
synchronized (this) {
if (component2Interfaces.containsKey(brickName)) {
// call the specific myx methods
getMyxRuntime().end(PATH, brickName);
getMyxRuntime().destroy(PATH, brickName);
// remove welds
for (IMyxWeld weld : component2Welds.get(brickName)) {
try {
getMyxRuntime().removeWeld(weld);
} catch (Exception e) {
// best-effort cleanup: the weld may already have been removed
}
}
// remove interfaces
for (IMyxName interfaceName : component2Interfaces.get(brickName)) {
try {
getMyxRuntime().removeInterface(PATH, brickName, interfaceName);
} catch (Exception e) {
// best-effort cleanup: the interface may already have been removed
}
}
// remove the brick itself
getMyxRuntime().removeBrick(PATH, brickName);
// remove the brick from all collections
component2Interfaces.remove(brickName);
component2Welds.remove(brickName);
}
}
}
@Override
public void createPublisherEndpoint(String publisherEndpointClassName, Dispatcher<?> dispatcher) {
fetchMessageDistributorRuntimeId();
try {
Class<?> pubEndClass = Class.forName(publisherEndpointClassName);
if (!PublisherEndpoint.class.isAssignableFrom(pubEndClass)) {
throw new RuntimeException("Class " + publisherEndpointClassName + " is not a subclass of "
+ PublisherEndpoint.class.getName());
}
} catch (ClassNotFoundException e1) {
throw new RuntimeException("Class " + publisherEndpointClassName + " does not exist", e1);
}
// class description
Properties initProps = new Properties();
initProps.put(MyxProperties.ARCHITECTURE_BLUEPRINT_ID,
DynamicArchitectureModelProperties.PUBLISHER_ENDPOINT_BLUEPRINT_ID);
initProps.put(MyxProperties.ARCHITECTURE_BRICK_TYPE, XADLElementType.CONNECTOR);
IMyxBrickDescription publisherEndpointDesc = new MyxJavaClassBrickDescription(initProps,
publisherEndpointClassName);
// interface descriptions
Properties dispatcherInitProps = new Properties();
dispatcherInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.DISPATCHER_INTERFACE_TYPE);
IMyxInterfaceDescription dispatcherDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.IDISPATCHER.getName() }, dispatcherInitProps);
Properties subscriberInitProps = new Properties();
subscriberInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.SUBSCRIBER_INTERFACE_TYPE);
IMyxInterfaceDescription subscriberDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.ISUBSCRIBER.getName() }, subscriberInitProps);
Properties myxAdapterInitProps = new Properties();
myxAdapterInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.MYX_ADAPTER_INTERFACE_TYPE);
IMyxInterfaceDescription myxAdapterDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.IMYX_ADAPTER.getName() }, myxAdapterInitProps);
// external interface description
Properties virtualExternalInterfaceInitProps = new Properties();
virtualExternalInterfaceInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.PUBLISHER_ENDPOINT_VIRTUAL_EXTERNAL_INTERFACE_TYPE);
IMyxInterfaceDescription virtualExternalInterfaceDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[0], virtualExternalInterfaceInitProps);
// create name
IMyxName publisherEndpoint = MyxUtils.createName(IdGenerator
.generateRuntimeInstantiationId(DynamicArchitectureModelProperties.PUBLISHER_ENDPOINT_BLUEPRINT_ID));
// add the bricks
try {
getMyxRuntime().addBrick(PATH, publisherEndpoint, publisherEndpointDesc);
component2Interfaces.put(publisherEndpoint, new ArrayList<IMyxName>());
component2Welds.put(publisherEndpoint, new ArrayList<IMyxWeld>());
} catch (MyxBrickLoadException | MyxBrickCreationException e) {
throw new RuntimeException("Could not load brick", e);
}
// add interfaces to the component
getMyxRuntime().addInterface(PATH, publisherEndpoint, MyxInterfaceNames.IDISPATCHER, dispatcherDesc,
EMyxInterfaceDirection.OUT);
component2Interfaces.get(publisherEndpoint).add(MyxInterfaceNames.IDISPATCHER);
getMyxRuntime().addInterface(PATH, publisherEndpoint, MyxInterfaceNames.ISUBSCRIBER, subscriberDesc,
EMyxInterfaceDirection.OUT);
component2Interfaces.get(publisherEndpoint).add(MyxInterfaceNames.ISUBSCRIBER);
getMyxRuntime().addInterface(PATH, publisherEndpoint, MyxInterfaceNames.IMYX_ADAPTER, myxAdapterDesc,
EMyxInterfaceDirection.OUT);
component2Interfaces.get(publisherEndpoint).add(MyxInterfaceNames.IMYX_ADAPTER);
// external interface
getMyxRuntime().addInterface(PATH, publisherEndpoint, MyxInterfaceNames.VIRTUAL_PUBLISHER_ENDPOINT,
virtualExternalInterfaceDesc, EMyxInterfaceDirection.IN);
component2Interfaces.get(publisherEndpoint).add(MyxInterfaceNames.VIRTUAL_PUBLISHER_ENDPOINT);
// init
getMyxRuntime().init(PATH, publisherEndpoint);
// wire up the endpoint
IMyxWeld pe2d = getMyxRuntime().createWeld(PATH, publisherEndpoint, MyxInterfaceNames.IDISPATCHER, PATH,
MyxUtils.getName(dispatcher), MyxInterfaceNames.IDISPATCHER);
getMyxRuntime().addWeld(pe2d);
component2Welds.get(publisherEndpoint).add(pe2d);
IMyxWeld pe2md = getMyxRuntime().createWeld(PATH, publisherEndpoint, MyxInterfaceNames.ISUBSCRIBER, PATH,
messageDistributorRuntimeId, MyxUtils.createName("in"));
getMyxRuntime().addWeld(pe2md);
component2Welds.get(publisherEndpoint).add(pe2md);
IMyxWeld pe2myx = getMyxRuntime().createWeld(PATH, publisherEndpoint, MyxInterfaceNames.IMYX_ADAPTER, PATH,
MyxUtils.getName(this), MyxInterfaceNames.IMYX_ADAPTER);
getMyxRuntime().addWeld(pe2myx);
component2Welds.get(publisherEndpoint).add(pe2myx);
// begin
getMyxRuntime().begin(PATH, publisherEndpoint);
}
@Override
public void shutdownPublisherEndpoint(PublisherEndpoint<?> endpoint) {
removeComponent(MyxUtils.getName(endpoint));
}
@Override
public void createSubscriberEndpoint(String subscriberEndpointClassName, Dispatcher<?> dispatcher) {
fetchMessageDistributorRuntimeId();
try {
Class<?> subEndClass = Class.forName(subscriberEndpointClassName);
if (!SubscriberEndpoint.class.isAssignableFrom(subEndClass)) {
throw new RuntimeException("Class " + subscriberEndpointClassName + " is not a subclass of "
+ SubscriberEndpoint.class.getName());
}
} catch (ClassNotFoundException e1) {
throw new RuntimeException("Class " + subscriberEndpointClassName + " does not exist", e1);
}
// class description
Properties initProps = new Properties();
initProps.put(MyxProperties.ARCHITECTURE_BLUEPRINT_ID,
DynamicArchitectureModelProperties.SUBSCRIBER_ENDPOINT_BLUEPRINT_ID);
initProps.put(MyxProperties.ARCHITECTURE_BRICK_TYPE, XADLElementType.CONNECTOR);
IMyxBrickDescription subscriberEndpointDesc = new MyxJavaClassBrickDescription(initProps,
subscriberEndpointClassName);
// interface descriptions
Properties dispatcherInitProps = new Properties();
dispatcherInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.DISPATCHER_INTERFACE_TYPE);
IMyxInterfaceDescription dispatcherDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.IDISPATCHER.getName() }, dispatcherInitProps);
Properties subscriberInitProps = new Properties();
subscriberInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.SUBSCRIBER_INTERFACE_TYPE);
IMyxInterfaceDescription subscriberDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.ISUBSCRIBER.getName() }, subscriberInitProps);
Properties myxAdapterInitProps = new Properties();
myxAdapterInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.MYX_ADAPTER_INTERFACE_TYPE);
IMyxInterfaceDescription myxAdapterDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[] { MyxInterfaceNames.IMYX_ADAPTER.getName() }, myxAdapterInitProps);
// external interface description
Properties virtualExternalInterfaceInitProps = new Properties();
virtualExternalInterfaceInitProps.put(MyxProperties.ARCHITECTURE_INTERFACE_TYPE,
DynamicArchitectureModelProperties.SUBSCRIBER_ENDPOINT_VIRTUAL_EXTERNAL_INTERFACE_TYPE);
IMyxInterfaceDescription virtualExternalInterfaceDesc = new MyxJavaClassInitPropertiesInterfaceDescription(
new String[0], virtualExternalInterfaceInitProps);
// create name
IMyxName subscriberEndpoint = MyxUtils.createName(IdGenerator
.generateRuntimeInstantiationId(DynamicArchitectureModelProperties.SUBSCRIBER_ENDPOINT_BLUEPRINT_ID));
// add the bricks
try {
getMyxRuntime().addBrick(PATH, subscriberEndpoint, subscriberEndpointDesc);
component2Interfaces.put(subscriberEndpoint, new ArrayList<IMyxName>());
component2Welds.put(subscriberEndpoint, new ArrayList<IMyxWeld>());
} catch (MyxBrickLoadException | MyxBrickCreationException e) {
throw new RuntimeException("Could not load brick", e);
}
// add interfaces to components
getMyxRuntime().addInterface(PATH, subscriberEndpoint, MyxInterfaceNames.IDISPATCHER, dispatcherDesc,
EMyxInterfaceDirection.OUT);
component2Interfaces.get(subscriberEndpoint).add(MyxInterfaceNames.IDISPATCHER);
getMyxRuntime().addInterface(PATH, subscriberEndpoint, MyxInterfaceNames.ISUBSCRIBER, subscriberDesc,
EMyxInterfaceDirection.IN);
component2Interfaces.get(subscriberEndpoint).add(MyxInterfaceNames.ISUBSCRIBER);
getMyxRuntime().addInterface(PATH, subscriberEndpoint, MyxInterfaceNames.IMYX_ADAPTER, myxAdapterDesc,
EMyxInterfaceDirection.OUT);
component2Interfaces.get(subscriberEndpoint).add(MyxInterfaceNames.IMYX_ADAPTER);
// external interface
getMyxRuntime().addInterface(PATH, subscriberEndpoint, MyxInterfaceNames.VIRTUAL_SUBSCRIBER_ENDPOINT,
virtualExternalInterfaceDesc, EMyxInterfaceDirection.OUT);
component2Interfaces.get(subscriberEndpoint).add(MyxInterfaceNames.VIRTUAL_SUBSCRIBER_ENDPOINT);
// init
getMyxRuntime().init(PATH, subscriberEndpoint);
// wire up the endpoint
IMyxWeld se2d = getMyxRuntime().createWeld(PATH, subscriberEndpoint, MyxInterfaceNames.IDISPATCHER, PATH,
MyxUtils.getName(dispatcher), MyxInterfaceNames.IDISPATCHER);
getMyxRuntime().addWeld(se2d);
component2Welds.get(subscriberEndpoint).add(se2d);
IMyxWeld se2myx = getMyxRuntime().createWeld(PATH, subscriberEndpoint, MyxInterfaceNames.IMYX_ADAPTER, PATH,
MyxUtils.getName(this), MyxInterfaceNames.IMYX_ADAPTER);
getMyxRuntime().addWeld(se2myx);
component2Welds.get(subscriberEndpoint).add(se2myx);
// begin
getMyxRuntime().begin(PATH, subscriberEndpoint);
}
@Override
public void wireSubscriberEndpoint(SubscriberEndpoint<?> subscriber) {
IMyxName subscriberEndpoint = MyxUtils.getName(subscriber);
IMyxWeld md2se = getMyxRuntime().createWeld(PATH, messageDistributorRuntimeId,
MyxUtils.createName("out"), PATH, subscriberEndpoint, MyxInterfaceNames.ISUBSCRIBER);
getMyxRuntime().addWeld(md2se);
component2Welds.get(subscriberEndpoint).add(md2se);
}
@Override
public void shutdownSubscriberEndpoint(SubscriberEndpoint<?> endpoint) {
removeComponent(MyxUtils.getName(endpoint));
}
}
package org.onosproject.net.intent;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.onlab.packet.MplsLabel;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import com.google.common.base.MoreObjects;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Abstraction of MPLS label-switched connectivity.
*/
public class MplsIntent extends ConnectivityIntent {
private final ConnectPoint ingressPoint;
private final Optional<MplsLabel> ingressLabel;
private final ConnectPoint egressPoint;
private final Optional<MplsLabel> egressLabel;
/**
* Creates a new MPLS intent with the supplied ingress/egress
* ports, labels and constraints.
*
* @param appId application identifier
* @param key intent key
* @param selector traffic selector
* @param treatment treatment
* @param ingressPoint ingress port
* @param ingressLabel ingress MPLS label
* @param egressPoint egress port
* @param egressLabel egress MPLS label
* @param constraints optional list of constraints
* @param priority priority to use for flows generated by this intent
* @throws NullPointerException if {@code ingressPoint} or {@code egressPoint} is null
*/
private MplsIntent(ApplicationId appId,
Key key,
TrafficSelector selector,
TrafficTreatment treatment,
ConnectPoint ingressPoint,
Optional<MplsLabel> ingressLabel,
ConnectPoint egressPoint,
Optional<MplsLabel> egressLabel,
List<Constraint> constraints,
int priority) {
super(appId, key, Collections.emptyList(), selector, treatment, constraints,
priority);
this.ingressPoint = checkNotNull(ingressPoint);
this.ingressLabel = checkNotNull(ingressLabel);
this.egressPoint = checkNotNull(egressPoint);
this.egressLabel = checkNotNull(egressLabel);
checkArgument(!ingressPoint.equals(egressPoint),
"ingress and egress should be different (ingress: %s, egress: %s)",
ingressPoint, egressPoint);
}
/**
* Returns a new MPLS intent builder. The application id,
* ingress point, egress point, ingress label and egress label are
* required fields. If they are not set by calls to the appropriate
* methods, an exception will be thrown.
*
* @return MPLS intent builder
*/
public static Builder builder() {
return new Builder();
}
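// Illustrative usage sketch, not part of the original file: drives the builder above with
// every field the javadoc names as required. The device/port strings and label values are
// placeholder assumptions, ConnectPoint.deviceConnectPoint / MplsLabel.mplsLabel are the
// standard ONOS factories, and selector, treatment and constraints are assumed to fall back
// to the defaults inherited from ConnectivityIntent.Builder.
private static MplsIntent exampleIntent(ApplicationId appId) {
return builder()
.appId(appId)
.ingressPoint(ConnectPoint.deviceConnectPoint("of:0000000000000001/1"))
.egressPoint(ConnectPoint.deviceConnectPoint("of:0000000000000002/2"))
.ingressLabel(Optional.of(MplsLabel.mplsLabel(100)))
.egressLabel(Optional.of(MplsLabel.mplsLabel(200)))
.build();
}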
/**
* Builder of an MPLS intent.
*/
public static final class Builder extends ConnectivityIntent.Builder {
ConnectPoint ingressPoint;
ConnectPoint egressPoint;
Optional<MplsLabel> ingressLabel;
Optional<MplsLabel> egressLabel;
private Builder() {
// Hide constructor
}
@Override
public Builder appId(ApplicationId appId) {
return (Builder) super.appId(appId);
}
@Override
public Builder key(Key key) {
return (Builder) super.key(key);
}
@Override
public Builder selector(TrafficSelector selector) {
return (Builder) super.selector(selector);
}
@Override
public Builder treatment(TrafficTreatment treatment) {
return (Builder) super.treatment(treatment);
}
@Override
public Builder constraints(List<Constraint> constraints) {
return (Builder) super.constraints(constraints);
}
@Override
public Builder priority(int priority) {
return (Builder) super.priority(priority);
}
/**
* Sets the ingress point of the point to point intent that will be built.
*
* @param ingressPoint ingress connect point
* @return this builder
*/
public Builder ingressPoint(ConnectPoint ingressPoint) {
this.ingressPoint = ingressPoint;
return this;
}
/**
* Sets the egress point of the point to point intent that will be built.
*
* @param egressPoint egress connect point
* @return this builder
*/
public Builder egressPoint(ConnectPoint egressPoint) {
this.egressPoint = egressPoint;
return this;
}
/**
* Sets the ingress label of the intent that will be built.
*
* @param ingressLabel ingress label
* @return this builder
*/
public Builder ingressLabel(Optional<MplsLabel> ingressLabel) {
this.ingressLabel = ingressLabel;
return this;
}
/**
* Sets the egress label of the intent that will be built.
*
* @param egressLabel egress label
* @return this builder
*/
public Builder egressLabel(Optional<MplsLabel> egressLabel) {
this.egressLabel = egressLabel;
return this;
}
/**
* Builds an MPLS intent from the accumulated parameters.
*
* @return MPLS intent
*/
public MplsIntent build() {
return new MplsIntent(
appId,
key,
selector,
treatment,
ingressPoint,
ingressLabel,
egressPoint,
egressLabel,
constraints,
priority
);
}
}
/**
* Constructor for serializer.
*/
protected MplsIntent() {
super();
this.ingressPoint = null;
this.ingressLabel = null;
this.egressPoint = null;
this.egressLabel = null;
}
/**
* Returns the port on which the ingress traffic should be connected to
* the egress.
*
* @return ingress switch port
*/
public ConnectPoint ingressPoint() {
return ingressPoint;
}
/**
* Returns the port on which the traffic should egress.
*
* @return egress switch port
*/
public ConnectPoint egressPoint() {
return egressPoint;
}
/**
* Returns the MPLS label with which the ingress traffic should be tagged.
*
* @return ingress MPLS label
*/
public Optional<MplsLabel> ingressLabel() {
return ingressLabel;
}
/**
* Returns the MPLS label with which the egress traffic should be tagged.
*
* @return egress MPLS label
*/
public Optional<MplsLabel> egressLabel() {
return egressLabel;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass())
.add("id", id())
.add("appId", appId())
.add("key", key())
.add("priority", priority())
.add("selector", selector())
.add("treatment", treatment())
.add("ingressPoint", ingressPoint)
.add("ingressLabel", ingressLabel)
.add("egressPoint", egressPoint)
.add("egressLabel", egressLabel)
.add("constraints", constraints())
.toString();
}
}
/**
* @author Aleksey Terzi
*
*/
package com.encrox.instancedregions.chunkmap;
import java.io.IOException;
import java.util.Arrays;
import com.encrox.instancedregions.types.BlockState;
public class ChunkMapManager {
private static final ThreadLocal<ChunkMapBuffer> _buffer = new ThreadLocal<ChunkMapBuffer>() {
@Override
protected ChunkMapBuffer initialValue() {
return new ChunkMapBuffer();
}
};
private ChunkMapBuffer buffer;
private ChunkData chunkData;
private ChunkReader reader;
private int sectionCount;
private int sectionIndex;
private int y;
private int minX;
private int maxX;
private int minZ;
private int maxZ;
private int blockIndex;
public int getSectionCount() {
return this.sectionCount;
}
public int getY() {
return this.y;
}
public ChunkData getChunkData() {
return this.chunkData;
}
public ChunkMapManager(ChunkData chunkData) {
this.buffer = _buffer.get();
this.chunkData = chunkData;
}
public void init() throws IOException {
this.reader = new ChunkReader(this.chunkData.data);
this.sectionCount = 0;
this.sectionIndex = -1;
this.minX = this.chunkData.chunkX << 4;
this.maxX = this.minX + 15;
this.minZ = this.chunkData.chunkZ << 4;
this.maxZ = this.minZ + 15;
this.buffer.lightArrayLength = 2048;
if(this.chunkData.isOverworld) {
this.buffer.lightArrayLength <<= 1;
}
this.buffer.writer.init();
int mask = this.chunkData.primaryBitMask;
while(mask != 0) {
if((mask & 0x1) != 0) {
this.sectionCount++;
}
mask >>>= 1;
}
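// e.g. primaryBitMask = 0b1011 means sections 0, 1 and 3 are present, so sectionCount == 3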
this.buffer.clearLayers();
moveToNextLayer();
}
public boolean inputHasNonAirBlock() {
return this.buffer.paletteLength > 1 || this.buffer.palette[0] != 0;
}
public static void blockDataToState(int blockData, BlockState blockState) {
blockState.id = blockData >>> 4;
blockState.meta = blockData & 0xf;
}
public static int getBlockIdFromData(int blockData) {
return blockData >>> 4;
}
public static int getBlockMetaFromData(int blockData) {
return blockData & 0xf;
}
public static int blockStateToData(BlockState blockState) {
return (blockState.id << 4) | blockState.meta;
}
public static int getBlockDataFromId(int id) {
return id << 4;
}
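// Illustrative sketch, not part of the original class: the helpers above pack a block into a
// single int as (id << 4) | meta, so the low 4 bits carry the metadata and the rest the block
// id. The id/meta values below are arbitrary examples.
private static boolean packingRoundTrips() {
BlockState state = new BlockState();
blockDataToState((53 << 4) | 1, state); // state.id == 53, state.meta == 1
return blockStateToData(state) == ((53 << 4) | 1); // packs back to the same value
}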
public boolean initOutputPalette() {
if(this.buffer.paletteLength == 0 || this.buffer.paletteLength == 255) {
this.buffer.outputPaletteLength = 0;
return false;
}
Arrays.fill(this.buffer.outputPaletteMap, (byte)-1);
this.buffer.outputPaletteLength = this.buffer.paletteLength;
for(int i = 0; i < this.buffer.paletteLength; i++) {
int blockData = this.buffer.palette[i];
this.buffer.outputPalette[i] = blockData;
if(blockData >= 0) {
this.buffer.outputPaletteMap[blockData] = (byte)i;
}
}
return true;
}
public boolean addToOutputPalette(int blockData) {
if(this.buffer.outputPaletteMap[blockData] >= 0) return true;
//255 (-1 for byte) is special code in my algorithm
if(this.buffer.outputPaletteLength == 254) {
this.buffer.outputPaletteLength = 0;
return false;
}
this.buffer.outputPalette[this.buffer.outputPaletteLength] = blockData;
this.buffer.outputPaletteMap[blockData] = (byte)this.buffer.outputPaletteLength;
this.buffer.outputPaletteLength++;
return true;
}
public void initOutputSection() throws IOException {
calcOutputBitsPerBlock();
this.buffer.writer.setBitsPerBlock(this.buffer.outputBitsPerBlock);
//Bits Per Block
this.buffer.writer.writeByte((byte)this.buffer.outputBitsPerBlock);
//Palette Length
this.buffer.writer.writeVarInt(this.buffer.outputPaletteLength);
//Palette
for(int i = 0; i < this.buffer.outputPaletteLength; i++) {
this.buffer.writer.writeVarInt(this.buffer.outputPalette[i]);
}
int dataArrayLengthInBits = this.buffer.outputBitsPerBlock << 12;// multiply by 4096
int outputDataArrayLength = dataArrayLengthInBits >>> 6;//divide by 64
if((dataArrayLengthInBits & 0x3f) != 0) {
outputDataArrayLength++;
}
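// e.g. 5 bits per block -> 5 * 4096 = 20480 bits -> exactly 320 longs; since 4096 bits per
// block count is always a multiple of 64, the round-up above is only a safeguard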
//Data Array Length
this.buffer.writer.writeVarInt(outputDataArrayLength);
//Copy Block Light and Sky Light arrays
int lightArrayStartIndex = this.buffer.dataArrayStartIndex + (this.buffer.dataArrayLength << 3);
int outputLightArrayStartIndex = this.buffer.writer.getByteIndex() + (outputDataArrayLength << 3);
System.arraycopy(
this.chunkData.data,
lightArrayStartIndex,
this.buffer.output,
outputLightArrayStartIndex,
this.buffer.lightArrayLength
);
}
public void writeOutputBlock(int blockData) throws IOException {
if(this.buffer.outputPaletteLength > 0) {
long paletteIndex = this.buffer.outputPaletteMap[blockData] & 0xffL;
if(paletteIndex == 255) {
BlockState blockState = new BlockState();
blockDataToState(blockData, blockState);
throw new IllegalArgumentException("Block " + blockState.id + ":" + blockState.meta + " is absent in output palette.");
}
this.buffer.writer.writeBlockBits(paletteIndex);
} else {
this.buffer.writer.writeBlockBits(blockData);
}
}
public void finalizeOutput() throws IOException {
if(this.buffer.writer.getByteIndex() == 0) return;
this.buffer.writer.save();
this.buffer.writer.skip(this.buffer.lightArrayLength);
}
public byte[] createOutput() {
int readerByteIndex = this.reader.getByteIndex();
int writerByteIndex = this.buffer.writer.getByteIndex();
int biomesSize = this.chunkData.data.length - readerByteIndex;
byte[] output = new byte[writerByteIndex + biomesSize];
System.arraycopy(this.buffer.output, 0, output, 0, writerByteIndex);
if(biomesSize > 0) {
System.arraycopy(
this.chunkData.data,
readerByteIndex,
output,
writerByteIndex,
biomesSize
);
}
return output;
}
private void calcOutputBitsPerBlock() {
if(this.buffer.outputPaletteLength == 0) {
// no usable palette: blocks are written directly as 13-bit block data
this.buffer.outputBitsPerBlock = 13;
} else {
// bits needed to index the palette: one plus the position of the highest set bit
// of the palette length, clamped to the minimum of 4 bits per block
byte mask = (byte)this.buffer.outputPaletteLength;
int index = 0;
while((mask & 0x80) == 0) {
index++;
mask <<= 1;
}
this.buffer.outputBitsPerBlock = 8 - index;
if(this.buffer.outputBitsPerBlock < 4) {
this.buffer.outputBitsPerBlock = 4;
}
}
}
public int readNextBlock() throws IOException {
if(this.blockIndex == 16 * 16) {
if(!moveToNextLayer()) return -1;
}
return this.buffer.curLayer.map[this.blockIndex++];
}
public int get(int x, int y, int z) throws IOException {
if(x < minX || x > maxX
|| z < minZ || z > maxZ
|| y > 255 || y < this.y - 1 || y > this.y + 1
) {
return -1;
}
ChunkLayer layer;
if(y == this.y) layer = this.buffer.curLayer;
else if(y == this.y - 1) layer = this.buffer.prevLayer;
else layer = this.buffer.nextLayer;
if(!layer.hasData) return -1;
int blockIndex = ((z - this.minZ) << 4) | (x - this.minX);
return layer.map[blockIndex];
}
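// The methods below maintain a sliding three-layer window (prevLayer / curLayer / nextLayer)
// around the current y so that get(x, y, z) can look one layer up or down without re-reading
// the chunk data; moveToNextLayer() advances the window and shiftLayersDown() rotates it.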
private boolean moveToNextLayer() throws IOException {
if(!increaseY()) return false;
shiftLayersDown();
if(!this.buffer.curLayer.hasData) {
readLayer(this.buffer.curLayer);
}
if(((this.y + 1) >>> 4) > this.sectionIndex) {
int oldSectionIndex = this.sectionIndex;
moveToNextSection();
if(this.sectionIndex < 16 && oldSectionIndex + 1 == this.sectionIndex) {
readLayer(this.buffer.nextLayer);
}
} else {
readLayer(this.buffer.nextLayer);
}
this.blockIndex = 0;
return true;
}
private boolean increaseY() throws IOException {
if(this.sectionIndex < 0) {
if(!moveToNextSection()) return false;
this.y = this.sectionIndex << 4;
}
else {
this.y++;
if((this.y & 0xf) == 0) {
if(this.sectionIndex > 15) return false;
if((this.y >>> 4) != this.sectionIndex) {
this.buffer.clearLayers();
this.y = this.sectionIndex << 4;
}
}
}
return true;
}
private void shiftLayersDown() {
ChunkLayer temp = this.buffer.prevLayer;
this.buffer.prevLayer = this.buffer.curLayer;
this.buffer.curLayer = this.buffer.nextLayer;
this.buffer.nextLayer = temp;
this.buffer.nextLayer.hasData = false;
}
private boolean moveToNextSection() throws IOException {
if(this.sectionIndex >= 0) {
this.reader.skip(this.buffer.lightArrayLength);
}
do {
this.sectionIndex++;
} while(this.sectionIndex < 16 && (this.chunkData.primaryBitMask & (1 << this.sectionIndex)) == 0);
if(this.sectionIndex >= 16) return false;
readSectionHeader();
return true;
}
private void readLayer(ChunkLayer layer) throws IOException {
for(int i = 0; i < 16 * 16; i++) {
int blockData = this.reader.readBlockBits();
if(this.buffer.paletteLength > 0) {
blockData = blockData >= 0 && blockData < this.buffer.paletteLength
? this.buffer.palette[blockData]
: 0;
}
layer.map[i] = blockData;
}
layer.hasData = true;
}
private void readSectionHeader() throws IOException {
this.buffer.bitsPerBlock = this.reader.readByte();
this.buffer.paletteLength = this.reader.readVarInt();
for(int i = 0; i < this.buffer.paletteLength; i++) {
int paletteData = this.reader.readVarInt();
this.buffer.palette[i] = paletteData;
}
this.buffer.dataArrayLength = this.reader.readVarInt();
this.buffer.dataArrayStartIndex = this.reader.getByteIndex();
this.reader.setBitsPerBlock(this.buffer.bitsPerBlock);
}
}
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
*/
public class GetWorkflowExecutionHistoryRequest extends AmazonWebServiceRequest
implements Serializable, Cloneable {
/**
* <p>
* The name of the domain containing the workflow execution.
* </p>
*/
private String domain;
/**
* <p>
* Specifies the workflow execution for which to return the history.
* </p>
*/
private WorkflowExecution execution;
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there
* are more results available. To retrieve the next page of results, make
* the call again using the returned token in <code>nextPageToken</code>.
* Keep all other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results
* can be returned in a single call.
* </p>
*/
private String nextPageToken;
/**
* <p>
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of results.
* The default is 1000, which is the maximum allowed page size. You can,
* however, specify a page size <i>smaller</i> than the maximum.
* </p>
* <p>
* This is an upper limit only; the actual number of results returned per
* call may be fewer than the specified maximum.
* </p>
*/
private Integer maximumPageSize;
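// Illustrative paging sketch, not part of the generated class, assuming the standard
// AmazonSimpleWorkflow client plus the History/HistoryEvent model types from this SDK:
// the request is re-issued with the returned nextPageToken, all other arguments unchanged,
// until the history is exhausted.
//
//     List<HistoryEvent> events = new ArrayList<>();
//     GetWorkflowExecutionHistoryRequest request = new GetWorkflowExecutionHistoryRequest()
//             .withDomain("my-domain")
//             .withExecution(execution)
//             .withMaximumPageSize(200);
//     History page;
//     do {
//         page = swf.getWorkflowExecutionHistory(request);
//         events.addAll(page.getEvents());
//         request.setNextPageToken(page.getNextPageToken());
//     } while (page.getNextPageToken() != null);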
/**
* <p>
* When set to <code>true</code>, returns the events in reverse order. By
* default the results are returned in ascending order of the
* <code>eventTimeStamp</code> of the events.
* </p>
*/
private Boolean reverseOrder;
/**
* <p>
* The name of the domain containing the workflow execution.
* </p>
*
* @param domain
* The name of the domain containing the workflow execution.
*/
public void setDomain(String domain) {
this.domain = domain;
}
/**
* <p>
* The name of the domain containing the workflow execution.
* </p>
*
* @return The name of the domain containing the workflow execution.
*/
public String getDomain() {
return this.domain;
}
/**
* <p>
* The name of the domain containing the workflow execution.
* </p>
*
* @param domain
* The name of the domain containing the workflow execution.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetWorkflowExecutionHistoryRequest withDomain(String domain) {
setDomain(domain);
return this;
}
/**
* <p>
* Specifies the workflow execution for which to return the history.
* </p>
*
* @param execution
* Specifies the workflow execution for which to return the history.
*/
public void setExecution(WorkflowExecution execution) {
this.execution = execution;
}
/**
* <p>
* Specifies the workflow execution for which to return the history.
* </p>
*
* @return Specifies the workflow execution for which to return the history.
*/
public WorkflowExecution getExecution() {
return this.execution;
}
/**
* <p>
* Specifies the workflow execution for which to return the history.
* </p>
*
* @param execution
* Specifies the workflow execution for which to return the history.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetWorkflowExecutionHistoryRequest withExecution(
WorkflowExecution execution) {
setExecution(execution);
return this;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there
* are more results available. To retrieve the next page of results, make
* the call again using the returned token in <code>nextPageToken</code>.
* Keep all other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results
* can be returned in a single call.
* </p>
*
* @param nextPageToken
* If a <code>NextPageToken</code> was returned by a previous call,
* there are more results available. To retrieve the next page of
* results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments
* unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many
* results can be returned in a single call.
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there
* are more results available. To retrieve the next page of results, make
* the call again using the returned token in <code>nextPageToken</code>.
* Keep all other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results
* can be returned in a single call.
* </p>
*
* @return If a <code>NextPageToken</code> was returned by a previous call,
* there are more results available. To retrieve the next page of
* results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments
* unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many
* results can be returned in a single call.
*/
public String getNextPageToken() {
return this.nextPageToken;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there
* are more results available. To retrieve the next page of results, make
* the call again using the returned token in <code>nextPageToken</code>.
* Keep all other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results
* can be returned in a single call.
* </p>
*
* @param nextPageToken
* If a <code>NextPageToken</code> was returned by a previous call,
* there are more results available. To retrieve the next page of
* results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments
* unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many
* results can be returned in a single call.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetWorkflowExecutionHistoryRequest withNextPageToken(
String nextPageToken) {
setNextPageToken(nextPageToken);
return this;
}
/**
* <p>
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of results.
* The default is 1000, which is the maximum allowed page size. You can,
* however, specify a page size <i>smaller</i> than the maximum.
* </p>
* <p>
* This is an upper limit only; the actual number of results returned per
* call may be fewer than the specified maximum.
* </p>
*
* @param maximumPageSize
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of
* results. The default is 1000, which is the maximum allowed page
* size. You can, however, specify a page size <i>smaller</i> than
* the maximum.</p>
* <p>
* This is an upper limit only; the actual number of results returned
* per call may be fewer than the specified maximum.
*/
public void setMaximumPageSize(Integer maximumPageSize) {
this.maximumPageSize = maximumPageSize;
}
/**
* <p>
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of results.
* The default is 1000, which is the maximum allowed page size. You can,
* however, specify a page size <i>smaller</i> than the maximum.
* </p>
* <p>
* This is an upper limit only; the actual number of results returned per
* call may be fewer than the specified maximum.
* </p>
*
* @return The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of
* results. The default is 1000, which is the maximum allowed page
* size. You can, however, specify a page size <i>smaller</i> than
* the maximum.</p>
* <p>
* This is an upper limit only; the actual number of results
* returned per call may be fewer than the specified maximum.
*/
public Integer getMaximumPageSize() {
return this.maximumPageSize;
}
/**
* <p>
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of results.
* The default is 1000, which is the maximum allowed page size. You can,
* however, specify a page size <i>smaller</i> than the maximum.
* </p>
* <p>
* This is an upper limit only; the actual number of results returned per
* call may be fewer than the specified maximum.
* </p>
*
* @param maximumPageSize
* The maximum number of results that will be returned per call.
* <code>nextPageToken</code> can be used to obtain further pages of
* results. The default is 1000, which is the maximum allowed page
* size. You can, however, specify a page size <i>smaller</i> than
* the maximum.</p>
* <p>
* This is an upper limit only; the actual number of results returned
* per call may be fewer than the specified maximum.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetWorkflowExecutionHistoryRequest withMaximumPageSize(
Integer maximumPageSize) {
setMaximumPageSize(maximumPageSize);
return this;
}
/**
* <p>
* When set to <code>true</code>, returns the events in reverse order. By
* default the results are returned in ascending order of the
* <code>eventTimeStamp</code> of the events.
* </p>
*
* @param reverseOrder
* When set to <code>true</code>, returns the events in reverse
* order. By default the results are returned in ascending order of
* the <code>eventTimeStamp</code> of the events.
*/
public void setReverseOrder(Boolean reverseOrder) {
this.reverseOrder = reverseOrder;
}
/**
* <p>
* When set to <code>true</code>, returns the events in reverse order. By
* default the results are returned in ascending order of the
* <code>eventTimeStamp</code> of the events.
* </p>
*
* @return When set to <code>true</code>, returns the events in reverse
* order. By default the results are returned in ascending order of
* the <code>eventTimeStamp</code> of the events.
*/
public Boolean getReverseOrder() {
return this.reverseOrder;
}
/**
* <p>
* When set to <code>true</code>, returns the events in reverse order. By
* default the results are returned in ascending order of the
* <code>eventTimeStamp</code> of the events.
* </p>
*
* @param reverseOrder
* When set to <code>true</code>, returns the events in reverse
* order. By default the results are returned in ascending order of
* the <code>eventTimeStamp</code> of the events.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetWorkflowExecutionHistoryRequest withReverseOrder(
Boolean reverseOrder) {
setReverseOrder(reverseOrder);
return this;
}
/**
* <p>
* When set to <code>true</code>, returns the events in reverse order. By
* default the results are returned in ascending order of the
* <code>eventTimeStamp</code> of the events.
* </p>
*
* @return When set to <code>true</code>, returns the events in reverse
* order. By default the results are returned in ascending order of
* the <code>eventTimeStamp</code> of the events.
*/
public Boolean isReverseOrder() {
return this.reverseOrder;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDomain() != null)
sb.append("Domain: " + getDomain() + ",");
if (getExecution() != null)
sb.append("Execution: " + getExecution() + ",");
if (getNextPageToken() != null)
sb.append("NextPageToken: " + getNextPageToken() + ",");
if (getMaximumPageSize() != null)
sb.append("MaximumPageSize: " + getMaximumPageSize() + ",");
if (getReverseOrder() != null)
sb.append("ReverseOrder: " + getReverseOrder());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetWorkflowExecutionHistoryRequest == false)
return false;
GetWorkflowExecutionHistoryRequest other = (GetWorkflowExecutionHistoryRequest) obj;
if (other.getDomain() == null ^ this.getDomain() == null)
return false;
if (other.getDomain() != null
&& other.getDomain().equals(this.getDomain()) == false)
return false;
if (other.getExecution() == null ^ this.getExecution() == null)
return false;
if (other.getExecution() != null
&& other.getExecution().equals(this.getExecution()) == false)
return false;
if (other.getNextPageToken() == null ^ this.getNextPageToken() == null)
return false;
if (other.getNextPageToken() != null
&& other.getNextPageToken().equals(this.getNextPageToken()) == false)
return false;
if (other.getMaximumPageSize() == null
^ this.getMaximumPageSize() == null)
return false;
if (other.getMaximumPageSize() != null
&& other.getMaximumPageSize().equals(this.getMaximumPageSize()) == false)
return false;
if (other.getReverseOrder() == null ^ this.getReverseOrder() == null)
return false;
if (other.getReverseOrder() != null
&& other.getReverseOrder().equals(this.getReverseOrder()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getDomain() == null) ? 0 : getDomain().hashCode());
hashCode = prime * hashCode
+ ((getExecution() == null) ? 0 : getExecution().hashCode());
hashCode = prime
* hashCode
+ ((getNextPageToken() == null) ? 0 : getNextPageToken()
.hashCode());
hashCode = prime
* hashCode
+ ((getMaximumPageSize() == null) ? 0 : getMaximumPageSize()
.hashCode());
hashCode = prime
* hashCode
+ ((getReverseOrder() == null) ? 0 : getReverseOrder()
.hashCode());
return hashCode;
}
@Override
public GetWorkflowExecutionHistoryRequest clone() {
return (GetWorkflowExecutionHistoryRequest) super.clone();
}
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.12.14 at 03:30:44 PM CET
//
package ch.epfl.bbp.uima.xml.archivearticle3;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
* <p>Java class for momentabout.type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* &lt;complexType name="momentabout.type"&gt;
*   &lt;complexContent&gt;
*     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
*       &lt;group ref="{http://www.w3.org/1998/Math/MathML}momentabout.content" maxOccurs="unbounded"/&gt;
*       &lt;attGroup ref="{http://www.w3.org/1998/Math/MathML}momentabout.attlist"/&gt;
*     &lt;/restriction&gt;
*   &lt;/complexContent&gt;
* &lt;/complexType&gt;
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "momentabout.type", namespace = "http://www.w3.org/1998/Math/MathML", propOrder = {
"momentaboutContent"
})
public class MomentaboutType {
@XmlElementRefs({
@XmlElementRef(name = "munder", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mfenced", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "approx", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "infinity", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "diff", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "apply", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "notanumber", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "domainofapplication", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "setdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "rem", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "csch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "log", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "integers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "interval", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sum", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "maligngroup", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "piecewise", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arg", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mfrac", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "lambda", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "logbase", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "gt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "real", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "intersect", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "degree", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "reals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "ci", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "momentabout", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "condition", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "compose", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "msubsup", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mean", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "transpose", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "maction", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "tendsto", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "vectorproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arcsech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "munderover", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "median", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "naturalnumbers", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "rationals", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "lt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "tan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mn", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "curl", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "not", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "plus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "malignmark", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mtext", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "exists", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "matrix", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "ceiling", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "menclose", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "cot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "msup", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "ln", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "grad", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "times", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "inverse", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "equivalent", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "cartesianproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "lcm", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arcsec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "factorof", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mspace", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "exp", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mover", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "gcd", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "notin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "semantics", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mo", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "divergence", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "limit", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "forall", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "floor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "min", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "eq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "csc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "max", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "ident", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "and", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "tanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "quotient", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "merror", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "selector", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arctanh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "union", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccsch", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sec", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "divide", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "true", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mrow", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "in", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mphantom", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "notprsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "moment", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "geq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sech", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "uplimit", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "eulergamma", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arcsinh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "abs", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "product", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "msqrt", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "scalarproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "minus", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "domain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "imaginary", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "prsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "card", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mmultiscripts", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "root", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mstyle", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "power", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "list", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "primes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccoth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "cn", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "implies", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "factorial", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "conjugate", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "notsubset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "ms", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "lowlimit", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "xor", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "coth", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "sdev", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "image", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "variance", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "imaginaryi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "msub", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mtable", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mpadded", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "csymbol", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "determinant", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "partialdiff", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "set", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "codomain", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "subset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arctan", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "leq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "cosh", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "exponentiale", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "laplacian", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "outerproduct", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mroot", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arcsin", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "cos", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "pi", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "neq", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "bvar", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "false", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "emptyset", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "vector", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "arccsc", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "declare", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "mode", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "or", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "int", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
@XmlElementRef(name = "complexes", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class)
})
protected List<JAXBElement<?>> momentaboutContent;
@XmlAttribute(name = "encoding")
protected String encoding;
@XmlAttribute(name = "definitionURL")
@XmlSchemaType(name = "anyURI")
protected String definitionURL;
@XmlAttribute(name = "class")
@XmlSchemaType(name = "NMTOKENS")
protected List<String> clazz;
@XmlAttribute(name = "style")
protected String style;
@XmlAttribute(name = "xref")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
protected Object xref;
@XmlAttribute(name = "id")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlID
@XmlSchemaType(name = "ID")
protected String id;
@XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink")
@XmlSchemaType(name = "anySimpleType")
protected String href;
@XmlAnyAttribute
private Map<QName, String> otherAttributes = new HashMap<QName, String>();
/**
* Gets the value of the momentaboutContent property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the momentaboutContent property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getMomentaboutContent().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link JAXBElement }{@code <}{@link MunderType }{@code >}
* {@link JAXBElement }{@code <}{@link MfencedType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link DiffType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ApplyType }{@code >}
* {@link JAXBElement }{@code <}{@link SetdiffType }{@code >}
* {@link JAXBElement }{@code <}{@link DomainofapplicationType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link IntervalType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link MaligngroupType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link PiecewiseType }{@code >}
* {@link JAXBElement }{@code <}{@link LogbaseType }{@code >}
* {@link JAXBElement }{@code <}{@link LambdaType }{@code >}
* {@link JAXBElement }{@code <}{@link MfracType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link IntersectType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link CiType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link DegreeType }{@code >}
* {@link JAXBElement }{@code <}{@link MomentaboutType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConditionType }{@code >}
* {@link JAXBElement }{@code <}{@link MeanType }{@code >}
* {@link JAXBElement }{@code <}{@link MsubsupType }{@code >}
* {@link JAXBElement }{@code <}{@link TransposeType }{@code >}
* {@link JAXBElement }{@code <}{@link TendstoType }{@code >}
* {@link JAXBElement }{@code <}{@link MactionType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link VectorproductType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link MunderoverType }{@code >}
* {@link JAXBElement }{@code <}{@link MedianType }{@code >}
* {@link JAXBElement }{@code <}{@link MiType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link MnType }{@code >}
* {@link JAXBElement }{@code <}{@link CurlType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link MtextType }{@code >}
* {@link JAXBElement }{@code <}{@link MalignmarkType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link MatrixType }{@code >}
* {@link JAXBElement }{@code <}{@link MencloseType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link MsupType }{@code >}
* {@link JAXBElement }{@code <}{@link GradType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link InverseType }{@code >}
* {@link JAXBElement }{@code <}{@link CartesianproductType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link MspaceType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link NotinType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link MoverType }{@code >}
* {@link JAXBElement }{@code <}{@link SemanticsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link LimitType }{@code >}
* {@link JAXBElement }{@code <}{@link DivergenceType }{@code >}
* {@link JAXBElement }{@code <}{@link MoType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link SelectorType }{@code >}
* {@link JAXBElement }{@code <}{@link MerrorType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link UnionType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link MrowType }{@code >}
* {@link JAXBElement }{@code <}{@link InType }{@code >}
* {@link JAXBElement }{@code <}{@link NotprsubsetType }{@code >}
* {@link JAXBElement }{@code <}{@link MphantomType }{@code >}
* {@link JAXBElement }{@code <}{@link MomentType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link UplimitType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link MsqrtType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ScalarproductType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link PrsubsetType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link CardType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link MmultiscriptsType }{@code >}
* {@link JAXBElement }{@code <}{@link MstyleType }{@code >}
* {@link JAXBElement }{@code <}{@link ListType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link CnType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link ArithType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link NotsubsetType }{@code >}
* {@link JAXBElement }{@code <}{@link LowlimitType }{@code >}
* {@link JAXBElement }{@code <}{@link MsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link SdevType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link VarianceType }{@code >}
* {@link JAXBElement }{@code <}{@link MsubType }{@code >}
* {@link JAXBElement }{@code <}{@link MtableType }{@code >}
* {@link JAXBElement }{@code <}{@link CsymbolType }{@code >}
* {@link JAXBElement }{@code <}{@link MpaddedType }{@code >}
* {@link JAXBElement }{@code <}{@link DeterminantType }{@code >}
* {@link JAXBElement }{@code <}{@link PartialdiffType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link SetType }{@code >}
* {@link JAXBElement }{@code <}{@link FunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link SubsetType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link OuterproductType }{@code >}
* {@link JAXBElement }{@code <}{@link LaplacianType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link MrootType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link RelationsType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link BvarType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link VectorType }{@code >}
* {@link JAXBElement }{@code <}{@link ElementaryFunctionsType }{@code >}
* {@link JAXBElement }{@code <}{@link DeclareType }{@code >}
* {@link JAXBElement }{@code <}{@link LogicType }{@code >}
* {@link JAXBElement }{@code <}{@link ModeType }{@code >}
* {@link JAXBElement }{@code <}{@link ConstantType }{@code >}
* {@link JAXBElement }{@code <}{@link IntType }{@code >}
*
*
*/
public List<JAXBElement<?>> getMomentaboutContent() {
if (momentaboutContent == null) {
momentaboutContent = new ArrayList<JAXBElement<?>>();
}
return this.momentaboutContent;
}
/**
* Gets the value of the encoding property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getEncoding() {
return encoding;
}
/**
* Sets the value of the encoding property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setEncoding(String value) {
this.encoding = value;
}
/**
* Gets the value of the definitionURL property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDefinitionURL() {
return definitionURL;
}
/**
* Sets the value of the definitionURL property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDefinitionURL(String value) {
this.definitionURL = value;
}
/**
* Gets the value of the clazz property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the clazz property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getClazz().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getClazz() {
if (clazz == null) {
clazz = new ArrayList<String>();
}
return this.clazz;
}
/**
* Gets the value of the style property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getStyle() {
return style;
}
/**
* Sets the value of the style property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setStyle(String value) {
this.style = value;
}
/**
* Gets the value of the xref property.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getXref() {
return xref;
}
/**
* Sets the value of the xref property.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setXref(Object value) {
this.xref = value;
}
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setId(String value) {
this.id = value;
}
/**
* Gets the value of the href property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHref() {
return href;
}
/**
* Sets the value of the href property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHref(String value) {
this.href = value;
}
/**
* Gets a map that contains attributes that aren't bound to any typed property on this class.
*
* <p>
* The map is keyed by the name of the attribute and
* the value is the string value of the attribute.
*
* The map returned by this method is live, and you can add new attributes
* by updating the map directly. Because of this design, there's no setter.
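*
* <p>
* For example, to add an attribute (the QName and value below are purely illustrative):
* <pre>
*    getOtherAttributes().put(new QName("http://example.org/ns", "note"), "some value");
* </pre>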
*
*
* @return
* always non-null
*/
public Map<QName, String> getOtherAttributes() {
return otherAttributes;
}
}
|
|
/*
* Online
*
* Version 1.0
*
* November 12, 2017
*
* Copyright (c) 2017 Team NOTcmput301, CMPUT301, University of Alberta - All Rights Reserved
* You may use, distribute, or modify this code under terms and conditions of the Code of Student Behavior at University of Alberta.
* You can find a copy of the license in the project wiki on github. Otherwise please contact miller4@ualberta.ca.
*/
package com.notcmput301.habitbook;
import android.content.Intent;
import android.graphics.drawable.AnimationDrawable;
import android.os.Bundle;
import android.support.constraint.ConstraintLayout;
import android.support.design.widget.FloatingActionButton;
import android.util.Log;
import android.view.View;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.gson.Gson;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import de.hdodenhof.circleimageview.CircleImageView;
/**
* Activity handling online interactions
*
* @author NOTcmput301
* @version 1.0
* @see HabitType
* @since 1.0
*/
public class Online extends AppCompatActivity
implements NavigationView.OnNavigationItemSelectedListener {
private User loggedInUser;
private NetworkHandler nH;
private Gson gson = new Gson();
private ArrayList<HabitEvent> eventlist = new ArrayList<>();
private Map<Integer, String> monthMap = new HashMap<Integer, String>();
/**
* Called when the activity is first created.
*
* @param savedInstanceState previous instance of activity
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_online);
Intent receiver = getIntent();
String u = receiver.getExtras().getString("passedUser");
loggedInUser = gson.fromJson(u, User.class);
nH = new NetworkHandler(this);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
//init months
monthMap.put(0, "Jan"); monthMap.put(1, "Feb"); monthMap.put(2, "Mar");
monthMap.put(3, "Apr"); monthMap.put(4, "May"); monthMap.put(5, "Jun");
monthMap.put(6, "Jul"); monthMap.put(7, "Aug"); monthMap.put(8, "Sept");
monthMap.put(9, "Oct"); monthMap.put(10, "Nov"); monthMap.put(11, "Dec");
//follower request status button
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.Oln_viewRequestStatus);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent followerRequestActivity = new Intent(Online.this, FollowerRequestsActivity.class);
followerRequestActivity.putExtra("passedUser", gson.toJson(loggedInUser));
startActivity(followerRequestActivity);
}
});
FloatingActionButton fab2 = (FloatingActionButton) findViewById(R.id.oln_mapButton);
fab2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent map = new Intent(Online.this, MapsActivity.class);
map.putExtra("events", gson.toJson(eventlist));
startActivity(map);
}
});
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
drawer.addDrawerListener(toggle);
toggle.syncState();
//navigation view
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
//allow custom icon colours so the icons are not greyed out by the default tint
navigationView.setItemIconTintList(null);
//update the header view's displayed username
View headerview = navigationView.getHeaderView(0);
TextView navName = (TextView) headerview.findViewById(R.id.MNavH_Name);
navName.setText(loggedInUser.getUsername());
navigationView.setNavigationItemSelectedListener(this);
fillList();
}
//Checks if user exists
/**
* Sends a Follow request to a User
*
* @param view view of current activity status
*/
public void sendRequest(View view){
EditText reqText = (EditText) findViewById(R.id.Oln_EText);
Button sendReq = (Button) findViewById(R.id.Oln_sendRequest);
String uname = reqText.getText().toString().toLowerCase();
//check whether the user entered their own username
Followers newF = new Followers(loggedInUser.getUsername(), uname);
if (uname.equals(loggedInUser.getUsername())){
Toast.makeText(this, "Can't follow yourself", Toast.LENGTH_SHORT).show();
return;
}
//check if the person is already followed
ArrayList<Followers> allFollowers = new ArrayList<>();
ElasticSearch.getFollowerPairs gFP = new ElasticSearch.getFollowerPairs();
//the second argument denotes which perspective we are coming from.
//requester indicates we want all followers objects where requester=username
//requested indicates we want all followers objects where requested=username
gFP.execute(loggedInUser.getUsername(), "requester", "2");
try{
allFollowers = gFP.get();
//will return null if it failed to retrieve items
if (allFollowers==null){
Toast.makeText(this, "Ooops, Something went wrong on our end", Toast.LENGTH_SHORT).show();
return;
}
}catch(Exception e){
e.printStackTrace();
Toast.makeText(this, "Failed to retrieve followers. Check connection", Toast.LENGTH_SHORT).show();
}
//check if user already followed this person
for(Followers f: allFollowers){
if (f.getRequestedUser().equals(uname)){
Toast.makeText(this, "Follower already requested", Toast.LENGTH_SHORT).show();
return;
}
}
//check if username exist.. if it does send the request
if (uname.isEmpty()){
Toast.makeText(this, "Username needed", Toast.LENGTH_SHORT).show();
}else{
ElasticSearch.userExists ue = new ElasticSearch.userExists();
ue.execute(uname);
try{
int success = ue.get();
if (success < 0){
Toast.makeText(this, "Ooops, Something went wrong on our end", Toast.LENGTH_SHORT).show();
return;
}else if (success == 0){
Toast.makeText(this, "User does not exist", Toast.LENGTH_SHORT).show();
return;
}else {
//send request
ElasticSearch.addFollowerPair aF = new ElasticSearch.addFollowerPair();
aF.execute(newF);
try {
Boolean res = aF.get();
if (res) {
Toast.makeText(this, "Request Sent!", Toast.LENGTH_LONG).show();
return;
} else {
Toast.makeText(this, "Oops!, Something went wrong on our end", Toast.LENGTH_LONG).show();
return;
}
} catch (Exception e) {
Log.e("get failure", "Failed to add request");
e.printStackTrace();
Toast.makeText(this, "Ooops, Something went wrong on our end", Toast.LENGTH_LONG).show();
return;
}
}
}catch(Exception e){
Log.e("get failure", "Failed to retrieve");
e.printStackTrace();
Toast.makeText(this, "Ooops, Something went wrong on our end", Toast.LENGTH_LONG).show();
return;
}
}
}
/**
* Fill list with follower Habit Events
*
*/
public void fillList(){
//get all followed users
ElasticSearch.getFollowerPairs followers = new ElasticSearch.getFollowerPairs();
followers.execute(loggedInUser.getUsername(), "requester", "1");
ArrayList<Followers> fArr = new ArrayList<>();
try{
fArr = followers.get();
//will return null if it failed to retrieve items
if (fArr==null){
fArr = new ArrayList<>();
Toast.makeText(this, "Ooops, Something went wrong on our end", Toast.LENGTH_SHORT).show();
}
}catch(Exception e) {
e.printStackTrace();
Toast.makeText(this, "Failed to retrieve followers. Check connection", Toast.LENGTH_SHORT).show();
}
//for each followed user, cycle through all of their habit types.
for (Followers f: fArr){
String requestedUser = f.getRequestedUser();
ArrayList<HabitType> habitTypes = nH.getHabitList(requestedUser);
for (HabitType h: habitTypes){
eventlist.addAll(h.getEvents());
}
}
ListView eventListView = (ListView) findViewById(R.id.Oln_eventListView);
Online.OnlineListAdapter oAdapter = new Online.OnlineListAdapter();
eventListView.setAdapter(oAdapter);
}
class OnlineListAdapter extends BaseAdapter{
@Override
public int getCount() {
return eventlist.size();
}
@Override
public Object getItem(int position) {
return null;
}
@Override
public long getItemId(int position) {
return 0;
}
@Override
public View getView(final int position, View convertView, ViewGroup parent) {
convertView = getLayoutInflater().inflate(R.layout.online_list_layout, null);
TextView titleE = (TextView) convertView.findViewById(R.id.OLIST_Title);
TextView nameE = (TextView) convertView.findViewById(R.id.OLIST_Name);
TextView dateE = (TextView) convertView.findViewById(R.id.OLIST_Date);
CircleImageView imageV = (CircleImageView) convertView.findViewById(R.id.eventImg);
Button like = (Button) convertView.findViewById(R.id.likeButton);
like.setOnClickListener(new View.OnClickListener(){
@Override
public void onClick(View view){
HabitEvent habitEvent = eventlist.get(position);
habitEvent.setLikes(habitEvent.getLikes()+1);
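// Persist the like: fetch the logged-in user's habit types, find the matching
// event by its comment text, then delete and re-add the owning habit type with
// the updated event.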
ArrayList<HabitType> habitTypes = new ArrayList<HabitType>();
ElasticSearch.getHabitTypeList ghtl = new ElasticSearch.getHabitTypeList();
ghtl.execute(loggedInUser.getUsername());
try {
habitTypes = ghtl.get();
if (habitTypes==null){
habitTypes = new ArrayList<>();
}
}catch(Exception e){
e.printStackTrace();
Toast.makeText(Online.this, "Failed to retrieve items. Check connection", Toast.LENGTH_SHORT).show();
}
for (HabitType h : habitTypes){
//Toast.makeText(Online.this, h.getTitle().toString(), Toast.LENGTH_SHORT).show();
ArrayList<HabitEvent> he = h.getEvents();
for (HabitEvent eve : he){
//Toast.makeText(Online.this, eve.getComment().toString()+"--"+habitEvent.getComment().toString(), Toast.LENGTH_SHORT).show();
if (eve.getComment().equals(habitEvent.getComment())){
ElasticSearch.deleteHabitType delHT = new ElasticSearch.deleteHabitType();
delHT.execute(loggedInUser.getUsername(), h.getTitle());
try{
boolean result = delHT.get();
if (result){
//Toast.makeText(this, "deleted item!", Toast.LENGTH_SHORT).show();
finish();
}else{
Toast.makeText(Online.this, "Failed to delete", Toast.LENGTH_SHORT).show();
}
}catch (Exception e){
e.printStackTrace();
Toast.makeText(Online.this, "Failed to delete", Toast.LENGTH_SHORT).show();
}
h.getEvents().remove(eve);
h.getEvents().add(habitEvent);
ElasticSearch.addHabitType aht = new ElasticSearch.addHabitType();
aht.execute(h);
try{
/*boolean success = aht.get();
if (!success){
Toast.makeText(Online.this, "Opps, Something went wrong on our end", Toast.LENGTH_SHORT).show();
}else{
Toast.makeText(Online.this, "liked!", Toast.LENGTH_SHORT).show();
return;
}*/
}catch(Exception e){
Log.e("get failure", "Failed to retrieve");
e.printStackTrace();
}
break;
}
}
}
}
});
titleE.setText(eventlist.get(position).getHabit());
nameE.setText(eventlist.get(position).getComment());
Date start = eventlist.get(position).getDate();
Calendar cal = Calendar.getInstance();
cal.setTime(start);
int month = cal.get(Calendar.MONTH);
int day = cal.get(Calendar.DAY_OF_MONTH);
dateE.setText(monthMap.get(month) + " " +day);
imageV.setImageBitmap(eventlist.get(position).imageToBitmap());
return convertView;
}
}
/**
* Handles back button presses, closing the navigation drawer first if it is open.
*
*/
@Override
public void onBackPressed() {
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
if (drawer.isDrawerOpen(GravityCompat.START)) {
drawer.closeDrawer(GravityCompat.START);
} else {
super.onBackPressed();
}
}
/**
* Called when creating options menu
*
* @param menu menu object to operate on
*/
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.online, menu);
return true;
}
/**
* Handles options menu item selections.
*
* @param item selected menu item
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.follow) {
Intent intent = new Intent(Online.this, FollowerRequestsActivity.class);
startActivity(intent);
}
else if (id == R.id.maps){
}
return super.onOptionsItemSelected(item);
}
/**
* Function for handling navigation menu selections
*
* @param item selected navigation item
*/
@SuppressWarnings("StatementWithEmptyBody")
@Override
public boolean onNavigationItemSelected(MenuItem item) {
// Handle navigation view item clicks here.
int id = item.getItemId();
if (id == R.id.habit_type) {
Intent habitType = new Intent(Online.this, HabitTypeList2.class);
habitType.putExtra("passedUser", gson.toJson(loggedInUser));
startActivity(habitType);
finish();
} else if (id == R.id.today_habit) {
Intent habitType = new Intent(Online.this, MainActivity.class);
habitType.putExtra("passedUser", gson.toJson(loggedInUser));
finish();
startActivity(habitType);
} else if (id == R.id.habit_event_history) {
Intent history = new Intent(Online.this, HabitEventHistory2.class);
history.putExtra("passedUser", gson.toJson(loggedInUser));
finish();
startActivity(history);
} else if (id == R.id.online) {
} else if (id == R.id.logout) {
Intent logout = new Intent(Online.this, LoginActivity.class);
startActivity(logout);
finish();
}
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
drawer.closeDrawer(GravityCompat.START);
return true;
}
}
|
|
package org.visallo.core.ingest;
import com.google.common.base.Strings;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.apache.commons.io.IOUtils;
import org.json.JSONObject;
import org.vertexium.*;
import org.vertexium.property.StreamingPropertyValue;
import org.visallo.core.config.Configuration;
import org.visallo.core.model.WorkQueueNames;
import org.visallo.core.model.ontology.OntologyProperty;
import org.visallo.core.model.ontology.OntologyRepository;
import org.visallo.core.model.properties.VisalloProperties;
import org.visallo.core.model.properties.types.PropertyMetadata;
import org.visallo.core.model.properties.types.VisalloProperty;
import org.visallo.core.model.properties.types.VisalloPropertyUpdate;
import org.visallo.core.model.workQueue.Priority;
import org.visallo.core.model.workQueue.WorkQueueRepository;
import org.visallo.core.model.workspace.Workspace;
import org.visallo.core.model.workspace.WorkspaceRepository;
import org.visallo.core.security.VisalloVisibility;
import org.visallo.core.security.VisibilityTranslator;
import org.visallo.core.user.User;
import org.visallo.core.util.RowKeyHelper;
import org.visallo.core.util.ServiceLoaderUtil;
import org.visallo.core.util.VisalloLogger;
import org.visallo.core.util.VisalloLoggerFactory;
import org.visallo.web.clientapi.model.ClientApiImportProperty;
import org.visallo.web.clientapi.model.VisibilityJson;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import static org.vertexium.util.IterableUtils.toList;
@Singleton
public class FileImport {
private static final VisalloLogger LOGGER = VisalloLoggerFactory.getLogger(FileImport.class);
public static final String MULTI_VALUE_KEY = FileImport.class.getName();
private final VisibilityTranslator visibilityTranslator;
private final Graph graph;
private final WorkQueueRepository workQueueRepository;
private final WorkspaceRepository workspaceRepository;
private final WorkQueueNames workQueueNames;
private final OntologyRepository ontologyRepository;
private final Configuration configuration;
private List<FileImportSupportingFileHandler> fileImportSupportingFileHandlers;
private List<PostFileImportHandler> postFileImportHandlers;
@Inject
public FileImport(
VisibilityTranslator visibilityTranslator,
Graph graph,
WorkQueueRepository workQueueRepository,
WorkspaceRepository workspaceRepository,
WorkQueueNames workQueueNames,
OntologyRepository ontologyRepository,
Configuration configuration
) {
this.visibilityTranslator = visibilityTranslator;
this.graph = graph;
this.workQueueRepository = workQueueRepository;
this.workspaceRepository = workspaceRepository;
this.workQueueNames = workQueueNames;
this.ontologyRepository = ontologyRepository;
this.configuration = configuration;
}
public void importDirectory(
File dataDir,
boolean queueDuplicates,
String conceptTypeIRI,
String visibilitySource,
Workspace workspace,
Priority priority,
User user,
Authorizations authorizations
) throws IOException {
ensureInitialized();
LOGGER.debug("Importing files from %s", dataDir);
File[] files = dataDir.listFiles();
if (files == null || files.length == 0) {
return;
}
int totalFileCount = files.length;
int fileCount = 0;
int importedFileCount = 0;
try {
for (File f : files) {
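// Skip hidden or empty files and any file claimed by a supporting-file handler.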
if (f.getName().startsWith(".") || f.length() == 0) {
continue;
}
if (isSupportingFile(f)) {
continue;
}
LOGGER.debug("Importing file (%d/%d): %s", fileCount + 1, totalFileCount, f.getAbsolutePath());
try {
importFile(
f,
f.getName(),
queueDuplicates,
conceptTypeIRI,
null,
visibilitySource,
workspace,
true,
priority,
user,
authorizations
);
importedFileCount++;
} catch (Exception ex) {
LOGGER.error("Could not import %s", f.getAbsolutePath(), ex);
}
fileCount++;
}
} finally {
graph.flush();
}
LOGGER.debug("Imported %d, skipped %d files from %s", importedFileCount, fileCount - importedFileCount, dataDir);
}
private boolean isSupportingFile(File f) {
for (FileImportSupportingFileHandler fileImportSupportingFileHandler : this.fileImportSupportingFileHandlers) {
if (fileImportSupportingFileHandler.isSupportingFile(f)) {
return true;
}
}
return false;
}
public Vertex importFile(
File f,
boolean queueDuplicates,
String visibilitySource,
Workspace workspace,
Priority priority,
User user,
Authorizations authorizations
) throws Exception {
return importFile(
f,
f.getName(),
queueDuplicates,
null,
null,
visibilitySource,
workspace,
true,
priority,
user,
authorizations
);
}
public Vertex importFile(
File f,
String originalFilename,
boolean queueDuplicates,
String conceptId,
ClientApiImportProperty[] properties,
String visibilitySource,
Workspace workspace,
boolean findExistingByFileHash,
Priority priority,
User user,
Authorizations authorizations
) throws Exception {
Vertex vertex;
ensureInitialized();
String hash = calculateFileHash(f);
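// Deduplicate by content: if a vertex with the same SHA-256 hash already exists
// (see calculateFileHash), optionally re-queue it for processing instead of creating
// a new vertex.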
if (findExistingByFileHash) {
vertex = findExistingVertexWithHash(hash, authorizations);
if (vertex != null) {
LOGGER.debug("vertex already exists with hash %s", hash);
if (queueDuplicates) {
LOGGER.debug(
"pushing %s on to %s queue",
vertex.getId(),
workQueueNames.getGraphPropertyQueueName()
);
if (workspace != null) {
workspaceRepository.updateEntityOnWorkspace(
workspace,
vertex.getId(),
user
);
workQueueRepository.broadcastElement(vertex, workspace.getWorkspaceId());
workQueueRepository.pushGraphPropertyQueue(
vertex,
MULTI_VALUE_KEY,
VisalloProperties.RAW.getPropertyName(),
workspace.getWorkspaceId(),
visibilitySource,
priority
);
} else {
workQueueRepository.pushGraphPropertyQueue(
vertex,
MULTI_VALUE_KEY,
VisalloProperties.RAW.getPropertyName(),
priority
);
}
}
return vertex;
}
}
List<FileImportSupportingFileHandler.AddSupportingFilesResult> addSupportingFilesResults = new ArrayList<>();
try (FileInputStream fileInputStream = new FileInputStream(f)) {
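// An optional side-car metadata JSON file may supply a predefined vertex id and a
// visibility source that overrides the one passed in.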
JSONObject metadataJson = loadMetadataJson(f);
String predefinedId = null;
if (metadataJson != null) {
predefinedId = metadataJson.optString("id", null);
String metadataVisibilitySource = metadataJson.optString("visibilitySource", null);
if (metadataVisibilitySource != null) {
visibilitySource = metadataVisibilitySource;
}
}
StreamingPropertyValue rawValue = StreamingPropertyValue.create(fileInputStream, byte[].class);
rawValue.searchIndex(false);
Date modifiedDate = new Date();
VisibilityJson visibilityJson = VisibilityJson.updateVisibilitySourceAndAddWorkspaceId(null, visibilitySource, workspace == null ? null : workspace.getWorkspaceId());
VisalloVisibility visalloVisibility = this.visibilityTranslator.toVisibility(visibilityJson);
Visibility visibility = visalloVisibility.getVisibility();
PropertyMetadata propertyMetadata = new PropertyMetadata(modifiedDate, user, 0.1, visibilityJson, visibility);
Visibility defaultVisibility = visibilityTranslator.getDefaultVisibility();
VisibilityJson defaultVisibilityJson = new VisibilityJson(defaultVisibility.getVisibilityString());
PropertyMetadata defaultPropertyMetadata = new PropertyMetadata(modifiedDate, user, defaultVisibilityJson, defaultVisibility);
VertexBuilder vertexBuilder;
if (predefinedId == null) {
vertexBuilder = this.graph.prepareVertex(visibility);
} else {
vertexBuilder = this.graph.prepareVertex(predefinedId, visibility);
}
List<VisalloPropertyUpdate> changedProperties = new ArrayList<>();
VisalloProperties.RAW.updateProperty(changedProperties, null, vertexBuilder, rawValue, defaultPropertyMetadata);
VisalloProperties.CONTENT_HASH.updateProperty(changedProperties, null, vertexBuilder, MULTI_VALUE_KEY, hash, defaultPropertyMetadata);
String fileName = Strings.isNullOrEmpty(originalFilename) ? f.getName() : originalFilename;
VisalloProperties.FILE_NAME.updateProperty(changedProperties, null, vertexBuilder, MULTI_VALUE_KEY, fileName, propertyMetadata);
VisalloProperties.MODIFIED_DATE.updateProperty(
changedProperties,
null,
vertexBuilder,
new Date(f.lastModified()),
(Metadata) null,
defaultVisibility
);
VisalloProperties.MODIFIED_BY.updateProperty(
changedProperties,
null,
vertexBuilder,
user.getUserId(),
(Metadata) null,
defaultVisibility
);
VisalloProperties.VISIBILITY_JSON.updateProperty(
changedProperties,
null,
vertexBuilder,
visibilityJson,
(Metadata) null,
defaultVisibility
);
if (conceptId != null) {
VisalloProperties.CONCEPT_TYPE.updateProperty(
changedProperties,
null,
vertexBuilder,
conceptId,
(Metadata) null,
defaultVisibility
);
}
if (properties != null) {
addProperties(properties, changedProperties, vertexBuilder, visibilityJson, workspace, user);
}
for (FileImportSupportingFileHandler fileImportSupportingFileHandler : this.fileImportSupportingFileHandlers) {
FileImportSupportingFileHandler.AddSupportingFilesResult addSupportingFilesResult = fileImportSupportingFileHandler.addSupportingFiles(vertexBuilder, f, visibility);
if (addSupportingFilesResult != null) {
addSupportingFilesResults.add(addSupportingFilesResult);
}
}
vertex = vertexBuilder.save(authorizations);
for (PostFileImportHandler postFileImportHandler : this.postFileImportHandlers) {
postFileImportHandler.handle(graph, vertex, changedProperties, workspace, propertyMetadata, visibility, user, authorizations);
}
graph.flush();
String workspaceId = null;
if (workspace != null) {
workspaceRepository.updateEntityOnWorkspace(workspace, vertex.getId(), user);
workspaceId = workspace.getWorkspaceId();
}
LOGGER.debug("File %s imported. vertex id: %s", f.getAbsolutePath(), vertex.getId());
LOGGER.debug("pushing %s on to %s queue", vertex.getId(), workQueueNames.getGraphPropertyQueueName());
this.workQueueRepository.broadcastElement(vertex, workspaceId);
this.workQueueRepository.pushGraphVisalloPropertyQueue(
vertex,
changedProperties,
workspace == null ? null : workspace.getWorkspaceId(),
visibilitySource,
priority
);
return vertex;
} finally {
for (FileImportSupportingFileHandler.AddSupportingFilesResult addSupportingFilesResult : addSupportingFilesResults) {
addSupportingFilesResult.close();
}
}
}
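// Converts client-supplied import properties into typed Visallo property updates,
// resolving each property by its ontology IRI and falling back to an ontology intent
// lookup when no direct match exists.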
private void addProperties(ClientApiImportProperty[] properties, List<VisalloPropertyUpdate> changedProperties, VertexBuilder vertexBuilder, VisibilityJson visibilityJson, Workspace workspace, User user) throws ParseException {
for (ClientApiImportProperty property : properties) {
OntologyProperty ontologyProperty = ontologyRepository.getPropertyByIRI(property.getName(), workspace.getWorkspaceId());
if (ontologyProperty == null) {
ontologyProperty = ontologyRepository.getRequiredPropertyByIntent(property.getName(), workspace.getWorkspaceId());
}
Object value = ontologyProperty.convertString(property.getValue());
VisalloProperty prop = ontologyProperty.getVisalloProperty();
PropertyMetadata propMetadata = new PropertyMetadata(user, visibilityJson, visibilityTranslator.getDefaultVisibility());
for (Map.Entry<String, Object> metadataEntry : property.getMetadata().entrySet()) {
propMetadata.add(metadataEntry.getKey(), metadataEntry.getValue(), visibilityTranslator.getDefaultVisibility());
}
//noinspection unchecked
prop.updateProperty(changedProperties, null, vertexBuilder, property.getKey(), value, propMetadata);
}
}
public List<Vertex> importVertices(
Workspace workspace,
List<FileOptions> files,
Priority priority,
boolean addToWorkspace,
boolean findExistingByFileHash,
User user,
Authorizations authorizations
) throws Exception {
ensureInitialized();
List<Vertex> vertices = new ArrayList<>();
for (FileOptions file : files) {
if (isSupportingFile(file.getFile())) {
LOGGER.debug("Skipping file: %s (supporting file)", file.getFile().getAbsolutePath());
continue;
}
LOGGER.debug("Processing file: %s", file.getFile().getAbsolutePath());
Vertex vertex = importFile(
file.getFile(),
file.getOriginalFilename(),
true,
file.getConceptId(),
file.getProperties(),
file.getVisibilitySource(),
workspace,
findExistingByFileHash,
priority,
user,
authorizations
);
vertices.add(vertex);
}
return vertices;
}
private JSONObject loadMetadataJson(File f) throws IOException {
File metadataFile = MetadataFileImportSupportingFileHandler.getMetadataFile(f);
if (metadataFile.exists()) {
try (FileInputStream in = new FileInputStream(metadataFile)) {
String fileContents = IOUtils.toString(in);
return new JSONObject(fileContents);
}
}
return null;
}
private void ensureInitialized() {
if (fileImportSupportingFileHandlers == null) {
fileImportSupportingFileHandlers = getFileImportSupportingFileHandlers();
}
if (postFileImportHandlers == null) {
postFileImportHandlers = getPostFileImportHandlers();
}
}
protected List<PostFileImportHandler> getPostFileImportHandlers() {
return toList(ServiceLoaderUtil.load(PostFileImportHandler.class, this.configuration));
}
protected List<FileImportSupportingFileHandler> getFileImportSupportingFileHandlers() {
return toList(ServiceLoaderUtil.load(FileImportSupportingFileHandler.class, this.configuration));
}
private Vertex findExistingVertexWithHash(String hash, Authorizations authorizations) {
Iterator<Vertex> existingVertices = this.graph.query(authorizations)
.has(VisalloProperties.CONTENT_HASH.getPropertyName(), hash)
.vertices()
.iterator();
if (existingVertices.hasNext()) {
return existingVertices.next();
}
return null;
}
private String calculateFileHash(File f) throws IOException {
try (FileInputStream fileInputStream = new FileInputStream(f)) {
return RowKeyHelper.buildSHA256KeyString(fileInputStream);
}
}
public static class FileOptions {
private File file;
private String originalFilename;
private String visibilitySource;
private String conceptId;
private ClientApiImportProperty[] properties;
public File getFile() {
return file;
}
public void setFile(File file) {
this.file = file;
}
public String getOriginalFilename() {
return originalFilename;
}
public void setOriginalFilename(String originalFilename) {
this.originalFilename = originalFilename;
}
public String getConceptId() {
return conceptId;
}
public void setConceptId(String conceptId) {
this.conceptId = conceptId;
}
public String getVisibilitySource() {
return visibilitySource;
}
public void setVisibilitySource(String visibilitySource) {
this.visibilitySource = visibilitySource;
}
public void setProperties(ClientApiImportProperty[] properties) {
this.properties = properties;
}
public ClientApiImportProperty[] getProperties() {
return properties;
}
}
}
|
|
package training.nuttyyokel.controller;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import training.nuttyyokel.IntegrationTestParent;
import training.nuttyyokel.builders.TreeBuilder;
import training.nuttyyokel.builders.TreeRequestResponseBuilder;
import training.nuttyyokel.builders.TreeUpdateRequestBuilder;
import training.nuttyyokel.dto.FieldErrorResponse;
import training.nuttyyokel.dto.GenericResponse;
import training.nuttyyokel.dto.tree.TreeRequestResponse;
import training.nuttyyokel.dto.tree.TreeSaveResponse;
import training.nuttyyokel.dto.tree.TreeUpdateRequest;
import javax.transaction.Transactional;
import java.text.DateFormat;
import java.util.Arrays;
import static org.hamcrest.Matchers.*;
public class TreeControllerTest extends IntegrationTestParent {
private static final int MAX_HEALTH = 9;
private static final int MAX_HEIGHT = 100000;
private static final String MESSAGE_TOO_SMALL = "must be greater than or equal to 1";
private static final String MESSAGE_HEALTH_TOO_BIG = "must be less than or equal to " + MAX_HEALTH;
private static final String MESSAGE_HEIGHT_TOO_BIG = "must be less than or equal to " + MAX_HEIGHT;
private static final String MESSAGE_MAY_NOT_BE_NULL = "may not be null";
private static final String FIELD_HEIGHT = "height";
private static final String FIELD_HEALTH = "health";
private static final String FIELD_NAME = "name";
private static final String FIELD_TYPE = "type";
private static final String FIELD_DATE_PLANTED = "datePlanted";
private static final String NAME_OTHER = "Jake";
@Test
public void testGetAll_whenEmpty_resultsEmptyList() {
ResponseEntity<Object[]> result = getRestTemplate()
.getForEntity(getWebPath().base().tree().build(), Object[].class);
Assert.assertEquals(HttpStatus.OK, result.getStatusCode());
Assert.assertEquals(0, result.getBody().length);
}
@Test
@Transactional
public void testGetAll_whenFilled_resultsPopulatedList() {
createTree();
createTree(NAME_OTHER);
ResponseEntity<Object[]> result = getRestTemplate()
.getForEntity(getWebPath().base().tree().build(), Object[].class);
Assert.assertThat(result.getStatusCode(), is(HttpStatus.OK));
Assert.assertEquals(2, result.getBody().length);
}
@Test
public void testGetTree_whenEmpty_resultsEmptyResponse() {
String path = getWebPath().base().tree(0).build();
ResponseEntity<GenericResponse> result = getRestTemplate().getForEntity(path, GenericResponse.class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertEquals(TreeController.MESSAGE_INVALID_ID, result.getBody().getMessage());
}
@Test
@Transactional
public void testGetTree_whenExists_resultsRightTree() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
String path = getWebPath().base().tree(id).build();
ResponseEntity<TreeRequestResponse> result = getRestTemplate().getForEntity(path, TreeRequestResponse.class);
DateFormat format = DateFormat.getDateInstance();
Assert.assertEquals(HttpStatus.OK, result.getStatusCode());
Assert.assertEquals(TreeBuilder.NAME, result.getBody().getName());
Assert.assertEquals(TreeBuilder.TYPE, result.getBody().getType());
Assert.assertEquals(format.format(TreeBuilder.DATE_PLANTED), format.format(result.getBody().getDatePlanted()));
Assert.assertEquals(TreeBuilder.HEALTH, result.getBody().getHealth());
Assert.assertEquals(TreeBuilder.HEIGHT, result.getBody().getHeight(), 0);
}
@Test
@Transactional
public void testSave_whenValid_resultsSuccess() {
ResponseEntity result = createTree();
Assert.assertEquals(HttpStatus.OK, result.getStatusCode());
Assert.assertThat(((TreeSaveResponse) result.getBody()).getId(), greaterThanOrEqualTo(0));
}
@Test
@Transactional
public void testSave_whenDuplicateName_resultsError() {
createTree();
ResponseEntity result = createTree();
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertEquals(TreeController.MESSAGE_INVALID_FIELD, ((GenericResponse) result.getBody()).getMessage());
Assert.assertTrue(((FieldErrorResponse) result.getBody()).getField().contains(FIELD_NAME));
}
@Test
@Transactional
public void testSave_whenEmptyFields_resultsError() {
TreeRequestResponse treeRequestResponse = new TreeRequestResponseBuilder()
.emptyTreeRequestResponseBuilder()
.build();
ResponseEntity<FieldErrorResponse[]> result = getRestTemplate()
.postForEntity(getWebPath().base().tree().build(), treeRequestResponse, FieldErrorResponse[].class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertThat(Arrays.asList(result.getBody()), containsInAnyOrder(
new FieldErrorResponse(MESSAGE_TOO_SMALL, FIELD_HEIGHT),
new FieldErrorResponse(MESSAGE_TOO_SMALL, FIELD_HEALTH),
new FieldErrorResponse(MESSAGE_MAY_NOT_BE_NULL, FIELD_DATE_PLANTED),
new FieldErrorResponse(MESSAGE_MAY_NOT_BE_NULL, FIELD_NAME),
new FieldErrorResponse(MESSAGE_MAY_NOT_BE_NULL, FIELD_TYPE)
));
}
@Test
@Transactional
public void testSave_whenTooBigFields_resultsError() {
TreeRequestResponse treeRequestResponse = new TreeRequestResponseBuilder()
.normalTreeRequestResponseBuilder()
.setHealth(MAX_HEALTH + 1)
.setHeight(MAX_HEIGHT + 1)
.build();
ResponseEntity<FieldErrorResponse[]> result = getRestTemplate()
.postForEntity(getWebPath().base().tree().build(), treeRequestResponse, FieldErrorResponse[].class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertThat(Arrays.asList(result.getBody()), containsInAnyOrder(
new FieldErrorResponse(MESSAGE_HEIGHT_TOO_BIG, FIELD_HEIGHT),
new FieldErrorResponse(MESSAGE_HEALTH_TOO_BIG, FIELD_HEALTH)
));
}
@Test
@Transactional
public void testUpdate_whenValid_resultsSuccess() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
TreeUpdateRequest updateRequest = new TreeUpdateRequestBuilder().normalTreeUpdateRequest().build();
ResponseEntity<GenericResponse> result = getRestTemplate()
.postForEntity(getWebPath().base().tree(id).build(), updateRequest, GenericResponse.class);
Assert.assertEquals(HttpStatus.OK, result.getStatusCode());
Assert.assertThat(result.getBody().getStatus(), is(HttpStatus.OK));
Assert.assertThat(result.getBody().getMessage(), is(TreeController.MESSAGE_SUCCESS));
}
@Test
@Transactional
public void testUpdate_whenInvalidId_resultsErrorMessage() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
TreeUpdateRequest updateRequest = new TreeUpdateRequestBuilder().normalTreeUpdateRequest().build();
ResponseEntity<GenericResponse> result = getRestTemplate()
.postForEntity(getWebPath().base().tree(id + 1).build(), updateRequest, GenericResponse.class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertThat(result.getBody().getStatus(), is(HttpStatus.BAD_REQUEST));
Assert.assertThat(result.getBody().getMessage(), is(TreeController.MESSAGE_INVALID_ID));
}
@Test
@Transactional
public void testUpdate_whenValuesTooSmall_resultsFieldErrorList() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
TreeUpdateRequest updateRequest = new TreeUpdateRequestBuilder().emptyTreeUpdateRequest().build();
ResponseEntity<FieldErrorResponse[]> result = getRestTemplate()
.postForEntity(getWebPath().base().tree(id).build(), updateRequest, FieldErrorResponse[].class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertEquals(2, result.getBody().length);
Assert.assertThat(Arrays.asList(result.getBody()), containsInAnyOrder(
new FieldErrorResponse(MESSAGE_TOO_SMALL, FIELD_HEIGHT),
new FieldErrorResponse(MESSAGE_TOO_SMALL, FIELD_HEALTH)
));
}
@Test
@Transactional
public void testUpdate_whenValuesTooBig_resultsFieldErrorList() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
TreeUpdateRequest updateRequest = new TreeUpdateRequestBuilder()
.emptyTreeUpdateRequest()
.setHealth(MAX_HEALTH + 1)
.setHeight(MAX_HEIGHT + 1)
.build();
ResponseEntity<FieldErrorResponse[]> result = getRestTemplate()
.postForEntity(getWebPath().base().tree(id).build(), updateRequest, FieldErrorResponse[].class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertEquals(2, result.getBody().length);
Assert.assertThat(Arrays.asList(result.getBody()), containsInAnyOrder(
new FieldErrorResponse(MESSAGE_HEIGHT_TOO_BIG, FIELD_HEIGHT),
new FieldErrorResponse(MESSAGE_HEALTH_TOO_BIG, FIELD_HEALTH)
));
}
@Test
@Transactional
public void testDelete_whenValid_resultsSuccess() {
int id = ((TreeSaveResponse) createTree().getBody()).getId();
String path = getWebPath().base().tree(id).build();
ResponseEntity<GenericResponse> result = getRestTemplate().exchange(path, HttpMethod.DELETE, null, GenericResponse.class);
Assert.assertEquals(HttpStatus.OK, result.getStatusCode());
Assert.assertThat(result.getBody().getStatus(), is(HttpStatus.OK));
Assert.assertThat(result.getBody().getMessage(), is(TreeController.MESSAGE_SUCCESS));
}
@Test
@Transactional
public void testDelete_whenInvalidId_resultsError() {
String path = getWebPath().base().tree(0).build();
ResponseEntity<GenericResponse> result = getRestTemplate().exchange(path, HttpMethod.DELETE, null, GenericResponse.class);
Assert.assertEquals(HttpStatus.BAD_REQUEST, result.getStatusCode());
Assert.assertEquals(TreeController.MESSAGE_INVALID_ID, result.getBody().getMessage());
}
private ResponseEntity<? extends GenericResponse> createTree(String... name) {
String finalName = (name == null || name.length < 1) ? TreeBuilder.NAME : name[0];
String path = getWebPath().base().tree().build();
TreeRequestResponse treeRequestResponse = new TreeRequestResponseBuilder()
.normalTreeRequestResponseBuilder()
.setName(finalName)
.build();
ResponseEntity<? extends GenericResponse> result = getRestTemplate()
.postForEntity(path, treeRequestResponse, TreeSaveResponse.class);
if (result.getStatusCode() != HttpStatus.OK) {
result = getRestTemplate().postForEntity(path, treeRequestResponse, FieldErrorResponse.class);
}
return result;
}
}
|
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Information about a certificate.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Certificate implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The ARN of the certificate.
* </p>
*/
private String certificateArn;
/**
* <p>
* The ID of the certificate.
* </p>
*/
private String certificateId;
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*/
private String status;
/**
* <p>
* The date and time the certificate was created.
* </p>
*/
private java.util.Date creationDate;
/**
* <p>
* The ARN of the certificate.
* </p>
*
* @param certificateArn
* The ARN of the certificate.
*/
public void setCertificateArn(String certificateArn) {
this.certificateArn = certificateArn;
}
/**
* <p>
* The ARN of the certificate.
* </p>
*
* @return The ARN of the certificate.
*/
public String getCertificateArn() {
return this.certificateArn;
}
/**
* <p>
* The ARN of the certificate.
* </p>
*
* @param certificateArn
* The ARN of the certificate.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Certificate withCertificateArn(String certificateArn) {
setCertificateArn(certificateArn);
return this;
}
/**
* <p>
* The ID of the certificate.
* </p>
*
* @param certificateId
* The ID of the certificate.
*/
public void setCertificateId(String certificateId) {
this.certificateId = certificateId;
}
/**
* <p>
* The ID of the certificate.
* </p>
*
* @return The ID of the certificate.
*/
public String getCertificateId() {
return this.certificateId;
}
/**
* <p>
* The ID of the certificate.
* </p>
*
* @param certificateId
* The ID of the certificate.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Certificate withCertificateId(String certificateId) {
setCertificateId(certificateId);
return this;
}
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*
* @param status
* The status of the certificate.</p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* @see CertificateStatus
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*
* @return The status of the certificate.</p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* @see CertificateStatus
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*
* @param status
* The status of the certificate.</p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* @return Returns a reference to this object so that method calls can be chained together.
* @see CertificateStatus
*/
public Certificate withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*
* @param status
* The status of the certificate.</p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* @see CertificateStatus
*/
public void setStatus(CertificateStatus status) {
this.status = status.toString();
}
/**
* <p>
* The status of the certificate.
* </p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* </p>
*
* @param status
* The status of the certificate.</p>
* <p>
* The status value REGISTER_INACTIVE is deprecated and should not be used.
* @return Returns a reference to this object so that method calls can be chained together.
* @see CertificateStatus
*/
public Certificate withStatus(CertificateStatus status) {
setStatus(status);
return this;
}
/**
* <p>
* The date and time the certificate was created.
* </p>
*
* @param creationDate
* The date and time the certificate was created.
*/
public void setCreationDate(java.util.Date creationDate) {
this.creationDate = creationDate;
}
/**
* <p>
* The date and time the certificate was created.
* </p>
*
* @return The date and time the certificate was created.
*/
public java.util.Date getCreationDate() {
return this.creationDate;
}
/**
* <p>
* The date and time the certificate was created.
* </p>
*
* @param creationDate
* The date and time the certificate was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Certificate withCreationDate(java.util.Date creationDate) {
setCreationDate(creationDate);
return this;
}
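// Fluent-construction sketch (illustrative comment only, not part of the generated SDK source);
// the ARN and ID values below are hypothetical placeholders:
//
//   Certificate cert = new Certificate()
//           .withCertificateArn("arn:aws:iot:us-east-1:123456789012:cert/example")
//           .withCertificateId("example")
//           .withStatus(CertificateStatus.ACTIVE)
//           .withCreationDate(new java.util.Date());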
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getCertificateArn() != null)
sb.append("CertificateArn: ").append(getCertificateArn()).append(",");
if (getCertificateId() != null)
sb.append("CertificateId: ").append(getCertificateId()).append(",");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getCreationDate() != null)
sb.append("CreationDate: ").append(getCreationDate());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Certificate == false)
return false;
Certificate other = (Certificate) obj;
if (other.getCertificateArn() == null ^ this.getCertificateArn() == null)
return false;
if (other.getCertificateArn() != null && other.getCertificateArn().equals(this.getCertificateArn()) == false)
return false;
if (other.getCertificateId() == null ^ this.getCertificateId() == null)
return false;
if (other.getCertificateId() != null && other.getCertificateId().equals(this.getCertificateId()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getCreationDate() == null ^ this.getCreationDate() == null)
return false;
if (other.getCreationDate() != null && other.getCreationDate().equals(this.getCreationDate()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getCertificateArn() == null) ? 0 : getCertificateArn().hashCode());
hashCode = prime * hashCode + ((getCertificateId() == null) ? 0 : getCertificateId().hashCode());
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getCreationDate() == null) ? 0 : getCreationDate().hashCode());
return hashCode;
}
@Override
public Certificate clone() {
try {
return (Certificate) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.iot.model.transform.CertificateMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
|
|
/*******************************************************************************
* Copyright 2002-2012, OpenNebula Project Leads (OpenNebula.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.opennebula.client;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.client.XmlRpcClient;
import org.apache.xmlrpc.client.XmlRpcClientConfigImpl;
/**
* This class represents the connection with the core and handles the
* xml-rpc calls.
*
*/
public class Client{
//--------------------------------------------------------------------------
// PUBLIC INTERFACE
//--------------------------------------------------------------------------
/**
* Creates a new xml-rpc client with default options: the auth. file will be
* assumed to be at $ONE_AUTH, and the endpoint will be set to $ONE_XMLRPC. <br/>
* It is the equivalent of Client(null, null).
*
* @throws ClientConfigurationException
* if the default configuration options are invalid.
*/
public Client() throws ClientConfigurationException
{
setOneAuth(null);
setOneEndPoint(null);
}
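// Note (illustrative comment, not part of the original source): this no-arg constructor is
// equivalent to Client(null, null); the credentials are then read from $ONE_AUTH (or
// ~/.one/one_auth) and the endpoint from $ONE_XMLRPC, falling back to
// http://localhost:2633/RPC2.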
/**
* Creates a new xml-rpc client with specified options.
*
* @param secret
* A string containing the ONE user:password tuple. Can be null
* @param endpoint
* Where the rpc server is listening, must be something like
* "http://localhost:2633/RPC2". Can be null
* @throws ClientConfigurationException
* if the configuration options are invalid
*/
public Client(String secret, String endpoint)
throws ClientConfigurationException
{
setOneAuth(secret);
setOneEndPoint(endpoint);
}
/**
* Performs an XML-RPC call.
*
* @param action ONE action
* @param args ONE arguments
* @return The server's xml-rpc response encapsulated
*/
public OneResponse call(String action, Object...args)
{
boolean success = false;
String msg = null;
try
{
Object[] params = new Object[args.length + 1];
params[0] = oneAuth;
for(int i=0; i<args.length; i++)
params[i+1] = args[i];
Object[] result = (Object[]) client.execute("one."+action, params);
success = (Boolean) result[0];
// In some cases, the xml-rpc response only has a boolean
// OUT parameter
if(result.length > 1)
{
try
{
msg = (String) result[1];
}
catch (ClassCastException e)
{
// The result may be an Integer
msg = ((Integer) result[1]).toString();
}
}
}
catch (XmlRpcException e)
{
msg = e.getMessage();
}
return new OneResponse(success, msg);
}
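// Usage sketch (illustrative comment, not part of the original OCA source). The action name
// is prefixed with "one." internally, so call("vm.info", 42) invokes the "one.vm.info" RPC
// method; the VM id 42 is a hypothetical argument, and the error accessors on OneResponse
// are assumed here.
//
//   Client oneClient = new Client();                    // uses $ONE_AUTH / $ONE_XMLRPC
//   OneResponse response = oneClient.call("vm.info", 42);
//   if (response.isError()) {
//       System.err.println(response.getErrorMessage());
//   }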
//--------------------------------------------------------------------------
// PRIVATE ATTRIBUTES AND METHODS
//--------------------------------------------------------------------------
private String oneAuth;
private String oneEndPoint;
private XmlRpcClient client;
private void setOneAuth(String secret) throws ClientConfigurationException
{
oneAuth = secret;
try
{
if(oneAuth == null)
{
String oneAuthEnv = System.getenv("ONE_AUTH");
File authFile;
if ( oneAuthEnv != null && oneAuthEnv.length() != 0)
{
authFile = new File(oneAuthEnv);
}
else
{
authFile = new File(System.getenv("HOME")+"/.one/one_auth");
}
oneAuth =
(new BufferedReader(new FileReader(authFile))).readLine();
}
oneAuth = oneAuth.trim();
}
catch (FileNotFoundException e)
{
// This comes first, since it is a special case of IOException
throw new ClientConfigurationException("ONE_AUTH file not present");
}
catch (IOException e)
{
// You could have the file, but for some reason the program cannot
// read it
throw new ClientConfigurationException("ONE_AUTH file unreadable");
}
}
private void setOneEndPoint(String endpoint)
throws ClientConfigurationException
{
oneEndPoint = "http://localhost:2633/RPC2";
if(endpoint != null)
{
oneEndPoint = endpoint;
}
else
{
String oneXmlRpcEnv = System.getenv("ONE_XMLRPC");
if ( oneXmlRpcEnv != null && oneXmlRpcEnv.length() != 0 )
{
oneEndPoint = oneXmlRpcEnv;
}
}
XmlRpcClientConfigImpl config = new XmlRpcClientConfigImpl();
try
{
config.setServerURL(new URL(oneEndPoint));
}
catch (MalformedURLException e)
{
throw new ClientConfigurationException(
"The URL "+oneEndPoint+" is malformed.");
}
client = new XmlRpcClient();
client.setConfig(config);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph.failover;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.executiongraph.Execution;
import org.apache.flink.runtime.executiongraph.ExecutionGraph;
import org.apache.flink.runtime.executiongraph.ExecutionVertex;
import org.apache.flink.runtime.executiongraph.GlobalModVersionMismatch;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobmanager.scheduler.CoLocationGroup;
import org.apache.flink.runtime.jobmanager.scheduler.LocationPreferenceConstraint;
import org.apache.flink.util.AbstractID;
import org.apache.flink.util.FlinkException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* FailoverRegion manages the failover of a minimal pipeline-connected subgraph.
* During failover its state moves from RUNNING to CANCELLING, then to CANCELED,
* then to CREATED, and finally back to RUNNING.
*/
public class FailoverRegion {
private static final AtomicReferenceFieldUpdater<FailoverRegion, JobStatus> STATE_UPDATER =
AtomicReferenceFieldUpdater.newUpdater(FailoverRegion.class, JobStatus.class, "state");
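// The updater above enables lock-free CAS transitions on the volatile "state" field
// (see transitionState below). A minimal standalone sketch of the same pattern, with
// purely illustrative names:
//
//   class Holder {
//       volatile JobStatus state = JobStatus.RUNNING;
//       static final AtomicReferenceFieldUpdater<Holder, JobStatus> UPDATER =
//               AtomicReferenceFieldUpdater.newUpdater(Holder.class, JobStatus.class, "state");
//       boolean transition(JobStatus expected, JobStatus next) {
//           return UPDATER.compareAndSet(this, expected, next); // true only if state == expected
//       }
//   }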
/** The log object used for debugging. */
private static final Logger LOG = LoggerFactory.getLogger(FailoverRegion.class);
// ------------------------------------------------------------------------
/** a unique id for debugging */
private final AbstractID id = new AbstractID();
private final ExecutionGraph executionGraph;
private final List<ExecutionVertex> connectedExecutionVertexes;
/** Current status of the job execution */
private volatile JobStatus state = JobStatus.RUNNING;
public FailoverRegion(
ExecutionGraph executionGraph,
List<ExecutionVertex> connectedExecutions) {
this.executionGraph = checkNotNull(executionGraph);
this.connectedExecutionVertexes = checkNotNull(connectedExecutions);
LOG.debug("Created failover region {} with vertices: {}", id, connectedExecutions);
}
public void onExecutionFail(Execution taskExecution, Throwable cause) {
// TODO: check if need to failover the preceding region
if (!executionGraph.getRestartStrategy().canRestart()) {
// delegate the failure to a global fail that will check the restart strategy and not restart
executionGraph.failGlobal(cause);
}
else {
cancel(taskExecution.getGlobalModVersion());
}
}
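// Failover flow sketch (informal summary of the methods below): a task failure drives the
// region through RUNNING -> CANCELLING (cancel all vertices), CANCELLING -> CANCELED (once
// every vertex reaches a terminal state), CANCELED -> CREATED (reset vertices), and finally
// CREATED -> RUNNING (reschedule), each step guarded by the CAS-based transitionState(...).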
private void allVerticesInTerminalState(long globalModVersionOfFailover) {
while (true) {
JobStatus curStatus = this.state;
if (curStatus.equals(JobStatus.CANCELLING)) {
if (transitionState(curStatus, JobStatus.CANCELED)) {
reset(globalModVersionOfFailover);
break;
}
}
else {
LOG.info("FailoverRegion {} is {} when allVerticesInTerminalState.", id, state);
break;
}
}
}
public JobStatus getState() {
return state;
}
/**
* Gets all execution vertices contained in this region.
*/
public List<ExecutionVertex> getAllExecutionVertexes() {
return connectedExecutionVertexes;
}
// Notify the region to fail over.
private void failover(long globalModVersionOfFailover) {
if (!executionGraph.getRestartStrategy().canRestart()) {
executionGraph.failGlobal(new FlinkException("RestartStrategy validate fail"));
}
else {
JobStatus curStatus = this.state;
if (curStatus.equals(JobStatus.RUNNING)) {
cancel(globalModVersionOfFailover);
}
else if (curStatus.equals(JobStatus.CANCELED)) {
reset(globalModVersionOfFailover);
}
else {
LOG.info("FailoverRegion {} is {} when notified to failover.", id, state);
}
}
}
// cancel all executions in this subgraph
private void cancel(final long globalModVersionOfFailover) {
executionGraph.getJobMasterMainThreadExecutor().assertRunningInMainThread();
while (true) {
JobStatus curStatus = this.state;
if (curStatus.equals(JobStatus.RUNNING)) {
if (transitionState(curStatus, JobStatus.CANCELLING)) {
createTerminationFutureOverAllConnectedVertexes()
.thenAccept((nullptr) -> allVerticesInTerminalState(globalModVersionOfFailover));
break;
}
} else {
LOG.info("FailoverRegion {} is {} when cancel.", id, state);
break;
}
}
}
@VisibleForTesting
protected CompletableFuture<Void> createTerminationFutureOverAllConnectedVertexes() {
// we build a future that is complete once all vertices have reached a terminal state
final ArrayList<CompletableFuture<?>> futures = new ArrayList<>(connectedExecutionVertexes.size());
// cancel all tasks (that still need cancelling)
for (ExecutionVertex vertex : connectedExecutionVertexes) {
futures.add(vertex.cancel());
}
return FutureUtils.waitForAll(futures);
}
// reset all executions in this subgraph
private void reset(long globalModVersionOfFailover) {
try {
// reset all connected ExecutionVertexes
final Collection<CoLocationGroup> colGroups = new HashSet<>();
final long restartTimestamp = System.currentTimeMillis();
for (ExecutionVertex ev : connectedExecutionVertexes) {
CoLocationGroup cgroup = ev.getJobVertex().getCoLocationGroup();
if (cgroup != null && !colGroups.contains(cgroup)){
cgroup.resetConstraints();
colGroups.add(cgroup);
}
ev.resetForNewExecution(restartTimestamp, globalModVersionOfFailover);
}
if (transitionState(JobStatus.CANCELED, JobStatus.CREATED)) {
restart(globalModVersionOfFailover);
}
else {
LOG.info("FailoverRegion {} switched from CANCELLING to CREATED fail, will fail this region again.", id);
failover(globalModVersionOfFailover);
}
}
catch (GlobalModVersionMismatch e) {
// happens when a global recovery happens concurrently to the regional recovery
// go back to a clean state
state = JobStatus.RUNNING;
}
catch (Throwable e) {
LOG.info("FailoverRegion {} reset fail, will failover again.", id);
failover(globalModVersionOfFailover);
}
}
// restart all executions in this subgraph
private void restart(long globalModVersionOfFailover) {
try {
if (transitionState(JobStatus.CREATED, JobStatus.RUNNING)) {
// if we have checkpointed state, reload it into the executions
//TODO: checkpoint support restore part ExecutionVertex cp
/**
if (executionGraph.getCheckpointCoordinator() != null) {
executionGraph.getCheckpointCoordinator().restoreLatestCheckpointedState(
connectedExecutionVertexes, false, false);
}
*/
HashSet<AllocationID> previousAllocationsInRegion = new HashSet<>(connectedExecutionVertexes.size());
for (ExecutionVertex connectedExecutionVertex : connectedExecutionVertexes) {
AllocationID latestPriorAllocation = connectedExecutionVertex.getLatestPriorAllocation();
if (latestPriorAllocation != null) {
previousAllocationsInRegion.add(latestPriorAllocation);
}
}
//TODO, use restart strategy to schedule them.
//restart all connected ExecutionVertexes
for (ExecutionVertex ev : connectedExecutionVertexes) {
try {
ev.scheduleForExecution(
executionGraph.getSlotProvider(),
executionGraph.isQueuedSchedulingAllowed(),
LocationPreferenceConstraint.ANY,
previousAllocationsInRegion); // some inputs not belonging to the failover region might have failed concurrently
}
catch (Throwable e) {
failover(globalModVersionOfFailover);
}
}
}
else {
LOG.info("FailoverRegion {} switched from CREATED to RUNNING fail, will fail this region again.", id);
failover(globalModVersionOfFailover);
}
} catch (Exception e) {
LOG.info("FailoverRegion {} restart failed, failover again.", id, e);
failover(globalModVersionOfFailover);
}
}
private boolean transitionState(JobStatus current, JobStatus newState) {
if (STATE_UPDATER.compareAndSet(this, current, newState)) {
LOG.info("FailoverRegion {} switched from state {} to {}.", id, current, newState);
return true;
}
else {
return false;
}
}
}
|
|
/*
* MIT License
*
* Copyright (c) 2016-2022 EPAM Systems
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.epam.catgenome.manager.vcf;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.epam.catgenome.component.MessageHelper;
import com.epam.catgenome.constant.MessagesConstants;
import com.epam.catgenome.controller.vo.ga4gh.VariantGA4GH;
import com.epam.catgenome.exception.Ga4ghResourceUnavailableException;
import com.epam.catgenome.manager.gene.GeneTrackManager;
import com.epam.catgenome.manager.vcf.reader.VcfGa4ghReader;
import com.epam.catgenome.util.feature.reader.EhCacheBasedIndexCache;
import htsjdk.tribble.TribbleException;
import org.codehaus.jettison.json.JSONObject;
import org.eclipse.jetty.server.Server;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.core.io.Resource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import com.epam.catgenome.common.AbstractManagerTest;
import com.epam.catgenome.controller.util.UrlTestingUtils;
import com.epam.catgenome.controller.vo.Query2TrackConverter;
import com.epam.catgenome.controller.vo.TrackQuery;
import com.epam.catgenome.controller.vo.registration.FeatureIndexedFileRegistrationRequest;
import com.epam.catgenome.controller.vo.registration.ReferenceRegistrationRequest;
import com.epam.catgenome.dao.BiologicalDataItemDao;
import com.epam.catgenome.entity.BiologicalDataItem;
import com.epam.catgenome.entity.BiologicalDataItemResourceType;
import com.epam.catgenome.entity.gene.GeneFile;
import com.epam.catgenome.entity.reference.Chromosome;
import com.epam.catgenome.entity.reference.Reference;
import com.epam.catgenome.entity.track.Track;
import com.epam.catgenome.entity.track.TrackType;
import com.epam.catgenome.entity.vcf.Variation;
import com.epam.catgenome.entity.vcf.VariationQuery;
import com.epam.catgenome.entity.vcf.VariationType;
import com.epam.catgenome.entity.vcf.VcfFile;
import com.epam.catgenome.entity.vcf.VcfFilterInfo;
import com.epam.catgenome.entity.vcf.VcfSample;
import com.epam.catgenome.exception.ExternalDbUnavailableException;
import com.epam.catgenome.exception.VcfReadingException;
import com.epam.catgenome.helper.EntityHelper;
import com.epam.catgenome.manager.BiologicalDataItemManager;
import com.epam.catgenome.manager.DownloadFileManager;
import com.epam.catgenome.manager.FeatureIndexManager;
import com.epam.catgenome.manager.FileManager;
import com.epam.catgenome.manager.TrackHelper;
import com.epam.catgenome.manager.externaldb.HttpDataManager;
import com.epam.catgenome.manager.externaldb.ParameterNameValue;
import com.epam.catgenome.manager.gene.GffManager;
import com.epam.catgenome.manager.reference.ReferenceGenomeManager;
import com.epam.catgenome.manager.reference.ReferenceManager;
import com.epam.catgenome.util.Utils;
/**
* Source: VcfManagerTest.java
* Created: 22/10/15, 1:46 PM
* Project: CATGenome Browser
* Make: IntelliJ IDEA 14.1.4, JDK 1.8
* <p>
* A test for VcfManager class
* </p>
*
* @author Mikhail Miroliubov
*/
@SuppressWarnings("PMD.UnusedPrivateField")
@RunWith(SpringJUnit4ClassRunner.class)
@TestPropertySource("classpath:test-catgenome.properties")
@ContextConfiguration({"classpath:applicationContext-test.xml"})
@Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = Exception.class)
public class VcfManagerTest extends AbstractManagerTest {
private static final String CLASSPATH_TEMPLATES_FELIS_CATUS_VCF = "classpath:templates/Felis_catus.vcf";
private static final String CLASSPATH_TEMPLATES_SAMPLES_VCF = "classpath:templates/samples.vcf";
private static final String CLASSPATH_TEMPLATES_FELIS_CATUS_VCF_COMPRESSED = "classpath:templates/Felis_catus.vcf" +
".gz";
private static final String CLASSPATH_TEMPLATES_FELIS_CATUS_VCF_GOOGLE = "classpath:templates/1000-genomes.chrMT" +
".vcf";
private static final String HTTP_VCF = "http://localhost/vcf/BK0010_S12.vcf";
private static final String NA_19238 = "NA19238";
private static final String PRETTY_NAME = "pretty";
private static final String SAMPLE_PRETTY_NAME = "Sample1";
private static final int TEST_END_INDEX = 187708306;
private static final double TEST_SMALL_SCALE_FACTOR = 0.000007682737;
private static final int TEST_CHROMOSOME_SIZE = 239107476;
private static final int GENE_POSITION = 35471;
private static final String SAMPLE_NAME = "HG00702";
private static final int NUMBER_OF_FILTERS = 2;
private static final int NUMBER_OF_TRIVIAL_INFO = 18;
private static final int INDEX_BUFFER_SIZE = 32;
@Mock
private HttpDataManager httpDataManager;
@Spy
@Autowired
private VcfFileManager vcfFileManager;
@Spy
@Autowired
private TrackHelper trackHelper;
@Spy
@Autowired
private FileManager fileManager;
@Spy
@Autowired
private BiologicalDataItemManager biologicalDataItemManager;
@Spy
@Autowired
private FeatureIndexManager featureIndexManager;
@Spy
@Autowired
private ReferenceGenomeManager referenceGenomeManager;
@Spy
@Autowired
private DownloadFileManager downloadFileManager;
@Spy
@Autowired
private GeneTrackManager geneTrackManager;
@Autowired
private ReferenceManager referenceManager;
@Autowired
private BiologicalDataItemDao biologicalDataItemDao;
@Autowired
private GffManager gffManager;
@InjectMocks
private VcfManager vcfManager;
@Autowired
private ApplicationContext context;
@Spy
@Autowired(required = false)
private EhCacheBasedIndexCache indexCache;
@Value("${ga4gh.google.variantSetId}")
private String varSet;
@Value("${ga4gh.google.startPosition}")
private Integer start;
@Value("${ga4gh.google.endPosition}")
private Integer end;
@Value("${ga4gh.google.chrGA4GH}")
private String chrGA4GH;
@Value("${vcf.extended.info.patterns}")
private String infoTemplate;
private long referenceId;
private long referenceIdGA4GH;
private Reference testReference;
private Reference testReferenceGA4GH;
private Chromosome testChromosome;
private Chromosome testChrGA4GH;
private Logger logger = LoggerFactory.getLogger(VcfManagerTest.class);
@Before
public void setup() throws Exception {
MockitoAnnotations.initMocks(this);
Assert.assertNotNull(featureIndexManager);
Assert.assertNotNull(downloadFileManager);
Assert.assertNotNull(biologicalDataItemManager);
Assert.assertNotNull(fileManager);
Assert.assertNotNull(trackHelper);
Assert.assertNotNull(indexCache);
testChromosome = EntityHelper.createNewChromosome();
testChromosome.setSize(TEST_CHROMOSOME_SIZE);
testReference = EntityHelper.createNewReference(testChromosome, referenceGenomeManager.createReferenceId());
referenceGenomeManager.create(testReference);
referenceId = testReference.getId();
// create new chromosome and reference for ga4gh
testChrGA4GH = EntityHelper.createNewChromosome(chrGA4GH);
testChrGA4GH.setSize(TEST_CHROMOSOME_SIZE);
testReferenceGA4GH = EntityHelper.createNewReference(testChrGA4GH, referenceGenomeManager.createReferenceId());
testReferenceGA4GH.setType(BiologicalDataItemResourceType.GA4GH);
referenceGenomeManager.create(testReferenceGA4GH);
referenceIdGA4GH = testReferenceGA4GH.getId();
vcfManager.setExtendedInfoTemplates(infoTemplate);
vcfManager.setIndexBufferSize(INDEX_BUFFER_SIZE);
}
@Test
public void testSaveLoadVcfFile() throws IOException {
final VcfFile vcfFile = testSave(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
final VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
Assert.assertEquals(PRETTY_NAME, file.getPrettyName());
testLoad(vcfFile, 1D, true);
}
@Test
public void testSaveLoadMultiSampleVcfFile() throws IOException {
final VcfFile vcfFile = testSave(CLASSPATH_TEMPLATES_SAMPLES_VCF);
final Map<String, String> aliases = new HashMap<>();
aliases.put("NA19239", "Sample1");
aliases.put("NA19238", "Sample2");
aliases.put("NA19240", "Sample3");
vcfFileManager.setVcfAliases(aliases, vcfFile.getId());
final VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
Assert.assertEquals(SAMPLE_PRETTY_NAME, file.getSamples().get(0).getPrettyName());
}
@Test
public void testSaveLoadVcfCompressedFile() throws IOException {
final VcfFile vcfFile = testSave(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF_COMPRESSED);
final VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
testLoad(vcfFile, 1D, true);
}
/**
* Tests vcfFileManager.load() behaviour on small scale factors.
* Should return a number of variations having type STATISTIC and variationsCount > 1
*/
@Test
public void testLoadSmallScaleVcfFile() throws IOException {
VcfFile vcfFile = testSave(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
Track<Variation> trackResult = testLoad(vcfFile, TEST_SMALL_SCALE_FACTOR, true);
List<Variation> ambiguousVariations = trackResult.getBlocks().stream().filter((b) ->
b.getVariationsCount() != null && b.getVariationsCount() > 1).collect(Collectors.toList());
Assert.assertFalse(ambiguousVariations.isEmpty());
// test not collapsed
trackResult = testLoad(vcfFile, TEST_SMALL_SCALE_FACTOR, true, false);
ambiguousVariations = trackResult.getBlocks().stream()
.filter((b) -> b.getVariationsCount() != null && b.getVariationsCount() > 1)
.collect(Collectors.toList());
Assert.assertTrue(ambiguousVariations.isEmpty());
}
@Test
public void testLoadSmallScaleVcfFileGa4GH() throws IOException, ExternalDbUnavailableException {
String fetchRes1 = readFile("GA4GH_id10473.json");
String fetchRes2 = readFile("GA4GH_id10473_variant.json");
Mockito.when(
httpDataManager.fetchData(Mockito.any(), Mockito.any(JSONObject.class)))
.thenReturn(fetchRes1)
.thenReturn(fetchRes2);
String fetchRes3 = readFile("GA4GH_id10473_param.json");
Mockito.when(
httpDataManager.fetchData(Mockito.any(), Mockito.any(ParameterNameValue[].class)))
.thenReturn(fetchRes3);
VcfFile vcfFileGA4GH = registerVcfGA4GH();
vcfFileGA4GH.setType(BiologicalDataItemResourceType.GA4GH);
List<VcfSample> vcfSamples = vcfFileGA4GH.getSamples();
Track<Variation> trackResult;
Long sampleId = 0L;
for (VcfSample sample : vcfSamples) {
if (sample.getName().equals(SAMPLE_NAME)) {
sampleId = sample.getId();
}
}
trackResult = testLoadGA4GH(vcfFileGA4GH, TEST_SMALL_SCALE_FACTOR, true, sampleId);
List<Variation> ambiguousVariations = trackResult.getBlocks().stream().filter((b) ->
b.getVariationsCount() != null && b.getVariationsCount() > 1).collect(Collectors.toList());
Assert.assertFalse(ambiguousVariations.isEmpty());
}
@Test
public void testLoadExtendedSummary() throws IOException {
VcfFile vcfFile = testSave("classpath:templates/samples.vcf");
VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
Track<Variation> trackResult = testLoad(file, 1D, true);
final VariationQuery query = new VariationQuery();
query.setId(vcfFile.getId());
query.setChromosomeId(testChromosome.getId());
query.setPosition(trackResult.getBlocks().get(0).getStartIndex());
Variation variation = vcfManager.loadVariation(query);
Assert.assertFalse(variation.getInfo().isEmpty());
Assert.assertFalse(variation.getGenotypeData().get(NA_19238).getInfo().isEmpty());
VcfFilterInfo filterInfo = vcfManager.getFiltersInfo(Collections.singleton(vcfFile.getId()));
Assert.assertFalse(filterInfo.getInfoItems().isEmpty());
Assert.assertFalse(filterInfo.getAvailableFilters().isEmpty());
Assert.assertFalse(filterInfo.getSamples().isEmpty());
// now add a project and try to fetch genes affected
vcfFile = testSave(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
Resource resource = context.getResource("classpath:templates/genes_sorted.gtf");
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(resource.getFile().getAbsolutePath());
GeneFile geneFile = gffManager.registerGeneFile(request);
Assert.assertNotNull(geneFile);
Assert.assertNotNull(geneFile.getId());
referenceGenomeManager.updateReferenceGeneFileId(testReference.getId(), geneFile.getId());
query.setId(vcfFile.getId());
query.setPosition(GENE_POSITION);
variation = vcfManager.loadVariation(query);
Assert.assertFalse(variation.getInfo().isEmpty());
Assert.assertNotNull(variation.getGeneNames());
Assert.assertFalse(variation.getGeneNames().isEmpty());
}
public VcfFile registerVcfGA4GH() {
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceIdGA4GH);
request.setType(BiologicalDataItemResourceType.GA4GH);
request.setPath(varSet);
VcfFile vcfFileGA4GH = vcfManager.registerVcfFile(request);
vcfFileGA4GH.setType(BiologicalDataItemResourceType.GA4GH);
return vcfFileGA4GH;
}
@Ignore
@Test
public void testAllTrackGa4GH() throws IOException {
VcfFile vcfFileGA4GH = registerVcfGA4GH();
vcfFileGA4GH.setType(BiologicalDataItemResourceType.GA4GH);
List<VcfSample> vcfSamples = vcfFileGA4GH.getSamples();
for (VcfSample sample : vcfSamples) {
Track<Variation> trackResultGA4GH = testLoadGA4GH(vcfFileGA4GH, 1D, true, sample.getId());
Assert.assertNotNull(trackResultGA4GH);
}
}
@Test
public void testGetVariantsGA4GH() throws IOException, ExternalDbUnavailableException,
Ga4ghResourceUnavailableException {
String fetchRes1 = readFile("GA4GH_id10473_variant_2.json");
Mockito.when(
httpDataManager.fetchData(Mockito.any(), Mockito.any(JSONObject.class)))
.thenReturn(fetchRes1);
VcfGa4ghReader reader = new VcfGa4ghReader(httpDataManager, referenceGenomeManager);
List<VariantGA4GH> ghList = reader.getVariantsGA4GH(varSet, start.toString(), end.toString(),
testChrGA4GH.getName());
Assert.assertFalse(ghList.isEmpty());
Assert.assertNotNull(ghList.get(1).getNames());
Assert.assertFalse(ghList.get(1).getCalls().isEmpty());
}
@Ignore
@Test
public void testRegisterDownloadFile() {
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setType(BiologicalDataItemResourceType.DOWNLOAD);
request.setPath(HTTP_VCF);
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
}
@Test
public void testRegisterFile() throws IOException{
Resource resource = context.getResource(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(resource.getFile().getAbsolutePath());
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
Track<Variation> trackResult = testLoad(vcfFile, 1D, true);
Assert.assertFalse(trackResult.getBlocks().isEmpty());
VcfFile filesByReference = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(filesByReference);
}
@Test
public void testLoadStructuralVariations() throws IOException {
Resource refResource = context.getResource("classpath:templates/A3.fa");
ReferenceRegistrationRequest refRequest = new ReferenceRegistrationRequest();
refRequest.setName(testReference.getName() + this.getClass().getSimpleName());
refRequest.setPath(refResource.getFile().getPath());
Reference reference = referenceManager.registerGenome(refRequest);
Resource resource = context.getResource(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(reference.getId());
request.setPath(resource.getFile().getAbsolutePath());
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
Track<Variation> trackResult = testLoad(vcfFile, 1D, true);
Assert.assertFalse(trackResult.getBlocks().isEmpty());
List<VariationType> structTypes = Arrays.asList(VariationType.INS, VariationType.DEL,
VariationType.DUP, VariationType.INV, VariationType.BND);
List<Variation> structVars = trackResult.getBlocks().stream()
.filter(v -> structTypes.contains(v.getType()) && v.isStructural())
.collect(Collectors.toList());
Assert.assertFalse(structVars.isEmpty());
Variation bindVar = structVars.stream().filter(v -> v.getType() == VariationType.BND).findAny().get();
Assert.assertNotNull(bindVar);
Assert.assertNotNull(bindVar.getBindInfo().get("CIPOS"));
VariationQuery query = new VariationQuery();
query.setChromosomeId(testChromosome.getId());
query.setId(vcfFile.getId());
query.setPosition(bindVar.getStartIndex());
Variation bindVarInfo = vcfManager.loadVariation(query);
Assert.assertNotNull(bindVarInfo.getBindInfo().get("BIND_CHR"));
Assert.assertNotNull(bindVarInfo.getBindInfo().get("BIND_POS"));
}
@Test(expected = IllegalArgumentException.class)
public void testUnregisterVcfFile() throws IOException {
// Register vcf file.
Resource resource = context.getResource(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(resource.getFile().getAbsolutePath());
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
// Unregister vcf file.
VcfFile deletedVcfFile = vcfManager.unregisterVcfFile(vcfFile.getId());
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
Assert.assertEquals(vcfFile.getId(), deletedVcfFile.getId());
List<BiologicalDataItem> indexItems = biologicalDataItemDao.loadBiologicalDataItemsByIds(
Arrays.asList(vcfFile.getIndex().getId(), vcfFile.getBioDataItemId()));
Assert.assertTrue(indexItems.isEmpty());
// Check: should throw IllegalArgumentException
testLoad(vcfFile, 1D, false);
request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceIdGA4GH);
request.setType(BiologicalDataItemResourceType.GA4GH);
request.setPath(varSet);
VcfFile vcfFileGA4GH = vcfManager.registerVcfFile(request);
vcfFileGA4GH.setType(BiologicalDataItemResourceType.GA4GH);
Assert.assertNotNull(vcfFileGA4GH);
Assert.assertNotNull(vcfFileGA4GH.getId());
// Unregister vcf file.
deletedVcfFile = vcfManager.unregisterVcfFile(vcfFileGA4GH.getId());
Assert.assertNotNull(vcfFileGA4GH);
Assert.assertNotNull(vcfFileGA4GH.getId());
Assert.assertEquals(vcfFileGA4GH.getId(), deletedVcfFile.getId());
indexItems = biologicalDataItemDao.loadBiologicalDataItemsByIds(
Arrays.asList(vcfFileGA4GH.getIndex().getId(), vcfFileGA4GH.getBioDataItemId()));
Assert.assertTrue(indexItems.isEmpty());
// Check: should throw IllegalArgumentException
testLoadGA4GH(vcfFileGA4GH, 1D, false, null);
}
@Test
public void testGetNextFeature() throws IOException, ExternalDbUnavailableException {
String fetchRes1 = readFile("GA4GH_id10473.json");
String fetchRes2 = readFile("GA4GH_id10473_variant.json");
String fetchRes3 = readFile("GA4GH_id10473_variant_2.json");
String fetchRes4 = readFile("GA4GH_id10473_variant_3.json");
Mockito.when(
httpDataManager.fetchData(Mockito.any(), Mockito.any(JSONObject.class)))
.thenReturn(fetchRes1)
.thenReturn(fetchRes2)
.thenReturn(fetchRes3)
.thenReturn(fetchRes4);
String fetchRes5 = readFile("GA4GH_id10473_param.json");
Mockito.when(
httpDataManager.fetchData(Mockito.any(), Mockito.any(ParameterNameValue[].class)))
.thenReturn(fetchRes5);
getNextFeature(referenceId, BiologicalDataItemResourceType.FILE);
logger.info("success, next feature variation for file");
}
@Test
@Ignore
public void testSaveLoadUrl() throws Exception {
final String path = "/Felis_catus.vcf";
String vcfUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + path;
String indexUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + "/Felis_catus.idx";
Server server = UrlTestingUtils.getFileServer(context);
try {
server.start();
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(vcfUrl);
request.setIndexPath(indexUrl);
request.setIndexType(BiologicalDataItemResourceType.URL);
request.setType(BiologicalDataItemResourceType.URL);
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
Assert.assertEquals(BiologicalDataItemResourceType.URL, vcfFile.getType());
Assert.assertEquals(vcfUrl, vcfFile.getPath());
Assert.assertEquals(BiologicalDataItemResourceType.URL, vcfFile.getIndex().getType());
testLoad(vcfFile, 1D, true);
// index as file
Resource resource = context.getResource("classpath:templates/Felis_catus.idx");
request.setIndexPath(resource.getFile().getAbsolutePath());
request.setIndexType(null);
vcfFile = vcfManager.registerVcfFile(request);
Assert.assertEquals(BiologicalDataItemResourceType.FILE, vcfFile.getIndex().getType());
testLoad(vcfFile, 1D, true);
// Compressed file
vcfUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + "/Felis_catus.vcf.gz";
indexUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + "/Felis_catus.tbi";
request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(vcfUrl);
request.setIndexPath(indexUrl);
request.setIndexType(BiologicalDataItemResourceType.URL);
request.setType(BiologicalDataItemResourceType.URL);
vcfFile = vcfManager.registerVcfFile(request);
testLoad(vcfFile, 1D, true);
} finally {
server.stop();
}
}
@Test
@Ignore
public void testLoadUrlNoRegistration() throws Exception {
final String path = "/Felis_catus.vcf";
String vcfUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + path;
String indexUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + "/Felis_catus.idx";
Server server = UrlTestingUtils.getFileServer(context);
try {
server.start();
TrackQuery vcfTrackQuery = new TrackQuery();
vcfTrackQuery.setChromosomeId(testChromosome.getId());
vcfTrackQuery.setStartIndex(1);
vcfTrackQuery.setEndIndex(TEST_END_INDEX);
vcfTrackQuery.setScaleFactor(1D);
Track<Variation> variationTrack = Query2TrackConverter.convertToTrack(vcfTrackQuery);
Track<Variation> trackResult = vcfManager.loadVariations(variationTrack, vcfUrl, indexUrl,
null, true, true);
Assert.assertFalse(trackResult.getBlocks().isEmpty());
Variation var = vcfManager.getNextOrPreviousVariation(trackResult.getBlocks().get(3).getEndIndex(), null,
null, testChromosome.getId(), true, vcfUrl, indexUrl);
Assert.assertNotNull(var);
Assert.assertEquals(var.getStartIndex(), trackResult.getBlocks().get(4).getStartIndex());
Assert.assertEquals(var.getEndIndex(), trackResult.getBlocks().get(4).getEndIndex());
} finally {
server.stop();
}
}
@Test
public void testLoadExtendedSummaryUrl() throws Exception {
final String path = "/Felis_catus.vcf";
String vcfUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + path;
String indexUrl = UrlTestingUtils.TEST_FILE_SERVER_URL + "/Felis_catus.idx";
Resource resource = context.getResource("classpath:templates/genes_sorted.gtf");
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(resource.getFile().getAbsolutePath());
GeneFile geneFile = gffManager.registerGeneFile(request);
Assert.assertNotNull(geneFile);
Assert.assertNotNull(geneFile.getId());
referenceGenomeManager.updateReferenceGeneFileId(testReference.getId(), geneFile.getId());
Server server = UrlTestingUtils.getFileServer(context);
try {
server.start();
final VariationQuery query = new VariationQuery();
query.setPosition(GENE_POSITION);
query.setChromosomeId(testChromosome.getId());
Variation variation = vcfManager.loadVariation(query, vcfUrl, indexUrl);
Assert.assertFalse(variation.getInfo().isEmpty());
Assert.assertNotNull(variation.getGeneNames());
Assert.assertFalse(variation.getGeneNames().isEmpty());
} finally {
server.stop();
}
}
@Test
public void testLoadExtendedInfo() throws IOException {
VcfFile vcfFile = testSave("classpath:templates/extended_info.vcf");
VcfFile file = vcfFileManager.load(vcfFile.getId());
Assert.assertNotNull(file);
VcfFilterInfo filterInfo = vcfManager.getFiltersInfo(Collections.singleton(vcfFile.getId()));
Assert.assertEquals(NUMBER_OF_FILTERS, filterInfo.getAvailableFilters().size());
Assert.assertEquals(NUMBER_OF_TRIVIAL_INFO, filterInfo.getInfoItems().size() - 1);
Assert.assertEquals(NUMBER_OF_TRIVIAL_INFO, filterInfo.getInfoItemMap().size() - 1); // -1 refers to is_exon
// item which is added externally
}
@Test
public void testSaveUnsorted() throws IOException {
String invalidVcf = "unsorted.vcf";
testRegisterInvalidFile("classpath:templates/invalid/" + invalidVcf, MessageHelper
.getMessage(MessagesConstants.ERROR_UNSORTED_FILE));
Assert.assertTrue(biologicalDataItemDao
.loadFilesByNameStrict(invalidVcf).isEmpty());
}
@Test
public void testRegisterFileExtraChr() throws IOException {
VcfFile vcfFile = testSave("classpath:templates/invalid/extra_chr.vcf");
Assert.assertNotNull(vcfFile);
}
public static VcfFile registerVcf(final Resource vcfFile, final Long referenceId, final VcfManager vcfManager,
final String prettyName) throws IOException {
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setReferenceId(referenceId);
request.setPath(vcfFile.getFile().getAbsolutePath());
request.setPrettyName(prettyName);
return vcfManager.registerVcfFile(request);
}
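// Usage sketch for the static helper above (hypothetical caller, shown for illustration only):
//
//   VcfFile vcf = VcfManagerTest.registerVcf(
//           context.getResource(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF),
//           referenceId, vcfManager, "pretty");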
private void getNextFeature(final Long reference, final BiologicalDataItemResourceType type) throws IOException {
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
switch (type) {
case GA4GH: {
request.setPath(varSet);
request.setType(BiologicalDataItemResourceType.GA4GH);
break;
}
default: {
Resource resource = context.getResource(CLASSPATH_TEMPLATES_FELIS_CATUS_VCF);
request.setType(BiologicalDataItemResourceType.FILE);
request.setPath(resource.getFile().getAbsolutePath());
break;
}
}
request.setReferenceId(reference);
VcfFile vcfFile = vcfManager.registerVcfFile(request);
Assert.assertNotNull(vcfFile);
Assert.assertNotNull(vcfFile.getId());
Track<Variation> trackResult;
Long sampleId = 0L;
switch (type) {
case GA4GH: {
List<VcfSample> vcfSamples = vcfFile.getSamples();
for (VcfSample sample : vcfSamples) {
if (sample.getName().equals(SAMPLE_NAME)) {
sampleId = sample.getId();
}
}
trackResult = testLoadGA4GH(vcfFile, 1D, true, sampleId);
break;
}
default: {
trackResult = testLoad(vcfFile, 1D, true);
Assert.assertFalse(trackResult.getBlocks().isEmpty());
}
}
int middle = trackResult.getBlocks().size() / 2;
Variation var1 = trackResult.getBlocks().get(middle);
Variation var2 = trackResult.getBlocks().get(middle + 1);
double time1 = Utils.getSystemTimeMilliseconds();
Variation loadedNextVar;
switch (type) {
case GA4GH: {
loadedNextVar = vcfManager.getNextOrPreviousVariation(var1.getEndIndex(), vcfFile.getId(), sampleId,
testChrGA4GH.getId(), true, null, null);
break;
}
default: {
loadedNextVar = vcfManager.getNextOrPreviousVariation(var1.getEndIndex(), vcfFile.getId(), null,
testChromosome.getId(), true, null, null);
}
}
double time2 = Utils.getSystemTimeMilliseconds();
logger.info("next feature took {} ms", time2 - time1);
Assert.assertNotNull(loadedNextVar);
Assert.assertEquals(var2.getStartIndex(), loadedNextVar.getStartIndex());
Assert.assertEquals(var2.getEndIndex(), loadedNextVar.getEndIndex());
time1 = Utils.getSystemTimeMilliseconds();
Variation loadedPrevVar;
switch (type) {
case GA4GH: {
loadedPrevVar = vcfManager.getNextOrPreviousVariation(var2.getStartIndex(), vcfFile.getId(), sampleId,
testChrGA4GH.getId(), false, null, null);
break;
}
default: {
loadedPrevVar = vcfManager.getNextOrPreviousVariation(var2.getStartIndex(), vcfFile.getId(), null,
testChromosome.getId(), false, null, null);
break;
}
}
time2 = Utils.getSystemTimeMilliseconds();
logger.info("prev feature took {} ms", time2 - time1);
Assert.assertNotNull(loadedNextVar);
Assert.assertEquals(var1.getStartIndex(), loadedPrevVar.getStartIndex());
Assert.assertEquals(var1.getEndIndex(), loadedPrevVar.getEndIndex());
}
private VcfFile testSave(final String filePath) throws IOException {
Resource resource = context.getResource(filePath);
return registerVcf(resource, referenceId, vcfManager, PRETTY_NAME);
}
private Track<Variation> testLoad(final VcfFile vcfFile, final Double scaleFactor, final boolean checkBlocks)
throws IOException {
return testLoad(vcfFile, scaleFactor, checkBlocks, true);
}
private Track<Variation> testLoad(final VcfFile vcfFile, final Double scaleFactor, final boolean checkBlocks,
final boolean collapse) throws IOException {
TrackQuery vcfTrackQuery = new TrackQuery();
vcfTrackQuery.setChromosomeId(testChromosome.getId());
vcfTrackQuery.setStartIndex(1);
vcfTrackQuery.setEndIndex(TEST_END_INDEX);
vcfTrackQuery.setId(vcfFile.getId());
vcfTrackQuery.setScaleFactor(scaleFactor);
Track<Variation> variationTrack = Query2TrackConverter.convertToTrack(vcfTrackQuery);
double time1 = Utils.getSystemTimeMilliseconds();
Track<Variation> trackResult = vcfManager.loadVariations(variationTrack, null, true, collapse);
double time2 = Utils.getSystemTimeMilliseconds();
logger.debug("Loading VCF records took {} ms", time2 - time1);
if (checkBlocks) {
Assert.assertFalse(trackResult.getBlocks().isEmpty());
}
return trackResult;
}
private Track<Variation> testLoadGA4GH(final VcfFile vcfFile, final Double scaleFactor, final boolean checkBlocks,
final Long sampleIndex) throws VcfReadingException {
TrackQuery vcfTrackQuery = new TrackQuery();
vcfTrackQuery.setChromosomeId(testChrGA4GH.getId());
vcfTrackQuery.setEndIndex(end);
vcfTrackQuery.setStartIndex(start);
vcfTrackQuery.setScaleFactor(scaleFactor);
vcfTrackQuery.setId(vcfFile.getId());
Track<Variation> variationTrack = Query2TrackConverter.convertToTrack(vcfTrackQuery);
if (vcfFile.getType() == BiologicalDataItemResourceType.GA4GH) {
variationTrack.setType(TrackType.GA4GH);
}
Track<Variation> trackResult = vcfManager.loadVariations(variationTrack, sampleIndex, true, true);
if (checkBlocks) {
Assert.assertFalse(trackResult.getBlocks().isEmpty());
}
return trackResult;
}
private String readFile(final String filename) throws IOException {
Resource resource = context.getResource("classpath:externaldb//data//" + filename);
String pathStr = resource.getFile().getPath();
return new String(Files.readAllBytes(Paths.get(pathStr)), Charset.defaultCharset());
}
private void testRegisterInvalidFile(final String path, final String expectedMessage) throws IOException {
String errorMessage = "";
try {
Resource resource = context.getResource(path);
FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
request.setPath(resource.getFile().getAbsolutePath());
request.setReferenceId(referenceId);
vcfManager.registerVcfFile(request);
} catch (TribbleException | IllegalArgumentException | AssertionError e) {
errorMessage = e.getMessage();
}
//check that we received an appropriate message
Assert.assertTrue(errorMessage.contains(expectedMessage));
}
}
|
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.operation;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.map.impl.MapDataSerializerHook;
import com.hazelcast.map.impl.record.Record;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.spi.impl.operationservice.BackupAwareOperation;
import com.hazelcast.spi.impl.operationservice.Operation;
import com.hazelcast.spi.impl.operationservice.PartitionAwareOperation;
import com.hazelcast.spi.merge.SplitBrainMergePolicy;
import com.hazelcast.spi.merge.SplitBrainMergeTypes.MapMergeTypes;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static com.hazelcast.core.EntryEventType.MERGED;
/**
* Contains multiple merge entries for split-brain
* healing with a {@link SplitBrainMergePolicy}.
*
* @since 3.10
*/
public class MergeOperation extends MapOperation
implements PartitionAwareOperation, BackupAwareOperation {
private boolean disableWanReplicationEvent;
private List<MapMergeTypes<Object, Object>> mergingEntries;
private SplitBrainMergePolicy<Object, MapMergeTypes<Object, Object>, Object> mergePolicy;
private transient int currentIndex;
private transient boolean hasMapListener;
private transient boolean hasWanReplication;
private transient boolean hasBackups;
private transient boolean hasInvalidation;
private transient List<Data> invalidationKeys;
private transient boolean hasMergedValues;
private List backupPairs;
public MergeOperation() {
}
public MergeOperation(String name, List<MapMergeTypes<Object, Object>> mergingEntries,
SplitBrainMergePolicy<Object, MapMergeTypes<Object, Object>, Object> mergePolicy,
boolean disableWanReplicationEvent) {
super(name);
this.mergingEntries = mergingEntries;
this.mergePolicy = mergePolicy;
this.disableWanReplicationEvent = disableWanReplicationEvent;
}
@Override
protected boolean disableWanReplicationEvent() {
return disableWanReplicationEvent;
}
@Override
protected void runInternal() {
hasMapListener = mapEventPublisher.hasEventListener(name);
hasWanReplication = mapContainer.isWanReplicationEnabled()
&& !disableWanReplicationEvent;
hasBackups = mapContainer.getTotalBackupCount() > 0;
hasInvalidation = mapContainer.hasInvalidationListener();
if (hasBackups) {
backupPairs = new ArrayList(2 * mergingEntries.size());
}
if (hasInvalidation) {
invalidationKeys = new ArrayList<>(mergingEntries.size());
}
// if currentIndex is not zero, this is a
// continuation of the operation after a NativeOOME
int size = mergingEntries.size();
while (currentIndex < size) {
merge(mergingEntries.get(currentIndex));
currentIndex++;
}
}
private void merge(MapMergeTypes<Object, Object> mergingEntry) {
Data dataKey = getNodeEngine().toData(mergingEntry.getRawKey());
Data oldValue = hasMapListener ? getValue(dataKey) : null;
if (recordStore.merge(mergingEntry, mergePolicy, getCallerProvenance())) {
hasMergedValues = true;
Data dataValue = getValueOrPostProcessedValue(dataKey, getValue(dataKey));
mapServiceContext.interceptAfterPut(mapContainer.getInterceptorRegistry(), dataValue);
if (hasMapListener) {
mapEventPublisher.publishEvent(getCallerAddress(), name, MERGED, dataKey, oldValue, dataValue);
}
if (hasWanReplication) {
publishWanUpdate(dataKey, dataValue);
}
if (hasInvalidation) {
invalidationKeys.add(dataKey);
}
if (hasBackups) {
backupPairs.add(dataKey);
backupPairs.add(dataValue);
}
evict(dataKey);
}
}
private Data getValueOrPostProcessedValue(Data dataKey, Data dataValue) {
if (!isPostProcessing(recordStore)) {
return dataValue;
}
Record record = recordStore.getRecord(dataKey);
return mapServiceContext.toData(record.getValue());
}
private Data getValue(Data dataKey) {
Record record = recordStore.getRecord(dataKey);
if (record != null) {
return mapServiceContext.toData(record.getValue());
}
return null;
}
@Override
public Object getResponse() {
return hasMergedValues;
}
@Override
public boolean shouldBackup() {
return hasBackups && !backupPairs.isEmpty();
}
@Override
public int getSyncBackupCount() {
return mapContainer.getBackupCount();
}
@Override
public int getAsyncBackupCount() {
return mapContainer.getAsyncBackupCount();
}
@Override
protected void afterRunInternal() {
invalidateNearCache(invalidationKeys);
super.afterRunInternal();
}
@Override
public Operation getBackupOperation() {
return new PutAllBackupOperation(name,
toBackupListByRemovingEvictedRecords(), disableWanReplicationEvent);
}
/**
* Records may get evicted on NativeOOME after
* they have been merged, so we re-check the
* backup pair list to eliminate evicted entries.
*
* @return list of existing records which can
* safely be transferred to the backup replica.
*/
@Nonnull
private List toBackupListByRemovingEvictedRecords() {
List toBackupList = new ArrayList(backupPairs.size());
for (int i = 0; i < backupPairs.size(); i += 2) {
Data dataKey = ((Data) backupPairs.get(i));
Record record = recordStore.getRecord(dataKey);
if (record != null) {
toBackupList.add(dataKey);
toBackupList.add(backupPairs.get(i + 1));
toBackupList.add(record);
toBackupList.add(recordStore.getExpirySystem().getExpiredMetadata(dataKey));
}
}
return toBackupList;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeInt(mergingEntries.size());
for (MapMergeTypes mergingEntry : mergingEntries) {
out.writeObject(mergingEntry);
}
out.writeObject(mergePolicy);
out.writeBoolean(disableWanReplicationEvent);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
int size = in.readInt();
mergingEntries = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
MapMergeTypes mergingEntry = in.readObject();
mergingEntries.add(mergingEntry);
}
mergePolicy = in.readObject();
disableWanReplicationEvent = in.readBoolean();
}
@Override
public int getClassId() {
return MapDataSerializerHook.MERGE;
}
}
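/*
 * Illustrative only: a minimal sketch of how a MergeOperation could be constructed once
 * the merging entries and the merge policy have been prepared. In Hazelcast these are
 * produced by internal split-brain merge runners rather than by user code, so the
 * parameters below are assumptions for illustration, not part of the class above.
 */
class MergeOperationUsageSketch {
    static Operation newMergeOperation(String mapName,
                                       List<MapMergeTypes<Object, Object>> mergingEntries,
                                       SplitBrainMergePolicy<Object, MapMergeTypes<Object, Object>, Object> mergePolicy) {
        // disableWanReplicationEvent = false so WAN replication still observes merged entries
        return new MergeOperation(mapName, mergingEntries, mergePolicy, false);
    }
}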
|
|
package com.bitdubai.fermat_bnk_plugin.layer.bank_money_transaction.make_offline_bank_transfer.developer.bitdubai.version_1.database;
import com.bitdubai.fermat_api.DealsWithPluginIdentity;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabase;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabaseTable;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabaseTableRecord;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperObjectFactory;
import com.bitdubai.fermat_api.layer.osa_android.database_system.Database;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseRecord;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTable;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTableRecord;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem;
import com.bitdubai.fermat_api.layer.osa_android.database_system.PluginDatabaseSystem;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantCreateDatabaseException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantLoadTableToMemoryException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.DatabaseNotFoundException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantOpenDatabaseException;
import com.bitdubai.fermat_bnk_plugin.layer.bank_money_transaction.make_offline_bank_transfer.developer.bitdubai.version_1.exceptions.CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* The Class <code>com.bitdubai.fermat_bnk_plugin.layer.bank_money_transaction.make_offline_bank_transfer.developer.bitdubai.version_1.database.MakeOfflineBankTransferBankMoneyTransactionDeveloperDatabaseFactory</code>
* contains the methods that the Developer Database Tools use to show the information.
* <p/>
*
* Created by Yordin Alayn - (y.alayn@gmail.com) on 01/10/15.
*
* @version 1.0
* @since Java JDK 1.7
*/
public class MakeOfflineBankTransferBankMoneyTransactionDeveloperDatabaseFactory implements DealsWithPluginDatabaseSystem, DealsWithPluginIdentity {
/**
* DealsWithPluginDatabaseSystem Interface member variables.
*/
PluginDatabaseSystem pluginDatabaseSystem;
/**
* DealsWithPluginIdentity Interface member variables.
*/
UUID pluginId;
Database database;
/**
* Constructor
*
* @param pluginDatabaseSystem
* @param pluginId
*/
public MakeOfflineBankTransferBankMoneyTransactionDeveloperDatabaseFactory(PluginDatabaseSystem pluginDatabaseSystem, UUID pluginId) {
this.pluginDatabaseSystem = pluginDatabaseSystem;
this.pluginId = pluginId;
}
/**
* This method opens or creates the database this factory will be working with
*
* @throws CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException
*/
public void initializeDatabase() throws CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException {
try {
/*
* Open new database connection
*/
database = this.pluginDatabaseSystem.openDatabase(pluginId, pluginId.toString());
} catch (CantOpenDatabaseException cantOpenDatabaseException) {
/*
* The database exists but cannot be opened. We cannot handle this situation.
*/
throw new CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException(cantOpenDatabaseException.getMessage());
} catch (DatabaseNotFoundException e) {
/*
* The database does not exist; this may be the first time the plugin is running on this device,
* so we need to create the new database.
*/
MakeOfflineBankTransferBankMoneyTransactionDatabaseFactory makeOfflineBankTransferBankMoneyTransactionDatabaseFactory = new MakeOfflineBankTransferBankMoneyTransactionDatabaseFactory(pluginDatabaseSystem);
try {
/*
* We create the new database
*/
database = makeOfflineBankTransferBankMoneyTransactionDatabaseFactory.createDatabase(pluginId, pluginId.toString());
} catch (CantCreateDatabaseException cantCreateDatabaseException) {
/*
* The database cannot be created. I can not handle this situation.
*/
throw new CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException(cantCreateDatabaseException.getMessage());
}
}
}
public List<DeveloperDatabase> getDatabaseList(DeveloperObjectFactory developerObjectFactory) {
/**
* I only have one database on my plugin. I will return its name.
*/
List<DeveloperDatabase> databases = new ArrayList<DeveloperDatabase>();
databases.add(developerObjectFactory.getNewDeveloperDatabase("Make Offline Bank Transfer", this.pluginId.toString()));
return databases;
}
public List<DeveloperDatabaseTable> getDatabaseTableList(DeveloperObjectFactory developerObjectFactory) {
List<DeveloperDatabaseTable> tables = new ArrayList<DeveloperDatabaseTable>();
/**
* Table Make Offline Bank Transfer columns.
*/
List<String> makeOfflineBankTransferColumns = new ArrayList<String>();
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_TRANSACTION_ID_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_PUBLIC_KEY_BROKER_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_PUBLIC_KEY_CUSTOMER_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_STATUS_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_TRANSACTION_TYPE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_AMOUNT_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_CURRENCY_TYPE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_OPERATION_TYPE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_DOCUMENT_REFERENCE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_TO_NAME_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_TO_ACCOUNT_NUMBER_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_TO_ACCOUNTTYPE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_FROM_NAME_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_FROM_ACCOUNT_NUMBER_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_BANK_FROM_ACCOUNT_TYPE_COLUMN_NAME);
makeOfflineBankTransferColumns.add(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_TIMESTAMP_COLUMN_NAME);
/**
* Table Make Offline Bank Transfer addition.
*/
DeveloperDatabaseTable makeOfflineBankTransferTable = developerObjectFactory.getNewDeveloperDatabaseTable(MakeOfflineBankTransferBankMoneyTransactionDatabaseConstants.MAKE_OFFLINE_BANK_TRANSFER_TABLE_NAME, makeOfflineBankTransferColumns);
tables.add(makeOfflineBankTransferTable);
return tables;
}
public List<DeveloperDatabaseTableRecord> getDatabaseTableContent(DeveloperObjectFactory developerObjectFactory, DeveloperDatabaseTable developerDatabaseTable) {
/**
* Will get the records for the given table
*/
List<DeveloperDatabaseTableRecord> returnedRecords = new ArrayList<DeveloperDatabaseTableRecord>();
/**
* I load the passed table name from the SQLite database.
*/
DatabaseTable selectedTable = database.getTable(developerDatabaseTable.getName());
try {
selectedTable.loadToMemory();
} catch (CantLoadTableToMemoryException cantLoadTableToMemory) {
/**
* If there was an error, return an empty list.
*/
return returnedRecords;
}
List<DatabaseTableRecord> records = selectedTable.getRecords();
for (DatabaseTableRecord row : records) {
// use a fresh list per row so values do not accumulate across records
List<String> developerRow = new ArrayList<String>();
/**
* for each row in the table list
*/
for (DatabaseRecord field : row.getValues()) {
/**
* I get each field value of the row and save it into a List<String>
*/
developerRow.add(field.getValue().toString());
}
/**
* I create the Developer Database record
*/
returnedRecords.add(developerObjectFactory.getNewDeveloperDatabaseTableRecord(developerRow));
}
/**
* return the list of DeveloperRecords for the passed table.
*/
return returnedRecords;
}
@Override
public void setPluginDatabaseSystem(PluginDatabaseSystem pluginDatabaseSystem) {
this.pluginDatabaseSystem = pluginDatabaseSystem;
}
@Override
public void setPluginId(UUID pluginId) {
this.pluginId = pluginId;
}
}
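/*
 * Illustrative only: a minimal sketch of how the Developer Database Tools could drive
 * this factory. The pluginDatabaseSystem, pluginId and developerObjectFactory instances
 * are assumed to be supplied by the surrounding Fermat framework.
 */
class MakeOfflineBankTransferDeveloperDatabaseUsageSketch {
    static List<DeveloperDatabaseTableRecord> dumpFirstTable(PluginDatabaseSystem pluginDatabaseSystem,
                                                             UUID pluginId,
                                                             DeveloperObjectFactory developerObjectFactory)
            throws CantInitializeMakeOfflineBankTransferBankMoneyTransactionDatabaseException {
        MakeOfflineBankTransferBankMoneyTransactionDeveloperDatabaseFactory factory =
                new MakeOfflineBankTransferBankMoneyTransactionDeveloperDatabaseFactory(pluginDatabaseSystem, pluginId);
        // open (or create) the plugin database before reading from it
        factory.initializeDatabase();
        // pick the first declared table and return its records
        DeveloperDatabaseTable table = factory.getDatabaseTableList(developerObjectFactory).get(0);
        return factory.getDatabaseTableContent(developerObjectFactory, table);
    }
}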
|
|
/*
* Copyright 2008 The Portico Project
*
* This file is part of portico.
*
* portico is free software; you can redistribute it and/or modify
* it under the terms of the Common Developer and Distribution License (CDDL)
* as published by Sun Microsystems. For more information see the LICENSE file.
*
* Use of this software is strictly AT YOUR OWN RISK!!!
* If something bad happens you do not have permission to come crying to me.
* (that goes for your lawyer as well)
*
*/
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import hla.rti.AttributeHandleSet;
import hla.rti.FederatesCurrentlyJoined;
import hla.rti.FederationExecutionAlreadyExists;
import hla.rti.FederationExecutionDoesNotExist;
import hla.rti.LogicalTime;
import hla.rti.LogicalTimeInterval;
import hla.rti.RTIambassador;
import hla.rti.RTIexception;
import hla.rti.ResignAction;
import hla.rti.SuppliedAttributes;
import hla.rti.SuppliedParameters;
import hla.rti.jlc.EncodingHelpers;
import hla.rti.jlc.RtiFactoryFactory;
import org.portico.impl.hla13.types.DoubleTime;
import org.portico.impl.hla13.types.DoubleTimeInterval;
/**
* This federate works the same as the HLA 1.3 example federate, only it
* stores up information about all the messages it receives as callbacks
* and logs them at the end of execution (in order).
*/
public class EventLogger
{
//----------------------------------------------------------
// STATIC VARIABLES
//----------------------------------------------------------
/** The number of times we will update our attributes and send an interaction */
public static final int ITERATIONS = 20;
/** The sync point all federates will sync up on before starting */
public static final String READY_TO_RUN = "ReadyToRun";
//----------------------------------------------------------
// INSTANCE VARIABLES
//----------------------------------------------------------
private RTIambassador rtiamb;
private EventLoggerFedAmb fedamb;
//----------------------------------------------------------
// CONSTRUCTORS
//----------------------------------------------------------
//----------------------------------------------------------
// INSTANCE METHODS
//----------------------------------------------------------
/**
* This is just a helper method to make sure all logging is output in the same form
*/
private void log( String message )
{
System.out.println( "ExampleFederate : " + message );
}
/**
* This method will block until the user presses enter
*/
private void waitForUser()
{
log( " >>>>>>>>>> Press Enter to Continue <<<<<<<<<<" );
BufferedReader reader = new BufferedReader( new InputStreamReader(System.in) );
try
{
reader.readLine();
}
catch( Exception e )
{
log( "Error while waiting for user input: " + e.getMessage() );
e.printStackTrace();
}
}
/**
* As all time-related code is Portico-specific, we have isolated it into a
* single method. This way, if you need to move to a different RTI, you only need
* to change this code, rather than more code throughout the whole class.
*/
private LogicalTime convertTime( double time )
{
// PORTICO SPECIFIC!!
return new DoubleTime( time );
}
/**
* Same as for {@link #convertTime(double)}
*/
private LogicalTimeInterval convertInterval( double time )
{
// PORTICO SPECIFIC!!
return new DoubleTimeInterval( time );
}
///////////////////////////////////////////////////////////////////////////
////////////////////////// Main Simulation Method /////////////////////////
///////////////////////////////////////////////////////////////////////////
/**
* This is the main simulation loop. It can be thought of as the main method of
* the federate. For a description of the basic flow of this federate, see the
* class level comments
*/
public void runFederate( String federateName ) throws RTIexception
{
/////////////////////////////////
// 1. create the RTIambassador //
/////////////////////////////////
rtiamb = RtiFactoryFactory.getRtiFactory().createRtiAmbassador();
//////////////////////////////
// 2. create the federation //
//////////////////////////////
// create
// NOTE: some other federate may have already created the federation,
// in that case, we'll just try and join it
try
{
File fom = new File( "testfom.fed" );
rtiamb.createFederationExecution( "ExampleFederation",
fom.toURI().toURL() );
log( "Created Federation" );
}
catch( FederationExecutionAlreadyExists exists )
{
log( "Didn't create federation, it already existed" );
}
catch( MalformedURLException urle )
{
log( "Exception processing fom: " + urle.getMessage() );
urle.printStackTrace();
return;
}
////////////////////////////
// 3. join the federation //
////////////////////////////
// create the federate ambassador and join the federation
fedamb = new EventLoggerFedAmb();
rtiamb.joinFederationExecution( federateName, "ExampleFederation", fedamb );
log( "Joined Federation as " + federateName );
////////////////////////////////
// 4. announce the sync point //
////////////////////////////////
// announce a sync point to get everyone on the same page. if the point
// has already been registered, we'll get a callback saying it failed,
// but we don't care about that, as long as someone registered it
rtiamb.registerFederationSynchronizationPoint( READY_TO_RUN, null );
// wait until the point is announced
while( fedamb.isAnnounced == false )
{
rtiamb.tick();
}
// WAIT FOR USER TO KICK US OFF
// So that there is time to add other federates, we will wait until the
// user hits enter before proceeding. That way, you have time to start
// other federates.
waitForUser();
///////////////////////////////////////////////////////
// 5. achieve the point and wait for synchronization //
///////////////////////////////////////////////////////
// tell the RTI we are ready to move past the sync point and then wait
// until the federation has synchronized on the point
rtiamb.synchronizationPointAchieved( READY_TO_RUN );
log( "Achieved sync point: " +READY_TO_RUN+ ", waiting for federation..." );
while( fedamb.isReadyToRun == false )
{
rtiamb.tick();
}
/////////////////////////////
// 6. enable time policies //
/////////////////////////////
// in this section we enable/disable all time policies
// note that this step is optional!
enableTimePolicy();
log( "Time Policy Enabled" );
//////////////////////////////
// 7. publish and subscribe //
//////////////////////////////
// in this section we tell the RTI of all the data we are going to
// produce, and all the data we want to know about
publishAndSubscribe();
log( "Published and Subscribed" );
/////////////////////////////////////
// 8. register an object to update //
/////////////////////////////////////
int objectHandle = registerObject();
log( "Registered Object, handle=" + objectHandle );
////////////////////////////////////
// 9. do the main simulation loop //
////////////////////////////////////
// here is where we do the meat of our work. in each iteration, we will
// update the attribute values of the object we registered, and will
// send an interaction.
for( int i = 0; i < ITERATIONS; i++ )
{
// 9.1 update the attribute values of the instance //
updateAttributeValues( objectHandle );
// 9.2 send an interaction
sendInteraction();
// 9.3 request a time advance and wait until we get it
advanceTime( 1.0 );
log( "Time Advanced to " + fedamb.federateTime );
}
//////////////////////////////////////
// 10. delete the object we created //
//////////////////////////////////////
deleteObject( objectHandle );
log( "Deleted Object, handle=" + objectHandle );
////////////////////////////////////
// 11. resign from the federation //
////////////////////////////////////
rtiamb.resignFederationExecution( ResignAction.NO_ACTION );
log( "Resigned from Federation" );
////////////////////////////////////////
// 12. try and destroy the federation //
////////////////////////////////////////
// NOTE: we won't die if we can't do this because other federates
// remain. in that case we'll leave it for them to clean up
try
{
rtiamb.destroyFederationExecution( "ExampleFederation" );
log( "Destroyed Federation" );
}
catch( FederationExecutionDoesNotExist dne )
{
log( "No need to destroy federation, it doesn't exist" );
}
catch( FederatesCurrentlyJoined fcj )
{
log( "Didn't destroy federation, federates still joined" );
}
// print out all the events
System.out.println( " ================= Ordered Event List =================" );
for( int i = 0; i < fedamb.events.size(); i++ )
System.out.println( "["+i+"]: " + fedamb.events.get(i) );
System.out.println( " ======================================================" );
}
////////////////////////////////////////////////////////////////////////////
////////////////////////////// Helper Methods //////////////////////////////
////////////////////////////////////////////////////////////////////////////
/**
* This method will attempt to enable the various time related properties for
* the federate
*/
private void enableTimePolicy() throws RTIexception
{
// NOTE: Unfortunately, the LogicalTime/LogicalTimeInterval create code is
// Portico specific. You will have to alter this if you move to a
// different RTI implementation. As such, we've isolated it into a
// method so that any change only needs to happen in a couple of spots
LogicalTime currentTime = convertTime( fedamb.federateTime );
LogicalTimeInterval lookahead = convertInterval( fedamb.federateLookahead );
////////////////////////////
// enable time regulation //
////////////////////////////
this.rtiamb.enableTimeRegulation( currentTime, lookahead );
// tick until we get the callback
while( fedamb.isRegulating == false )
{
rtiamb.tick();
}
/////////////////////////////
// enable time constrained //
/////////////////////////////
this.rtiamb.enableTimeConstrained();
// tick until we get the callback
while( fedamb.isConstrained == false )
{
rtiamb.tick();
}
}
/**
* This method will inform the RTI about the types of data that the federate will
* be creating, and the types of data we are interested in hearing about as other
* federates produce it.
*/
private void publishAndSubscribe() throws RTIexception
{
////////////////////////////////////////////
// publish all attributes of ObjectRoot.A //
////////////////////////////////////////////
// before we can register instance of the object class ObjectRoot.A and
// update the values of the various attributes, we need to tell the RTI
// that we intend to publish this information
// get all the handle information for the attributes of ObjectRoot.A
int classHandle = rtiamb.getObjectClassHandle( "ObjectRoot.A" );
int aaHandle = rtiamb.getAttributeHandle( "aa", classHandle );
int abHandle = rtiamb.getAttributeHandle( "ab", classHandle );
int acHandle = rtiamb.getAttributeHandle( "ac", classHandle );
// package the information into a handle set
AttributeHandleSet attributes =
RtiFactoryFactory.getRtiFactory().createAttributeHandleSet();
attributes.add( aaHandle );
attributes.add( abHandle );
attributes.add( acHandle );
// do the actual publication
rtiamb.publishObjectClass( classHandle, attributes );
/////////////////////////////////////////////////
// subscribe to all attributes of ObjectRoot.A //
/////////////////////////////////////////////////
// we also want to hear about the same sort of information as it is
// created and altered in other federates, so we need to subscribe to it
rtiamb.subscribeObjectClassAttributes( classHandle, attributes );
/////////////////////////////////////////////////////
// publish the interaction class InteractionRoot.X //
/////////////////////////////////////////////////////
// we want to send interactions of type InteractionRoot.X, so we need
// to tell the RTI that we're publishing it first. We don't need to
// inform it of the parameters, only the class, making it much simpler
int interactionHandle = rtiamb.getInteractionClassHandle( "InteractionRoot.X" );
// do the publication
rtiamb.publishInteractionClass( interactionHandle );
////////////////////////////////////////////////////
// subscribe to the InteractionRoot.X interaction //
////////////////////////////////////////////////////
// we also want to receive other interaction of the same type that are
// sent out by other federates, so we have to subscribe to it first
rtiamb.subscribeInteractionClass( interactionHandle );
}
/**
* This method will register an instance of the class ObjectRoot.A and will
* return the federation-wide unique handle for that instance. Later in the
* simulation, we will update the attribute values for this instance
*/
private int registerObject() throws RTIexception
{
int classHandle = rtiamb.getObjectClassHandle( "ObjectRoot.A" );
return rtiamb.registerObjectInstance( classHandle );
}
/**
* This method will update all the values of the given object instance. It will
* set each of the values to be a string which is equal to the name of the
* attribute plus the current LBTS (federate time plus lookahead). eg "aa:10.0" if that value is 10.0.
* <p/>
* Note that we don't actually have to update all the attributes at once, we
* could update them individually, in groups or not at all!
*/
private void updateAttributeValues( int objectHandle ) throws RTIexception
{
///////////////////////////////////////////////
// create the necessary container and values //
///////////////////////////////////////////////
// create the collection to store the values in, as you can see
// this is quite a lot of work
SuppliedAttributes attributes =
RtiFactoryFactory.getRtiFactory().createSuppliedAttributes();
// generate the new values
// we use EncodingHelpers to make things nice and friendly for both Java and C++
byte[] aaValue = EncodingHelpers.encodeString( "aa:" + getLbts() );
byte[] abValue = EncodingHelpers.encodeString( "ab:" + getLbts() );
byte[] acValue = EncodingHelpers.encodeString( "ac:" + getLbts() );
// get the handles
// this line gets the object class of the instance identified by the
// object instance the handle points to
int classHandle = rtiamb.getObjectClass( objectHandle );
int aaHandle = rtiamb.getAttributeHandle( "aa", classHandle );
int abHandle = rtiamb.getAttributeHandle( "ab", classHandle );
int acHandle = rtiamb.getAttributeHandle( "ac", classHandle );
// put the values into the collection
attributes.add( aaHandle, aaValue );
attributes.add( abHandle, abValue );
attributes.add( acHandle, acValue );
//////////////////////////
// do the actual update //
//////////////////////////
byte[] tag = EncodingHelpers.encodeString( ""+System.currentTimeMillis() );
rtiamb.updateAttributeValues( objectHandle, attributes, tag );
// note that if you want to associate a particular timestamp with the
// update, you have to supply it to the RTI. Here we send another update,
// this time with a timestamp:
LogicalTime time = convertTime( fedamb.federateTime +
fedamb.federateLookahead );
rtiamb.updateAttributeValues( objectHandle, attributes, tag, time );
}
/**
* This method will send out an interaction of the type InteractionRoot.X. Any
* federates which are subscribed to it will receive a notification the next time
* they tick(). Here we are passing only two of the three parameters we could be
* passing, but we don't actually have to pass any at all!
*/
private void sendInteraction() throws RTIexception
{
///////////////////////////////////////////////
// create the necessary container and values //
///////////////////////////////////////////////
// create the collection to store the values in
SuppliedParameters parameters =
RtiFactoryFactory.getRtiFactory().createSuppliedParameters();
// generate the new values
// we use EncodingHelpers to make things nice and friendly for both Java and C++
byte[] xaValue = EncodingHelpers.encodeString( "xa:" + getLbts() );
byte[] xbValue = EncodingHelpers.encodeString( "xb:" + getLbts() );
// get the handles
int classHandle = rtiamb.getInteractionClassHandle( "InteractionRoot.X" );
int xaHandle = rtiamb.getParameterHandle( "xa", classHandle );
int xbHandle = rtiamb.getParameterHandle( "xb", classHandle );
// put the values into the collection
parameters.add( xaHandle, xaValue );
parameters.add( xbHandle, xbValue );
//////////////////////////
// send the interaction //
//////////////////////////
byte[] tag = EncodingHelpers.encodeString( ""+System.currentTimeMillis() );
rtiamb.sendInteraction( classHandle, parameters, tag );
// if you want to associate a particular timestamp with the
// interaction, you will have to supply it to the RTI. Here
// we send another interaction, this time with a timestamp:
LogicalTime time = convertTime( fedamb.federateTime +
fedamb.federateLookahead );
rtiamb.sendInteraction( classHandle, parameters, tag, time );
}
/**
* This method will request a time advance to the current time, plus the given
* timestep. It will then wait until a notification of the time advance grant
* has been received.
*/
private void advanceTime( double timestep ) throws RTIexception
{
// request the advance
fedamb.isAdvancing = true;
LogicalTime newTime = convertTime( fedamb.federateTime + timestep );
rtiamb.timeAdvanceRequest( newTime );
// wait for the time advance to be granted. ticking will tell the
// LRC to start delivering callbacks to the federate
while( fedamb.isAdvancing )
{
rtiamb.tick();
}
}
/**
* This method will attempt to delete the object instance of the given
* handle. We can only delete objects we created, or for which we own the
* privilegeToDelete attribute.
*/
private void deleteObject( int handle ) throws RTIexception
{
rtiamb.deleteObjectInstance( handle, null ); // no tag, we're lazy
}
private double getLbts()
{
return fedamb.federateTime + fedamb.federateLookahead;
}
//----------------------------------------------------------
// STATIC METHODS
//----------------------------------------------------------
public static void main( String[] args )
{
// get a federate name, use "diagnosticsFederate" as default
String federateName = "diagnosticsFederate";
if( args.length != 0 )
{
federateName = args[0];
}
try
{
// run the example federate
new EventLogger().runFederate( federateName );
}
catch( RTIexception rtie )
{
// an exception occurred, just log the information and exit
rtie.printStackTrace();
}
}
}
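/*
 * Illustrative only: the federate is launched from the command line, optionally with a
 * federate name (it defaults to "diagnosticsFederate" otherwise), and expects the
 * "testfom.fed" FOM file referenced above to be present in the working directory:
 *
 *   java EventLogger myLoggerFederate
 *
 * The classpath must also include the Portico jar; that detail is assumed here.
 */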
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/campaign_label_service.proto
package com.google.ads.googleads.v9.services;
/**
* <pre>
* The result for a campaign label mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.MutateCampaignLabelResult}
*/
public final class MutateCampaignLabelResult extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.MutateCampaignLabelResult)
MutateCampaignLabelResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateCampaignLabelResult.newBuilder() to construct.
private MutateCampaignLabelResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MutateCampaignLabelResult() {
resourceName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new MutateCampaignLabelResult();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MutateCampaignLabelResult(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
resourceName_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.CampaignLabelServiceProto.internal_static_google_ads_googleads_v9_services_MutateCampaignLabelResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.CampaignLabelServiceProto.internal_static_google_ads_googleads_v9_services_MutateCampaignLabelResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.MutateCampaignLabelResult.class, com.google.ads.googleads.v9.services.MutateCampaignLabelResult.Builder.class);
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object resourceName_;
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.services.MutateCampaignLabelResult)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.services.MutateCampaignLabelResult other = (com.google.ads.googleads.v9.services.MutateCampaignLabelResult) obj;
if (!getResourceName()
.equals(other.getResourceName())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.services.MutateCampaignLabelResult prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The result for a campaign label mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.MutateCampaignLabelResult}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.MutateCampaignLabelResult)
com.google.ads.googleads.v9.services.MutateCampaignLabelResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.CampaignLabelServiceProto.internal_static_google_ads_googleads_v9_services_MutateCampaignLabelResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.CampaignLabelServiceProto.internal_static_google_ads_googleads_v9_services_MutateCampaignLabelResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.MutateCampaignLabelResult.class, com.google.ads.googleads.v9.services.MutateCampaignLabelResult.Builder.class);
}
// Construct using com.google.ads.googleads.v9.services.MutateCampaignLabelResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
resourceName_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.services.CampaignLabelServiceProto.internal_static_google_ads_googleads_v9_services_MutateCampaignLabelResult_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateCampaignLabelResult getDefaultInstanceForType() {
return com.google.ads.googleads.v9.services.MutateCampaignLabelResult.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateCampaignLabelResult build() {
com.google.ads.googleads.v9.services.MutateCampaignLabelResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateCampaignLabelResult buildPartial() {
com.google.ads.googleads.v9.services.MutateCampaignLabelResult result = new com.google.ads.googleads.v9.services.MutateCampaignLabelResult(this);
result.resourceName_ = resourceName_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.services.MutateCampaignLabelResult) {
return mergeFrom((com.google.ads.googleads.v9.services.MutateCampaignLabelResult)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v9.services.MutateCampaignLabelResult other) {
if (other == com.google.ads.googleads.v9.services.MutateCampaignLabelResult.getDefaultInstance()) return this;
if (!other.getResourceName().isEmpty()) {
resourceName_ = other.resourceName_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.services.MutateCampaignLabelResult parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.services.MutateCampaignLabelResult) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object resourceName_ = "";
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The bytes for resourceName.
*/
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @param value The resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resourceName_ = value;
onChanged();
return this;
}
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return This builder for chaining.
*/
public Builder clearResourceName() {
resourceName_ = getDefaultInstance().getResourceName();
onChanged();
return this;
}
/**
* <pre>
* Returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @param value The bytes for resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resourceName_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.MutateCampaignLabelResult)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.MutateCampaignLabelResult)
private static final com.google.ads.googleads.v9.services.MutateCampaignLabelResult DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.MutateCampaignLabelResult();
}
public static com.google.ads.googleads.v9.services.MutateCampaignLabelResult getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MutateCampaignLabelResult>
PARSER = new com.google.protobuf.AbstractParser<MutateCampaignLabelResult>() {
@java.lang.Override
public MutateCampaignLabelResult parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutateCampaignLabelResult(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<MutateCampaignLabelResult> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MutateCampaignLabelResult> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateCampaignLabelResult getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
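/*
 * Illustrative only: a minimal sketch of building a MutateCampaignLabelResult with the
 * generated builder above and parsing it back through the generated parser. The resource
 * name value is a made-up example, not one returned by the API.
 */
class MutateCampaignLabelResultUsageSketch {
    static MutateCampaignLabelResult roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
        MutateCampaignLabelResult result = MutateCampaignLabelResult.newBuilder()
                .setResourceName("customers/1234567890/campaignLabels/111~222")
                .build();
        // serialize to bytes and parse back with the generated parseFrom overload
        return MutateCampaignLabelResult.parseFrom(result.toByteArray());
    }
}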
|
|
// JRIEngine - REngine-based interface to JRI
// Copyright(c) 2009 Simon Urbanek
//
// Currently it uses low-level calls from org.rosuda.JRI.Rengine, but
// all REXP representations are created based on the org.rosuda.REngine API
package org.rosuda.REngine.JRI;
import org.rosuda.JRI.Rengine;
import org.rosuda.JRI.Mutex;
import org.rosuda.JRI.RMainLoopCallbacks;
import org.rosuda.REngine.*;
/** <code>JRIEngine</code> is a <code>REngine</code> implementation using JRI (Java/R Interface).
<p>
Note that at most one JRI instance can exist in a given JVM process, because R does not support multiple threads. <code>JRIEngine</code> itself is thread-safe, so it is possible to invoke its methods from any thread. However, this is achieved by serializing all entries into R, so be aware of possible deadlock conditions if your R code calls back into Java (<code>JRIEngine</code> is re-entrant from the same thread, so deadlock issues can arise only with multiple threads interacting through R). */
public class JRIEngine extends REngine implements RMainLoopCallbacks {
// internal R types as defined in Rinternals.h
static final int NILSXP = 0; /* nil = NULL */
static final int SYMSXP = 1; /* symbols */
static final int LISTSXP = 2; /* lists of dotted pairs */
static final int CLOSXP = 3; /* closures */
static final int ENVSXP = 4; /* environments */
static final int PROMSXP = 5; /* promises: [un]evaluated closure arguments */
static final int LANGSXP = 6; /* language constructs */
static final int SPECIALSXP = 7; /* special forms */
static final int BUILTINSXP = 8; /* builtin non-special forms */
static final int CHARSXP = 9; /* "scalar" string type (internal only) */
static final int LGLSXP = 10; /* logical vectors */
static final int INTSXP = 13; /* integer vectors */
static final int REALSXP = 14; /* real variables */
static final int CPLXSXP = 15; /* complex variables */
static final int STRSXP = 16; /* string vectors */
static final int DOTSXP = 17; /* dot-dot-dot object */
static final int ANYSXP = 18; /* make "any" args work */
static final int VECSXP = 19; /* generic vectors */
static final int EXPRSXP = 20; /* expressions vectors */
static final int BCODESXP = 21; /* byte code */
static final int EXTPTRSXP = 22; /* external pointer */
static final int WEAKREFSXP = 23; /* weak reference */
static final int RAWSXP = 24; /* raw bytes */
static final int S4SXP = 25; /* S4 object */
/** minimal JRI API version that is required by this class in order to work properly (currently API 1.10, corresponding to JRI 0.5-1 or higher) */
static public final long requiredAPIversion = 0x010a;
/** currently running <code>JRIEngine</code> - there can be only one and we store it here. Essentially if it is <code>null</code> then R was not initialized. */
static JRIEngine jriEngine = null;
/** reference to the underlying low-level JRI (RNI) engine */
Rengine rni = null;
/** event loop callbacks associated with this engine. */
REngineCallbacks callbacks = null;
/** mutex synchronizing access to R through JRIEngine.<p> NOTE: only access through this class is synchronized. Any other access (e.g. using RNI directly) is NOT. */
Mutex rniMutex = null;
// cached pointers of special objects in R
long R_UnboundValue, R_NilValue;
/** special, global references */
public REXPReference globalEnv, emptyEnv, baseEnv, nullValueRef;
/** canonical NULL object */
public REXPNull nullValue;
/** class used for wrapping raw pointers such that they are adequately protected and released according to the lifespan of the Java object */
class JRIPointer {
long ptr;
JRIPointer(long ptr, boolean preserve) {
this.ptr = ptr;
if (preserve && ptr != 0 && ptr != R_NilValue) {
boolean obtainedLock = rniMutex.safeLock(); // this will inherently wait for R to become ready
try {
rni.rniPreserve(ptr);
} finally {
if (obtainedLock) rniMutex.unlock();
}
}
}
protected void finalize() throws Throwable {
try {
if (ptr != 0 && ptr != R_NilValue) {
boolean obtainedLock = rniMutex.safeLock();
try {
rni.rniRelease(ptr);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
}
} finally {
super.finalize();
}
}
long pointer() { return ptr; }
}
/** factory method called by <code>engineForClass</code>
@return new or current engine (new if there is none, current otherwise since R allows only one engine at any time) */
public static REngine createEngine() throws REngineException {
// there can only be one JRI engine in a process
if (jriEngine == null)
jriEngine = new JRIEngine();
return jriEngine;
}
public static REngine createEngine(String[] args, REngineCallbacks callbacks, boolean runREPL) throws REngineException {
if (jriEngine != null)
throw new REngineException(jriEngine, "engine already running - cannot use extended constructor on a running instance");
return jriEngine = new JRIEngine(args, callbacks, runREPL);
}
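/*
 * Illustrative only: typical client code obtains the singleton engine through the factory
 * methods above and then evaluates R code through the generic REngine API, e.g.
 *
 *   REngine engine = JRIEngine.createEngine(new String[] { "--no-save" }, null, false);
 *   REXP one = engine.parseAndEval("1 + 1");
 *
 * parseAndEval is part of the org.rosuda.REngine.REngine base API. Since only one JRI
 * engine may exist per JVM, a later no-argument createEngine() call returns this same
 * instance, while the extended form above throws if an engine is already running.
 */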
public Rengine getRni() {
return rni;
}
/** default constructor - this constructor is also used via <code>createEngine</code> factory call and implies --no-save R argument, no callbacks and no REPL.
<p>This is equivalent to <code>JRIEngine(new String[] { "--no-save" }, null, false)</code> */
public JRIEngine() throws REngineException {
this(new String[] { "--no-save" }, (REngineCallbacks) null, false);
}
/** create <code>JRIEngine</code> with specified R command line arguments, no callbacks and no REPL.
<p>This is equivalent to <code>JRIEngine(args, null, false)</code> */
public JRIEngine(String args[]) throws REngineException {
this(args, (REngineCallbacks) null, false);
}
/** creates a JRI engine with specified delegate for callbacks (JRI compatibility mode ONLY!). The event loop is started if <code>callbacks</code> is not <code>null</code>.
* @param args arguments to pass to R (note that R usually requires something like <code>--no-save</code>!)
* @param callbacks delegate class to process event loop callback from R or <code>null</code> if no event loop is desired
**/
public JRIEngine(String args[], RMainLoopCallbacks callbacks) throws REngineException {
this(args, callbacks, (callbacks == null) ? false : true);
}
/** creates a JRI engine with specified delegate for callbacks
* @param args arguments to pass to R (note that R usually requires something like <code>--no-save</code>!)
* @param callbacks delegate class to process callbacks from R or <code>null</code> if no callbacks are desired
* @param runREPL if set to <code>true</code> then the event loop (REPL) will be started, otherwise the engine is in direct operation mode.
*/
public JRIEngine(String args[], REngineCallbacks callbacks, boolean runREPL) throws REngineException {
// if Rengine hasn't been able to load the native JRI library in its static
// initializer, throw an exception
if (!Rengine.jriLoaded)
throw new REngineException (null, "Cannot load JRI native library");
if (Rengine.getVersion() < requiredAPIversion)
throw new REngineException(null, "JRI API version is too old, update rJava/JRI to match the REngine API");
this.callbacks = callbacks;
// the default modus operandi is without event loop and with --no-save option
rni = new Rengine(args, runREPL, (callbacks == null) ? null : this);
rniMutex = rni.getRsync();
boolean obtainedLock = rniMutex.safeLock(); // this will inherently wait for R to become ready
try {
if (!rni.waitForR())
throw(new REngineException(this, "Unable to initialize R"));
if (rni.rniGetVersion() < requiredAPIversion)
throw(new REngineException(this, "JRI API version is too old, update rJava/JRI to match the REngine API"));
globalEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_GlobalEnv)));
nullValueRef = new REXPReference(this, new Long(R_NilValue = rni.rniSpecialObject(Rengine.SO_NilValue)));
emptyEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_EmptyEnv)));
baseEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_BaseEnv)));
nullValue = new REXPNull();
R_UnboundValue = rni.rniSpecialObject(Rengine.SO_UnboundValue);
} finally {
if (obtainedLock) rniMutex.unlock();
}
		// register ourselves as the main and last engine
lastEngine = this;
if (jriEngine == null)
jriEngine = this;
}
/** creates a JRI engine with specified delegate for callbacks (JRI compatibility mode ONLY! Will be deprecated soon!)
* @param args arguments to pass to R (note that R usually requires something like <code>--no-save</code>!)
	 * @param callbacks delegate class to process callbacks from R or <code>null</code> if no callbacks are desired
* @param runREPL if set to <code>true</code> then the event loop (REPL) will be started, otherwise the engine is in direct operation mode.
*/
public JRIEngine(String args[], RMainLoopCallbacks callbacks, boolean runREPL) throws REngineException {
// if Rengine hasn't been able to load the native JRI library in its static
// initializer, throw an exception
if (!Rengine.jriLoaded)
throw new REngineException (null, "Cannot load JRI native library");
if (Rengine.getVersion() < requiredAPIversion)
throw new REngineException(null, "JRI API version is too old, update rJava/JRI to match the REngine API");
// the default modus operandi is without event loop and with --no-save option
rni = new Rengine(args, runREPL, callbacks);
rniMutex = rni.getRsync();
boolean obtainedLock = rniMutex.safeLock(); // this will inherently wait for R to become ready
try {
if (!rni.waitForR())
throw(new REngineException(this, "Unable to initialize R"));
if (rni.rniGetVersion() < requiredAPIversion)
throw(new REngineException(this, "JRI API version is too old, update rJava/JRI to match the REngine API"));
globalEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_GlobalEnv)));
nullValueRef = new REXPReference(this, new Long(R_NilValue = rni.rniSpecialObject(Rengine.SO_NilValue)));
emptyEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_EmptyEnv)));
baseEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_BaseEnv)));
nullValue = new REXPNull();
R_UnboundValue = rni.rniSpecialObject(Rengine.SO_UnboundValue);
} finally {
if (obtainedLock) rniMutex.unlock();
}
		// register ourselves as the main and last engine
lastEngine = this;
if (jriEngine == null)
jriEngine = this;
}
/** WARNING: legacy fallback for hooking from R into an existing Rengine - do NOT use for creating a new Rengine - it will go away eventually */
public JRIEngine(Rengine eng) throws REngineException {
// if Rengine hasn't been able to load the native JRI library in its static
// initializer, throw an exception
if (!Rengine.jriLoaded)
throw new REngineException (null, "Cannot load JRI native library");
rni = eng;
if (rni.rniGetVersion() < 0x109)
throw(new REngineException(this, "R JRI engine is too old - RNI API 1.9 (JRI 0.5) or newer is required"));
rniMutex = rni.getRsync();
boolean obtainedLock = rniMutex.safeLock();
try {
globalEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_GlobalEnv)));
nullValueRef = new REXPReference(this, new Long(R_NilValue = rni.rniSpecialObject(Rengine.SO_NilValue)));
emptyEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_EmptyEnv)));
baseEnv = new REXPReference(this, new Long(rni.rniSpecialObject(Rengine.SO_BaseEnv)));
nullValue = new REXPNull();
R_UnboundValue = rni.rniSpecialObject(Rengine.SO_UnboundValue);
} finally {
if (obtainedLock) rniMutex.unlock();
}
		// register ourselves as the main and last engine
lastEngine = this;
if (jriEngine == null)
jriEngine = this;
}
public REXP parse(String text, boolean resolve) throws REngineException {
REXP ref = null;
boolean obtainedLock = rniMutex.safeLock();
try {
long pr = rni.rniParse(text, -1);
if (pr == 0 || pr == R_NilValue) throw(new REngineException(this, "Parse error"));
rni.rniPreserve(pr);
ref = new REXPReference(this, new Long(pr));
if (resolve)
try { ref = resolveReference(ref); } catch (REXPMismatchException me) { };
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref;
}
public REXP eval(REXP what, REXP where, boolean resolve) throws REngineException, REXPMismatchException {
REXP ref = null;
long rho = 0;
if (where != null && !where.isReference()) {
if (!where.isEnvironment() || ((REXPEnvironment)where).getHandle() == null)
throw(new REXPMismatchException(where, "environment"));
else
rho = ((JRIPointer)((REXPEnvironment)where).getHandle()).pointer();
} else
if (where != null) rho = ((Long)((REXPReference)where).getHandle()).longValue();
if (what == null) throw(new REngineException(this, "null object to evaluate"));
if (!what.isReference()) {
if (what.isExpression() || what.isLanguage())
what = createReference(what);
else
throw(new REXPMismatchException(where, "reference, expression or language"));
}
boolean obtainedLock = rniMutex.safeLock();
try {
long pr = rni.rniEval(((Long)((REXPReference)what).getHandle()).longValue(), rho);
if (pr == 0) // rniEval() signals error by passing 0
throw new REngineEvalException(this, "error during evaluation", REngineEvalException.ERROR) ;
rni.rniPreserve(pr);
ref = new REXPReference(this, new Long(pr));
if (resolve)
ref = resolveReference(ref);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref;
}
public void assign(String symbol, REXP value, REXP env) throws REngineException, REXPMismatchException {
long rho = 0;
if (env != null && !env.isReference()) {
if (!env.isEnvironment() || ((REXPEnvironment)env).getHandle() == null)
throw(new REXPMismatchException(env, "environment"));
else
rho = ((JRIPointer)((REXPEnvironment)env).getHandle()).pointer();
} else
if (env != null) rho = ((Long)((REXPReference)env).getHandle()).longValue();
if (value == null) value = nullValueRef;
if (!value.isReference())
value = createReference(value); // if value is not a reference, we have to create one
boolean obtainedLock = rniMutex.safeLock(), succeeded = false;
try {
succeeded = rni.rniAssign(symbol, ((Long)((REXPReference)value).getHandle()).longValue(), rho);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
if (!succeeded)
throw new REngineException(this, "assign failed (probably locked binding");
}
public REXP get(String symbol, REXP env, boolean resolve) throws REngineException, REXPMismatchException {
REXP ref = null;
long rho = 0;
if (env != null && !env.isReference()) {
if (!env.isEnvironment() || ((REXPEnvironment)env).getHandle() == null)
throw(new REXPMismatchException(env, "environment"));
else
rho = ((JRIPointer)((REXPEnvironment)env).getHandle()).pointer();
} else
if (env != null) rho = ((Long)((REXPReference)env).getHandle()).longValue();
boolean obtainedLock = rniMutex.safeLock();
try {
long pr = rni.rniFindVar(symbol, rho);
if (pr == R_UnboundValue || pr == 0) return null;
rni.rniPreserve(pr);
ref = new REXPReference(this, new Long(pr));
if (resolve)
try { ref = resolveReference(ref); } catch (REXPMismatchException me) { };
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref;
}
public REXP resolveReference(REXP ref) throws REngineException, REXPMismatchException {
REXP res = null;
if (ref == null) throw(new REngineException(this, "resolveReference called on NULL input"));
if (!ref.isReference()) throw(new REXPMismatchException(ref, "reference"));
long ptr = ((Long)((REXPReference)ref).getHandle()).longValue();
if (ptr == 0) return nullValue;
return resolvePointer(ptr);
}
/**
* Turn an R pointer (long) into a REXP object.
*
* This is the actual implementation of <code>resolveReference</code> but it works directly on the long pointers to be more efficient when performing recursive de-referencing */
REXP resolvePointer(long ptr) throws REngineException, REXPMismatchException {
if (ptr == 0) return nullValue;
REXP res = null;
boolean obtainedLock = rniMutex.safeLock();
try {
int xt = rni.rniExpType(ptr);
String an[] = rni.rniGetAttrNames(ptr);
REXPList attrs = null;
if (an != null && an.length > 0) { // are there attributes? Then we need to resolve them first
// we allow special handling for Java references so we need the class and jobj
long jobj = 0;
String oclass = null;
RList attl = new RList();
for (int i = 0; i < an.length; i++) {
long aptr = rni.rniGetAttr(ptr, an[i]);
if (aptr != 0 && aptr != R_NilValue) {
if (an[i].equals("jobj")) jobj = aptr;
REXP av = resolvePointer(aptr);
if (av != null && av != nullValue) {
attl.put(an[i], av);
if (an[i].equals("class") && av.isString())
oclass = av.asString();
}
}
}
if (attl.size() > 0)
attrs = new REXPList(attl);
// FIXME: in general, we could allow arbitrary convertors here ...
// Note that the jobj hack is only needed because we don't support EXTPTRSXP conversion
// (for a good reason - we can't separate the PTR from the R object so the only way it can
// live is as a reference and we don't want resolvePointer to ever return REXPReference as
// that could trigger infinite recursions), but if we did, we could allow post-processing
// based on the class attribute on the converted REXP.. (better, we can leverage REXPUnknown
// and pass the ptr to the convertor so it can pull things like EXTPTR via rni)
if (jobj != 0 && oclass != null &&
(oclass.equals("jobjRef") ||
oclass.equals("jarrayRef") ||
oclass.equals("jrectRef")))
return new REXPJavaReference(rni.rniXrefToJava(jobj), attrs);
}
switch (xt) {
case NILSXP:
return nullValue;
case STRSXP:
String[] s = rni.rniGetStringArray(ptr);
res = new REXPString(s, attrs);
break;
case INTSXP:
if (rni.rniInherits(ptr, "factor")) {
long levx = rni.rniGetAttr(ptr, "levels");
if (levx != 0) {
String[] levels = null;
// we're using low-level calls here (FIXME?)
int rlt = rni.rniExpType(levx);
if (rlt == STRSXP) {
levels = rni.rniGetStringArray(levx);
int[] ids = rni.rniGetIntArray(ptr);
res = new REXPFactor(ids, levels, attrs);
}
}
}
// if it's not a factor, then we use int[] instead
if (res == null)
res = new REXPInteger(rni.rniGetIntArray(ptr), attrs);
break;
case REALSXP:
res = new REXPDouble(rni.rniGetDoubleArray(ptr), attrs);
break;
case LGLSXP:
{
int ba[] = rni.rniGetBoolArrayI(ptr);
byte b[] = new byte[ba.length];
for (int i = 0; i < ba.length; i++)
b[i] = (ba[i] == 0 || ba[i] == 1) ? (byte) ba[i] : REXPLogical.NA;
res = new REXPLogical(b, attrs);
}
break;
case VECSXP:
{
long l[] = rni.rniGetVector(ptr);
REXP rl[] = new REXP[l.length];
long na = rni.rniGetAttr(ptr, "names");
String[] names = null;
if (na != 0 && rni.rniExpType(na) == STRSXP)
names = rni.rniGetStringArray(na);
for (int i = 0; i < l.length; i++)
rl[i] = resolvePointer(l[i]);
RList list = (names == null) ? new RList(rl) : new RList(rl, names);
res = new REXPGenericVector(list, attrs);
}
break;
case RAWSXP:
res = new REXPRaw(rni.rniGetRawArray(ptr), attrs);
break;
case LISTSXP:
case LANGSXP:
{
RList l = new RList();
// we need to plow through the list iteratively - the recursion occurs at the value level
long cdr = ptr;
while (cdr != 0 && cdr != R_NilValue) {
long car = rni.rniCAR(cdr);
long tag = rni.rniTAG(cdr);
String name = null;
if (rni.rniExpType(tag) == SYMSXP)
name = rni.rniGetSymbolName(tag);
REXP val = resolvePointer(car);
if (name == null) l.add(val); else l.put(name, val);
cdr = rni.rniCDR(cdr);
}
res = (xt == LANGSXP) ? new REXPLanguage(l, attrs) : new REXPList(l, attrs);
}
break;
case SYMSXP:
res = new REXPSymbol(rni.rniGetSymbolName(ptr));
break;
case ENVSXP:
if (ptr != 0) rni.rniPreserve(ptr);
res = new REXPEnvironment(this, new JRIPointer(ptr, false));
break;
case S4SXP:
res = new REXPS4(attrs);
break;
default:
res = new REXPUnknown(xt, attrs);
break;
}
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return res;
}
public REXP createReference(REXP value) throws REngineException, REXPMismatchException {
if (value == null) throw(new REngineException(this, "createReference from a NULL value"));
if (value.isReference()) return value;
long ptr = createReferencePointer(value);
if (ptr == 0) return null;
boolean obtainedLock = rniMutex.safeLock();
try {
rni.rniPreserve(ptr);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return new REXPReference(this, new Long(ptr));
}
/**
* Create an R object, returning its pointer, from an REXP java object.
*
* @param value
* @return long R pointer
* @throws REngineException if any of the RNI calls fails
* @throws REXPMismatchException only if some internal inconsistency happens. The internal logic should prevent invalid access to valid objects.
*/
long createReferencePointer(REXP value) throws REngineException, REXPMismatchException {
if (value.isReference()) { // if it's reference, return the handle if it's from this engine
REXPReference vref = (REXPReference) value;
if (vref.getEngine() != this)
throw new REXPMismatchException(value, "reference (cross-engine reference is invalid)");
return ((Long)vref.getHandle()).longValue();
}
boolean obtainedLock = rniMutex.safeLock();
int upp = 0;
try {
long ptr = 0;
if (value.isNull()) // NULL cannot have attributes, hence get out right away
return R_NilValue;
else if (value.isLogical()) {
int v[] = value.asIntegers();
for (int i = 0; i < v.length; i++)
v[i] = (v[i] < 0) ? 2 : ((v[i] == 0) ? 0 : 1); // convert to logical NAs as used by R
ptr = rni.rniPutBoolArrayI(v);
}
else if (value.isInteger())
ptr = rni.rniPutIntArray(value.asIntegers());
else if (value.isRaw())
ptr = rni.rniPutRawArray(value.asBytes());
else if (value.isNumeric())
ptr = rni.rniPutDoubleArray(value.asDoubles());
else if (value.isString())
ptr = rni.rniPutStringArray(value.asStrings());
else if (value.isEnvironment()) {
JRIPointer l = (JRIPointer) ((REXPEnvironment)value).getHandle();
				if (l == null) { // no associated reference, create a new environment
long p = rni.rniParse("new.env(parent=baseenv())", 1);
ptr = rni.rniEval(p, 0);
/* TODO: should we handle REngineEvalException.ERROR and REngineEvalException.INVALID_INPUT here, for completeness */
} else
ptr = l.pointer();
} else if (value.isPairList()) { // LISTSXP / LANGSXP
boolean lang = value.isLanguage();
RList rl = value.asList();
ptr = R_NilValue;
int j = rl.size();
if (j == 0)
ptr = rni.rniCons(R_NilValue, 0, 0, lang);
else
				// we are in a somewhat unfortunate situation because we cannot append to the list (RNI has no rniSetCDR!), so we have to use Preserve and build the list backwards, which may be a bit slower ...
for (int i = j - 1; i >= 0; i--) {
REXP v = rl.at(i);
String n = rl.keyAt(i);
long sn = 0;
if (n != null) sn = rni.rniInstallSymbol(n);
long vptr = createReferencePointer(v);
if (vptr == 0) vptr = R_NilValue;
long ent = rni.rniCons(vptr, ptr, sn, (i == 0) && lang); /* only the head should be LANGSXP I think - verify ... */
rni.rniPreserve(ent); // preserve current head
rni.rniRelease(ptr); // release previous head (since it's part of the new one already)
ptr = ent;
}
} else if (value.isList()) { // VECSXP
int init_upp = upp;
RList rl = value.asList();
long xl[] = new long[rl.size()];
for (int i = 0; i < xl.length; i++) {
REXP rv = rl.at(i);
if (rv == null || rv.isNull())
xl[i] = R_NilValue;
else {
long lv = createReferencePointer(rv);
if (lv != 0 && lv != R_NilValue) {
rni.rniProtect(lv);
upp++;
} else lv = R_NilValue;
xl[i] = lv;
}
}
ptr = rni.rniPutVector(xl);
				if (upp > init_upp) {
rni.rniUnprotect(upp - init_upp);
upp = init_upp;
}
} else if (value.isSymbol())
return rni.rniInstallSymbol(value.asString()); // symbols need no attribute handling, hence get out right away
else if (value instanceof REXPJavaReference) { // we wrap Java references by calling new("jobjRef", ...)
Object jval = ((REXPJavaReference)value).getObject();
long jobj = rni.rniJavaToXref(jval);
rni.rniProtect(jobj);
long jobj_sym = rni.rniInstallSymbol("jobj");
long jclass_sym = rni.rniInstallSymbol("jclass");
String clname = "java/lang/Object";
if (jval != null) {
clname = jval.getClass().getName();
clname = clname.replace('.', '/');
}
long jclass = rni.rniPutString(clname);
rni.rniProtect(jclass);
long jobjRef = rni.rniPutString("jobjRef");
rni.rniProtect(jobjRef);
long ro = rni.rniEval(rni.rniLCons(rni.rniInstallSymbol("new"),
rni.rniCons(jobjRef,
rni.rniCons(jobj,
rni.rniCons(jclass, R_NilValue, jclass_sym, false),
jobj_sym, false))
), 0);
rni.rniUnprotect(3);
ptr = ro;
}
if (ptr == R_NilValue)
return ptr;
if (ptr != 0) {
REXPList att = value._attr();
				if (att == null || !att.isPairList()) return ptr; // no valid attributes? then we're done
RList al = att.asList();
if (al == null || al.size() < 1 || !al.isNamed()) return ptr; // again - no valid list, get out
rni.rniProtect(ptr); // symbols and other exotic creatures are already out by now, so it's ok to protect
upp++;
for (int i = 0; i < al.size(); i++) {
REXP v = al.at(i);
String n = al.keyAt(i);
if (n != null) {
long vptr = createReferencePointer(v);
if (vptr != 0 && vptr != R_NilValue)
rni.rniSetAttr(ptr, n, vptr);
}
}
return ptr;
}
} finally {
if (upp > 0)
rni.rniUnprotect(upp);
if (obtainedLock)
rniMutex.unlock();
}
		// we fall through here if the object cannot be handled or something went wrong
return 0;
}
public void finalizeReference(REXP ref) throws REngineException, REXPMismatchException {
if (ref != null && ref.isReference()) {
long ptr = ((Long)((REXPReference)ref).getHandle()).longValue();
boolean obtainedLock = rniMutex.safeLock();
try {
rni.rniRelease(ptr);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
}
}
public REXP getParentEnvironment(REXP env, boolean resolve) throws REngineException, REXPMismatchException {
REXP ref = null;
long rho = 0;
if (env != null && !env.isReference()) {
if (!env.isEnvironment() || ((REXPEnvironment)env).getHandle() == null)
throw(new REXPMismatchException(env, "environment"));
else
rho = ((JRIPointer)((REXPEnvironment)env).getHandle()).pointer();
} else
if (env != null) rho = ((Long)((REXPReference)env).getHandle()).longValue();
boolean obtainedLock = rniMutex.safeLock();
try {
long pr = rni.rniParentEnv(rho);
if (pr == 0 || pr == R_NilValue) return null; // this should never happen, really
rni.rniPreserve(pr);
ref = new REXPReference(this, new Long(pr));
if (resolve)
ref = resolveReference(ref);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref;
}
public REXP newEnvironment(REXP parent, boolean resolve) throws REXPMismatchException, REngineException {
REXP ref = null;
boolean obtainedLock = rniMutex.safeLock();
try {
long rho = 0;
if (parent != null && !parent.isReference()) {
if (!parent.isEnvironment() || ((REXPEnvironment)parent).getHandle() == null)
throw(new REXPMismatchException(parent, "environment"));
else
rho = ((JRIPointer)((REXPEnvironment)parent).getHandle()).pointer();
} else
if (parent != null) rho = ((Long)((REXPReference)parent).getHandle()).longValue();
if (rho == 0)
rho = ((Long)((REXPReference)globalEnv).getHandle()).longValue();
long p = rni.rniEval(rni.rniLCons(rni.rniInstallSymbol("new.env"), rni.rniCons(rho, R_NilValue, rni.rniInstallSymbol("parent"), false)), 0);
/* TODO: should we handle REngineEvalException.INVALID_INPUT and REngineEvalException.ERROR here, for completeness */
if (p != 0) rni.rniPreserve(p);
ref = new REXPReference(this, new Long(p));
if (resolve)
ref = resolveReference(ref);
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref;
}
public boolean close() {
if (rni == null) return false;
rni.end();
return true;
}
/** attempts to obtain a lock for this R engine synchronously (without waiting for it).
@return 0 if the lock could not be obtained (R is busy) and some other value otherwise (1 = lock obtained, 2 = the current thread already holds a lock) -- the returned value must be used in a matching call to {@link #unlock(int)}. */
public synchronized int tryLock() {
int res = rniMutex.tryLock();
return (res == 1) ? 0 : ((res == -1) ? 2 : 1);
}
	/** obtains a lock for this R engine, waiting until it becomes available.
@return value that must be passed to {@link #unlock} in order to release the lock */
public synchronized int lock() {
return rniMutex.safeLock() ? 1 : 2;
}
/** releases a lock previously obtained by {@link #lock()} or {@link #tryLock()}.
@param lockValue value returned by {@link #lock()} or {@link #tryLock()}. */
public synchronized void unlock(int lockValue) {
if (lockValue == 1) rniMutex.unlock();
}
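	/* Illustrative locking sketch (assumes "engine" is a JRIEngine instance): the value
	   returned by lock()/tryLock() must be handed back to unlock(int) so that a re-entrant
	   holder (return value 2) does not release a lock it did not acquire:

	       int lockState = engine.lock();
	       try {
	           // perform several low-level rni operations atomically here
	       } finally {
	           engine.unlock(lockState);
	       }
	*/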
public boolean supportsReferences() { return true; }
public boolean supportsEnvironments() { return true; }
// public boolean supportsREPL() { return true; }
public boolean supportsLocking() { return true; }
/**
	 * creates a <code>jobjRef</code> reference in R via rJava.<br><b>Important:</b> rJava must be loaded and initialized in R (e.g. via <code>eval("{library(rJava);.jinit()}",false)</code>), otherwise this will fail. Requires rJava 0.4-13 or higher!
*
* @param o object to push to R
*
* @return unresolved REXPReference of the newly created <code>jobjRef</code> object
* or <code>null</code> upon failure
*/
public REXPReference createRJavaRef(Object o) throws REngineException {
/* precaution */
if( o == null ){
return null ;
}
/* call Rengine api and make REXPReference from the result */
REXPReference ref = null ;
boolean obtainedLock = rniMutex.safeLock();
try {
org.rosuda.JRI.REXP rx = rni.createRJavaRef( o );
if( rx == null){
throw new REngineException( this, "Could not push java Object to R" ) ;
} else{
long p = rx.xp;
rni.rniPreserve(p) ;
ref = new REXPReference( this, new Long(p) ) ;
}
} finally {
if (obtainedLock)
rniMutex.unlock();
}
return ref ;
}
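	/* Illustrative sketch (assumes "engine" is a JRIEngine instance and that the rJava
	   package is installed on the R side):

	       engine.parseAndEval("{ library(rJava); .jinit() }");   // load and initialize rJava first
	       REXPReference ref = engine.createRJavaRef(new java.util.Date());
	       engine.assign("now", ref);                             // the Java object is visible in R as `now`
	*/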
/** JRI callbacks forwarding */
public void rWriteConsole (Rengine re, String text, int oType) {
if (callbacks != null && callbacks instanceof REngineOutputInterface)
((REngineOutputInterface)callbacks).RWriteConsole(this, text, oType);
}
public void rBusy (Rengine re, int which) {
if (callbacks != null && callbacks instanceof REngineUIInterface)
((REngineUIInterface)callbacks).RBusyState(this, which);
}
public synchronized String rReadConsole (Rengine re, String prompt, int addToHistory) {
if (callbacks != null && callbacks instanceof REngineInputInterface)
return ((REngineInputInterface)callbacks).RReadConsole(this, prompt, addToHistory);
try { wait(); } catch (Exception e) {}
return "";
}
public void rShowMessage (Rengine re, String message) {
if (callbacks != null && callbacks instanceof REngineOutputInterface)
((REngineOutputInterface)callbacks).RShowMessage(this, message);
}
public String rChooseFile (Rengine re, int newFile) {
if (callbacks != null && callbacks instanceof REngineUIInterface)
return ((REngineUIInterface)callbacks).RChooseFile(this, (newFile == 0));
return null;
}
public void rFlushConsole (Rengine re) {
if (callbacks != null && callbacks instanceof REngineOutputInterface)
((REngineOutputInterface)callbacks).RFlushConsole(this);
}
public void rSaveHistory (Rengine re, String filename) {
if (callbacks != null && callbacks instanceof REngineConsoleHistoryInterface)
((REngineConsoleHistoryInterface)callbacks).RSaveHistory(this, filename);
}
public void rLoadHistory (Rengine re, String filename) {
if (callbacks != null && callbacks instanceof REngineConsoleHistoryInterface)
((REngineConsoleHistoryInterface)callbacks).RLoadHistory(this, filename);
}
}
|
|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.gallery3d.exif;
import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.Map.Entry;
import java.util.TreeMap;
/**
* This class provides a low-level EXIF parsing API. Given a JPEG format
 * InputStream, the caller can request which IFDs to read via
 * {@link #parse(InputStream, int, ExifInterface)} with given options.
* <p>
* Below is an example of getting EXIF data from IFD 0 and EXIF IFD using the
* parser.
*
* <pre>
* void parse() {
* ExifParser parser = ExifParser.parse(mImageInputStream,
 *             ExifParser.OPTION_IFD_0 | ExifParser.OPTION_IFD_EXIF);
* int event = parser.next();
* while (event != ExifParser.EVENT_END) {
* switch (event) {
* case ExifParser.EVENT_START_OF_IFD:
* break;
* case ExifParser.EVENT_NEW_TAG:
* ExifTag tag = parser.getTag();
* if (!tag.hasValue()) {
* parser.registerForTagValue(tag);
* } else {
* processTag(tag);
* }
* break;
* case ExifParser.EVENT_VALUE_OF_REGISTERED_TAG:
* tag = parser.getTag();
* if (tag.getDataType() != ExifTag.TYPE_UNDEFINED) {
* processTag(tag);
* }
* break;
* }
* event = parser.next();
* }
* }
*
* void processTag(ExifTag tag) {
* // process the tag as you like.
* }
* </pre>
*/
class ExifParser {
private static final boolean LOGV = false;
private static final String TAG = "ExifParser";
/**
* When the parser reaches a new IFD area. Call {@link #getCurrentIfd()} to
* know which IFD we are in.
*/
public static final int EVENT_START_OF_IFD = 0;
/**
 * When the parser reaches a new tag. Call {@link #getTag()} to get the
* corresponding tag.
*/
public static final int EVENT_NEW_TAG = 1;
/**
 * When the parser reaches the value area of a tag that was registered by
* {@link #registerForTagValue(ExifTag)} previously. Call {@link #getTag()}
* to get the corresponding tag.
*/
public static final int EVENT_VALUE_OF_REGISTERED_TAG = 2;
/**
* When the parser reaches the compressed image area.
*/
public static final int EVENT_COMPRESSED_IMAGE = 3;
/**
* When the parser reaches the uncompressed image strip. Call
* {@link #getStripIndex()} to get the index of the strip.
*
* @see #getStripIndex()
* @see #getStripCount()
*/
public static final int EVENT_UNCOMPRESSED_STRIP = 4;
/**
* When there is nothing more to parse.
*/
public static final int EVENT_END = 5;
/**
* Option bit to request to parse IFD0.
*/
public static final int OPTION_IFD_0 = 1 << 0;
/**
* Option bit to request to parse IFD1.
*/
public static final int OPTION_IFD_1 = 1 << 1;
/**
* Option bit to request to parse Exif-IFD.
*/
public static final int OPTION_IFD_EXIF = 1 << 2;
/**
* Option bit to request to parse GPS-IFD.
*/
public static final int OPTION_IFD_GPS = 1 << 3;
/**
* Option bit to request to parse Interoperability-IFD.
*/
public static final int OPTION_IFD_INTEROPERABILITY = 1 << 4;
/**
* Option bit to request to parse thumbnail.
*/
public static final int OPTION_THUMBNAIL = 1 << 5;
protected static final int EXIF_HEADER = 0x45786966; // EXIF header "Exif"
protected static final short EXIF_HEADER_TAIL = (short) 0x0000; // EXIF header in APP1
// TIFF header
protected static final short LITTLE_ENDIAN_TAG = (short) 0x4949; // "II"
protected static final short BIG_ENDIAN_TAG = (short) 0x4d4d; // "MM"
protected static final short TIFF_HEADER_TAIL = 0x002A;
protected static final int TAG_SIZE = 12;
protected static final int OFFSET_SIZE = 2;
private static final Charset US_ASCII = Charset.forName("US-ASCII");
protected static final int DEFAULT_IFD0_OFFSET = 8;
private final CountedDataInputStream mTiffStream;
private final int mOptions;
private int mIfdStartOffset = 0;
private int mNumOfTagInIfd = 0;
private int mIfdType;
private ExifTag mTag;
private ImageEvent mImageEvent;
private int mStripCount;
private ExifTag mStripSizeTag;
private ExifTag mJpegSizeTag;
private boolean mNeedToParseOffsetsInCurrentIfd;
private boolean mContainExifData = false;
private int mApp1End;
private int mOffsetToApp1EndFromSOF = 0;
private byte[] mDataAboveIfd0;
private int mIfd0Position;
private int mTiffStartPosition;
private final ExifInterface mInterface;
private static final short TAG_EXIF_IFD = ExifInterface
.getTrueTagKey(ExifInterface.TAG_EXIF_IFD);
private static final short TAG_GPS_IFD = ExifInterface.getTrueTagKey(ExifInterface.TAG_GPS_IFD);
private static final short TAG_INTEROPERABILITY_IFD = ExifInterface
.getTrueTagKey(ExifInterface.TAG_INTEROPERABILITY_IFD);
private static final short TAG_JPEG_INTERCHANGE_FORMAT = ExifInterface
.getTrueTagKey(ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT);
private static final short TAG_JPEG_INTERCHANGE_FORMAT_LENGTH = ExifInterface
.getTrueTagKey(ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH);
private static final short TAG_STRIP_OFFSETS = ExifInterface
.getTrueTagKey(ExifInterface.TAG_STRIP_OFFSETS);
private static final short TAG_STRIP_BYTE_COUNTS = ExifInterface
.getTrueTagKey(ExifInterface.TAG_STRIP_BYTE_COUNTS);
private final TreeMap<Integer, Object> mCorrespondingEvent = new TreeMap<Integer, Object>();
private boolean isIfdRequested(int ifdType) {
switch (ifdType) {
case IfdId.TYPE_IFD_0:
return (mOptions & OPTION_IFD_0) != 0;
case IfdId.TYPE_IFD_1:
return (mOptions & OPTION_IFD_1) != 0;
case IfdId.TYPE_IFD_EXIF:
return (mOptions & OPTION_IFD_EXIF) != 0;
case IfdId.TYPE_IFD_GPS:
return (mOptions & OPTION_IFD_GPS) != 0;
case IfdId.TYPE_IFD_INTEROPERABILITY:
return (mOptions & OPTION_IFD_INTEROPERABILITY) != 0;
}
return false;
}
private boolean isThumbnailRequested() {
return (mOptions & OPTION_THUMBNAIL) != 0;
}
private ExifParser(InputStream inputStream, int options, ExifInterface iRef)
throws IOException, ExifInvalidFormatException {
if (inputStream == null) {
throw new IOException("Null argument inputStream to ExifParser");
}
if (LOGV) {
Log.v(TAG, "Reading exif...");
}
mInterface = iRef;
mContainExifData = seekTiffData(inputStream);
mTiffStream = new CountedDataInputStream(inputStream);
mOptions = options;
if (!mContainExifData) {
return;
}
parseTiffHeader();
long offset = mTiffStream.readUnsignedInt();
if (offset > Integer.MAX_VALUE) {
throw new ExifInvalidFormatException("Invalid offset " + offset);
}
mIfd0Position = (int) offset;
mIfdType = IfdId.TYPE_IFD_0;
if (isIfdRequested(IfdId.TYPE_IFD_0) || needToParseOffsetsInCurrentIfd()) {
registerIfd(IfdId.TYPE_IFD_0, offset);
if (offset != DEFAULT_IFD0_OFFSET) {
mDataAboveIfd0 = new byte[(int) offset - DEFAULT_IFD0_OFFSET];
read(mDataAboveIfd0);
}
}
}
/**
 * Parses the given InputStream with the given options.
*
* @exception IOException
* @exception ExifInvalidFormatException
*/
protected static ExifParser parse(InputStream inputStream, int options, ExifInterface iRef)
throws IOException, ExifInvalidFormatException {
return new ExifParser(inputStream, options, iRef);
}
/**
 * Parses the given InputStream with default options; that is, every IFD
 * and thumbnail will be parsed.
*
* @exception IOException
* @exception ExifInvalidFormatException
 * @see #parse(InputStream, int, ExifInterface)
*/
protected static ExifParser parse(InputStream inputStream, ExifInterface iRef)
throws IOException, ExifInvalidFormatException {
return new ExifParser(inputStream, OPTION_IFD_0 | OPTION_IFD_1
| OPTION_IFD_EXIF | OPTION_IFD_GPS | OPTION_IFD_INTEROPERABILITY
| OPTION_THUMBNAIL, iRef);
}
/**
* Moves the parser forward and returns the next parsing event
*
* @exception IOException
* @exception ExifInvalidFormatException
* @see #EVENT_START_OF_IFD
* @see #EVENT_NEW_TAG
* @see #EVENT_VALUE_OF_REGISTERED_TAG
* @see #EVENT_COMPRESSED_IMAGE
* @see #EVENT_UNCOMPRESSED_STRIP
* @see #EVENT_END
*/
protected int next() throws IOException, ExifInvalidFormatException {
if (!mContainExifData) {
return EVENT_END;
}
int offset = mTiffStream.getReadByteCount();
int endOfTags = mIfdStartOffset + OFFSET_SIZE + TAG_SIZE * mNumOfTagInIfd;
if (offset < endOfTags) {
mTag = readTag();
if (mTag == null) {
return next();
}
if (mNeedToParseOffsetsInCurrentIfd) {
checkOffsetOrImageTag(mTag);
}
return EVENT_NEW_TAG;
} else if (offset == endOfTags) {
// There is a link to ifd1 at the end of ifd0
if (mIfdType == IfdId.TYPE_IFD_0) {
long ifdOffset = readUnsignedLong();
if (isIfdRequested(IfdId.TYPE_IFD_1) || isThumbnailRequested()) {
if (ifdOffset != 0) {
registerIfd(IfdId.TYPE_IFD_1, ifdOffset);
}
}
} else {
int offsetSize = 4;
                // Some camera models use an invalid length for the offset to the next IFD
if (mCorrespondingEvent.size() > 0) {
offsetSize = mCorrespondingEvent.firstEntry().getKey() -
mTiffStream.getReadByteCount();
}
if (offsetSize < 4) {
Log.w(TAG, "Invalid size of link to next IFD: " + offsetSize);
} else {
long ifdOffset = readUnsignedLong();
if (ifdOffset != 0) {
Log.w(TAG, "Invalid link to next IFD: " + ifdOffset);
}
}
}
}
while (mCorrespondingEvent.size() != 0) {
Entry<Integer, Object> entry = mCorrespondingEvent.pollFirstEntry();
Object event = entry.getValue();
try {
skipTo(entry.getKey());
} catch (IOException e) {
Log.w(TAG, "Failed to skip to data at: " + entry.getKey() +
" for " + event.getClass().getName() + ", the file may be broken.");
continue;
}
if (event instanceof IfdEvent) {
mIfdType = ((IfdEvent) event).ifd;
mNumOfTagInIfd = mTiffStream.readUnsignedShort();
mIfdStartOffset = entry.getKey();
if (mNumOfTagInIfd * TAG_SIZE + mIfdStartOffset + OFFSET_SIZE > mApp1End) {
Log.w(TAG, "Invalid size of IFD " + mIfdType);
return EVENT_END;
}
mNeedToParseOffsetsInCurrentIfd = needToParseOffsetsInCurrentIfd();
if (((IfdEvent) event).isRequested) {
return EVENT_START_OF_IFD;
} else {
skipRemainingTagsInCurrentIfd();
}
} else if (event instanceof ImageEvent) {
mImageEvent = (ImageEvent) event;
return mImageEvent.type;
} else {
ExifTagEvent tagEvent = (ExifTagEvent) event;
mTag = tagEvent.tag;
if (mTag.getDataType() != ExifTag.TYPE_UNDEFINED) {
readFullTagValue(mTag);
checkOffsetOrImageTag(mTag);
}
if (tagEvent.isRequested) {
return EVENT_VALUE_OF_REGISTERED_TAG;
}
}
}
return EVENT_END;
}
/**
 * Skips the tag area of the current IFD; if the parser is not in the tag
 * area, nothing will happen.
*
* @throws IOException
* @throws ExifInvalidFormatException
*/
protected void skipRemainingTagsInCurrentIfd() throws IOException, ExifInvalidFormatException {
int endOfTags = mIfdStartOffset + OFFSET_SIZE + TAG_SIZE * mNumOfTagInIfd;
int offset = mTiffStream.getReadByteCount();
if (offset > endOfTags) {
return;
}
if (mNeedToParseOffsetsInCurrentIfd) {
while (offset < endOfTags) {
mTag = readTag();
offset += TAG_SIZE;
if (mTag == null) {
continue;
}
checkOffsetOrImageTag(mTag);
}
} else {
skipTo(endOfTags);
}
long ifdOffset = readUnsignedLong();
        // For ifd0, there is a link to ifd1 at the end of all tags
if (mIfdType == IfdId.TYPE_IFD_0
&& (isIfdRequested(IfdId.TYPE_IFD_1) || isThumbnailRequested())) {
if (ifdOffset > 0) {
registerIfd(IfdId.TYPE_IFD_1, ifdOffset);
}
}
}
private boolean needToParseOffsetsInCurrentIfd() {
switch (mIfdType) {
case IfdId.TYPE_IFD_0:
return isIfdRequested(IfdId.TYPE_IFD_EXIF) || isIfdRequested(IfdId.TYPE_IFD_GPS)
|| isIfdRequested(IfdId.TYPE_IFD_INTEROPERABILITY)
|| isIfdRequested(IfdId.TYPE_IFD_1);
case IfdId.TYPE_IFD_1:
return isThumbnailRequested();
case IfdId.TYPE_IFD_EXIF:
// The offset to interoperability IFD is located in Exif IFD
return isIfdRequested(IfdId.TYPE_IFD_INTEROPERABILITY);
default:
return false;
}
}
/**
* If {@link #next()} return {@link #EVENT_NEW_TAG} or
* {@link #EVENT_VALUE_OF_REGISTERED_TAG}, call this function to get the
* corresponding tag.
* <p>
* For {@link #EVENT_NEW_TAG}, the tag may not contain the value if the size
* of the value is greater than 4 bytes. One should call
* {@link ExifTag#hasValue()} to check if the tag contains value. If there
 * is no value, call {@link #registerForTagValue(ExifTag)} to have the parser
* emit {@link #EVENT_VALUE_OF_REGISTERED_TAG} when it reaches the area
* pointed by the offset.
* <p>
* When {@link #EVENT_VALUE_OF_REGISTERED_TAG} is emitted, the value of the
* tag will have already been read except for tags of undefined type. For
* tags of undefined type, call one of the read methods to get the value.
*
* @see #registerForTagValue(ExifTag)
* @see #read(byte[])
* @see #read(byte[], int, int)
* @see #readLong()
* @see #readRational()
* @see #readString(int)
* @see #readString(int, Charset)
*/
protected ExifTag getTag() {
return mTag;
}
/**
 * Gets the number of tags in the current IFD area.
*/
protected int getTagCountInCurrentIfd() {
return mNumOfTagInIfd;
}
/**
* Gets the ID of current IFD.
*
* @see IfdId#TYPE_IFD_0
* @see IfdId#TYPE_IFD_1
* @see IfdId#TYPE_IFD_GPS
* @see IfdId#TYPE_IFD_INTEROPERABILITY
* @see IfdId#TYPE_IFD_EXIF
*/
protected int getCurrentIfd() {
return mIfdType;
}
/**
* When receiving {@link #EVENT_UNCOMPRESSED_STRIP}, call this function to
* get the index of this strip.
*
* @see #getStripCount()
*/
protected int getStripIndex() {
return mImageEvent.stripIndex;
}
/**
* When receiving {@link #EVENT_UNCOMPRESSED_STRIP}, call this function to
 * get the number of strips.
*
* @see #getStripIndex()
*/
protected int getStripCount() {
return mStripCount;
}
/**
* When receiving {@link #EVENT_UNCOMPRESSED_STRIP}, call this function to
* get the strip size.
*/
protected int getStripSize() {
if (mStripSizeTag == null)
return 0;
return (int) mStripSizeTag.getValueAt(0);
}
/**
* When receiving {@link #EVENT_COMPRESSED_IMAGE}, call this function to get
* the image data size.
*/
protected int getCompressedImageSize() {
if (mJpegSizeTag == null) {
return 0;
}
return (int) mJpegSizeTag.getValueAt(0);
}
private void skipTo(int offset) throws IOException {
mTiffStream.skipTo(offset);
while (!mCorrespondingEvent.isEmpty() && mCorrespondingEvent.firstKey() < offset) {
mCorrespondingEvent.pollFirstEntry();
}
}
/**
* When getting {@link #EVENT_NEW_TAG} in the tag area of IFD, the tag may
* not contain the value if the size of the value is greater than 4 bytes.
* When the value is not available here, call this method so that the parser
* will emit {@link #EVENT_VALUE_OF_REGISTERED_TAG} when it reaches the area
* where the value is located.
*
* @see #EVENT_VALUE_OF_REGISTERED_TAG
*/
protected void registerForTagValue(ExifTag tag) {
if (tag.getOffset() >= mTiffStream.getReadByteCount()) {
mCorrespondingEvent.put(tag.getOffset(), new ExifTagEvent(tag, true));
}
}
private void registerIfd(int ifdType, long offset) {
// Cast unsigned int to int since the offset is always smaller
// than the size of APP1 (65536)
mCorrespondingEvent.put((int) offset, new IfdEvent(ifdType, isIfdRequested(ifdType)));
}
private void registerCompressedImage(long offset) {
mCorrespondingEvent.put((int) offset, new ImageEvent(EVENT_COMPRESSED_IMAGE));
}
private void registerUncompressedStrip(int stripIndex, long offset) {
mCorrespondingEvent.put((int) offset, new ImageEvent(EVENT_UNCOMPRESSED_STRIP
, stripIndex));
}
private ExifTag readTag() throws IOException, ExifInvalidFormatException {
short tagId = mTiffStream.readShort();
short dataFormat = mTiffStream.readShort();
long numOfComp = mTiffStream.readUnsignedInt();
if (numOfComp > Integer.MAX_VALUE) {
throw new ExifInvalidFormatException(
"Number of component is larger then Integer.MAX_VALUE");
}
        // Some invalid image files contain an invalid data type. Ignore those tags
if (!ExifTag.isValidType(dataFormat)) {
Log.w(TAG, String.format("Tag %04x: Invalid data type %d", tagId, dataFormat));
mTiffStream.skip(4);
return null;
}
// TODO: handle numOfComp overflow
ExifTag tag = new ExifTag(tagId, dataFormat, (int) numOfComp, mIfdType,
((int) numOfComp) != ExifTag.SIZE_UNDEFINED);
int dataSize = tag.getDataSize();
if (dataSize > 4) {
long offset = mTiffStream.readUnsignedInt();
if (offset > Integer.MAX_VALUE) {
throw new ExifInvalidFormatException(
"offset is larger then Integer.MAX_VALUE");
}
// Some invalid images put some undefined data before IFD0.
// Read the data here.
if ((offset < mIfd0Position) && (dataFormat == ExifTag.TYPE_UNDEFINED)) {
byte[] buf = new byte[(int) numOfComp];
System.arraycopy(mDataAboveIfd0, (int) offset - DEFAULT_IFD0_OFFSET,
buf, 0, (int) numOfComp);
tag.setValue(buf);
} else {
tag.setOffset((int) offset);
}
} else {
boolean defCount = tag.hasDefinedCount();
            // Clear the defined-count flag so we can add \0 to non-terminated strings
tag.setHasDefinedCount(false);
// Read value
readFullTagValue(tag);
tag.setHasDefinedCount(defCount);
mTiffStream.skip(4 - dataSize);
// Set the offset to the position of value.
tag.setOffset(mTiffStream.getReadByteCount() - 4);
}
return tag;
}
/**
 * Checks the tag; if the tag is one of the offset tags that point to an IFD
 * or image the caller is interested in, registers that IFD or image.
*/
private void checkOffsetOrImageTag(ExifTag tag) {
        // Some invalidly formatted images contain tags with 0 size.
if (tag.getComponentCount() == 0) {
return;
}
short tid = tag.getTagId();
int ifd = tag.getIfd();
if (tid == TAG_EXIF_IFD && checkAllowed(ifd, ExifInterface.TAG_EXIF_IFD)) {
if (isIfdRequested(IfdId.TYPE_IFD_EXIF)
|| isIfdRequested(IfdId.TYPE_IFD_INTEROPERABILITY)) {
registerIfd(IfdId.TYPE_IFD_EXIF, tag.getValueAt(0));
}
} else if (tid == TAG_GPS_IFD && checkAllowed(ifd, ExifInterface.TAG_GPS_IFD)) {
if (isIfdRequested(IfdId.TYPE_IFD_GPS)) {
registerIfd(IfdId.TYPE_IFD_GPS, tag.getValueAt(0));
}
} else if (tid == TAG_INTEROPERABILITY_IFD
&& checkAllowed(ifd, ExifInterface.TAG_INTEROPERABILITY_IFD)) {
if (isIfdRequested(IfdId.TYPE_IFD_INTEROPERABILITY)) {
registerIfd(IfdId.TYPE_IFD_INTEROPERABILITY, tag.getValueAt(0));
}
} else if (tid == TAG_JPEG_INTERCHANGE_FORMAT
&& checkAllowed(ifd, ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT)) {
if (isThumbnailRequested()) {
registerCompressedImage(tag.getValueAt(0));
}
} else if (tid == TAG_JPEG_INTERCHANGE_FORMAT_LENGTH
&& checkAllowed(ifd, ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH)) {
if (isThumbnailRequested()) {
mJpegSizeTag = tag;
}
} else if (tid == TAG_STRIP_OFFSETS && checkAllowed(ifd, ExifInterface.TAG_STRIP_OFFSETS)) {
if (isThumbnailRequested()) {
if (tag.hasValue()) {
for (int i = 0; i < tag.getComponentCount(); i++) {
                    registerUncompressedStrip(i, tag.getValueAt(i));
}
} else {
mCorrespondingEvent.put(tag.getOffset(), new ExifTagEvent(tag, false));
}
}
} else if (tid == TAG_STRIP_BYTE_COUNTS
&& checkAllowed(ifd, ExifInterface.TAG_STRIP_BYTE_COUNTS)
                && isThumbnailRequested() && tag.hasValue()) {
mStripSizeTag = tag;
}
}
private boolean checkAllowed(int ifd, int tagId) {
int info = mInterface.getTagInfo().get(tagId);
if (info == ExifInterface.DEFINITION_NULL) {
return false;
}
return ExifInterface.isIfdAllowed(info, ifd);
}
protected void readFullTagValue(ExifTag tag) throws IOException {
        // Some invalid images contain tags with a wrong size; check it here
short type = tag.getDataType();
if (type == ExifTag.TYPE_ASCII || type == ExifTag.TYPE_UNDEFINED ||
type == ExifTag.TYPE_UNSIGNED_BYTE) {
int size = tag.getComponentCount();
if (mCorrespondingEvent.size() > 0) {
if (mCorrespondingEvent.firstEntry().getKey() < mTiffStream.getReadByteCount()
+ size) {
Object event = mCorrespondingEvent.firstEntry().getValue();
if (event instanceof ImageEvent) {
// Tag value overlaps thumbnail, ignore thumbnail.
Log.w(TAG, "Thumbnail overlaps value for tag: \n" + tag.toString());
Entry<Integer, Object> entry = mCorrespondingEvent.pollFirstEntry();
Log.w(TAG, "Invalid thumbnail offset: " + entry.getKey());
} else {
// Tag value overlaps another tag, shorten count
if (event instanceof IfdEvent) {
Log.w(TAG, "Ifd " + ((IfdEvent) event).ifd
+ " overlaps value for tag: \n" + tag.toString());
} else if (event instanceof ExifTagEvent) {
Log.w(TAG, "Tag value for tag: \n"
+ ((ExifTagEvent) event).tag.toString()
+ " overlaps value for tag: \n" + tag.toString());
}
size = mCorrespondingEvent.firstEntry().getKey()
- mTiffStream.getReadByteCount();
Log.w(TAG, "Invalid size of tag: \n" + tag.toString()
+ " setting count to: " + size);
tag.forceSetComponentCount(size);
}
}
}
}
switch (tag.getDataType()) {
case ExifTag.TYPE_UNSIGNED_BYTE:
case ExifTag.TYPE_UNDEFINED: {
byte buf[] = new byte[tag.getComponentCount()];
read(buf);
tag.setValue(buf);
}
break;
case ExifTag.TYPE_ASCII:
tag.setValue(readString(tag.getComponentCount()));
break;
case ExifTag.TYPE_UNSIGNED_LONG: {
long value[] = new long[tag.getComponentCount()];
for (int i = 0, n = value.length; i < n; i++) {
value[i] = readUnsignedLong();
}
tag.setValue(value);
}
break;
case ExifTag.TYPE_UNSIGNED_RATIONAL: {
Rational value[] = new Rational[tag.getComponentCount()];
for (int i = 0, n = value.length; i < n; i++) {
value[i] = readUnsignedRational();
}
tag.setValue(value);
}
break;
case ExifTag.TYPE_UNSIGNED_SHORT: {
int value[] = new int[tag.getComponentCount()];
for (int i = 0, n = value.length; i < n; i++) {
value[i] = readUnsignedShort();
}
tag.setValue(value);
}
break;
case ExifTag.TYPE_LONG: {
int value[] = new int[tag.getComponentCount()];
for (int i = 0, n = value.length; i < n; i++) {
value[i] = readLong();
}
tag.setValue(value);
}
break;
case ExifTag.TYPE_RATIONAL: {
Rational value[] = new Rational[tag.getComponentCount()];
for (int i = 0, n = value.length; i < n; i++) {
value[i] = readRational();
}
tag.setValue(value);
}
break;
}
if (LOGV) {
Log.v(TAG, "\n" + tag.toString());
}
}
private void parseTiffHeader() throws IOException,
ExifInvalidFormatException {
short byteOrder = mTiffStream.readShort();
if (LITTLE_ENDIAN_TAG == byteOrder) {
mTiffStream.setByteOrder(ByteOrder.LITTLE_ENDIAN);
} else if (BIG_ENDIAN_TAG == byteOrder) {
mTiffStream.setByteOrder(ByteOrder.BIG_ENDIAN);
} else {
throw new ExifInvalidFormatException("Invalid TIFF header");
}
if (mTiffStream.readShort() != TIFF_HEADER_TAIL) {
throw new ExifInvalidFormatException("Invalid TIFF header");
}
}
private boolean seekTiffData(InputStream inputStream) throws IOException,
ExifInvalidFormatException {
CountedDataInputStream dataStream = new CountedDataInputStream(inputStream);
if (dataStream.readShort() != JpegHeader.SOI) {
throw new ExifInvalidFormatException("Invalid JPEG format");
}
short marker = dataStream.readShort();
while (marker != JpegHeader.EOI
&& !JpegHeader.isSofMarker(marker)) {
int length = dataStream.readUnsignedShort();
            // Some invalidly formatted images contain multiple APP1 segments;
// try to find the one with Exif data.
if (marker == JpegHeader.APP1) {
int header = 0;
short headerTail = 0;
if (length >= 8) {
header = dataStream.readInt();
headerTail = dataStream.readShort();
length -= 6;
if (header == EXIF_HEADER && headerTail == EXIF_HEADER_TAIL) {
mTiffStartPosition = dataStream.getReadByteCount();
mApp1End = length;
mOffsetToApp1EndFromSOF = mTiffStartPosition + mApp1End;
return true;
}
}
}
if (length < 2 || (length - 2) != dataStream.skip(length - 2)) {
Log.w(TAG, "Invalid JPEG format.");
return false;
}
marker = dataStream.readShort();
}
return false;
}
protected int getOffsetToExifEndFromSOF() {
return mOffsetToApp1EndFromSOF;
}
protected int getTiffStartPosition() {
return mTiffStartPosition;
}
/**
* Reads bytes from the InputStream.
*/
protected int read(byte[] buffer, int offset, int length) throws IOException {
return mTiffStream.read(buffer, offset, length);
}
/**
* Equivalent to read(buffer, 0, buffer.length).
*/
protected int read(byte[] buffer) throws IOException {
return mTiffStream.read(buffer);
}
/**
* Reads a String from the InputStream with US-ASCII charset. The parser
 * will read n bytes and convert them to an ASCII string. This is used for
* reading values of type {@link ExifTag#TYPE_ASCII}.
*/
protected String readString(int n) throws IOException {
return readString(n, US_ASCII);
}
/**
* Reads a String from the InputStream with the given charset. The parser
 * will read n bytes and convert them to a string. This is used for reading
* values of type {@link ExifTag#TYPE_ASCII}.
*/
protected String readString(int n, Charset charset) throws IOException {
if (n > 0) {
return mTiffStream.readString(n, charset);
} else {
return "";
}
}
/**
* Reads value of type {@link ExifTag#TYPE_UNSIGNED_SHORT} from the
* InputStream.
*/
protected int readUnsignedShort() throws IOException {
return mTiffStream.readShort() & 0xffff;
}
/**
* Reads value of type {@link ExifTag#TYPE_UNSIGNED_LONG} from the
* InputStream.
*/
protected long readUnsignedLong() throws IOException {
return readLong() & 0xffffffffL;
}
/**
* Reads value of type {@link ExifTag#TYPE_UNSIGNED_RATIONAL} from the
* InputStream.
*/
protected Rational readUnsignedRational() throws IOException {
long nomi = readUnsignedLong();
long denomi = readUnsignedLong();
return new Rational(nomi, denomi);
}
/**
* Reads value of type {@link ExifTag#TYPE_LONG} from the InputStream.
*/
protected int readLong() throws IOException {
return mTiffStream.readInt();
}
/**
* Reads value of type {@link ExifTag#TYPE_RATIONAL} from the InputStream.
*/
protected Rational readRational() throws IOException {
int nomi = readLong();
int denomi = readLong();
return new Rational(nomi, denomi);
}
private static class ImageEvent {
int stripIndex;
int type;
ImageEvent(int type) {
this.stripIndex = 0;
this.type = type;
}
ImageEvent(int type, int stripIndex) {
this.type = type;
this.stripIndex = stripIndex;
}
}
private static class IfdEvent {
int ifd;
boolean isRequested;
IfdEvent(int ifd, boolean isInterestedIfd) {
this.ifd = ifd;
this.isRequested = isInterestedIfd;
}
}
private static class ExifTagEvent {
ExifTag tag;
boolean isRequested;
ExifTagEvent(ExifTag tag, boolean isRequireByUser) {
this.tag = tag;
this.isRequested = isRequireByUser;
}
}
/**
* Gets the byte order of the current InputStream.
*/
protected ByteOrder getByteOrder() {
return mTiffStream.getByteOrder();
}
}
|
|
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yms.app.yob;
import org.onosproject.yangutils.datamodel.RpcNotificationContainer;
import org.onosproject.yangutils.datamodel.YangDerivedInfo;
import org.onosproject.yangutils.datamodel.YangIdentity;
import org.onosproject.yangutils.datamodel.YangIdentityRef;
import org.onosproject.yangutils.datamodel.YangLeaf;
import org.onosproject.yangutils.datamodel.YangLeafList;
import org.onosproject.yangutils.datamodel.YangLeafRef;
import org.onosproject.yangutils.datamodel.YangNode;
import org.onosproject.yangutils.datamodel.YangSchemaNode;
import org.onosproject.yangutils.datamodel.YangSchemaNodeContextInfo;
import org.onosproject.yangutils.datamodel.YangType;
import org.onosproject.yangutils.datamodel.utils.builtindatatype.YangDataTypes;
import org.onosproject.yms.app.ydt.YdtExtendedContext;
import org.onosproject.yms.app.yob.exception.YobException;
import org.onosproject.yms.app.ysr.YangSchemaRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Base64;
import static org.onosproject.yangutils.datamodel.YangSchemaNodeType.YANG_AUGMENT_NODE;
import static org.onosproject.yangutils.translator.tojava.utils.JavaIdentifierSyntax.getEnumJavaAttribute;
import static org.onosproject.yms.app.ydt.AppType.YOB;
import static org.onosproject.yms.app.yob.YobConstants.DEFAULT;
import static org.onosproject.yms.app.yob.YobConstants.EVENT;
import static org.onosproject.yms.app.yob.YobConstants.EVENT_SUBJECT;
import static org.onosproject.yms.app.yob.YobConstants.E_DATA_TYPE_NOT_SUPPORT;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_CREATE_OBJ;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_GET_FIELD;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_GET_METHOD;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_INVOKE_METHOD;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_LOAD_CLASS;
import static org.onosproject.yms.app.yob.YobConstants.E_FAIL_TO_LOAD_CONSTRUCTOR;
import static org.onosproject.yms.app.yob.YobConstants.E_INVALID_DATA_TREE;
import static org.onosproject.yms.app.yob.YobConstants.E_INVALID_EMPTY_DATA;
import static org.onosproject.yms.app.yob.YobConstants.FROM_STRING;
import static org.onosproject.yms.app.yob.YobConstants.LEAF_IDENTIFIER;
import static org.onosproject.yms.app.yob.YobConstants.L_FAIL_TO_GET_FIELD;
import static org.onosproject.yms.app.yob.YobConstants.L_FAIL_TO_GET_METHOD;
import static org.onosproject.yms.app.yob.YobConstants.L_FAIL_TO_INVOKE_METHOD;
import static org.onosproject.yms.app.yob.YobConstants.L_FAIL_TO_LOAD_CLASS;
import static org.onosproject.yms.app.yob.YobConstants.OF;
import static org.onosproject.yms.app.yob.YobConstants.OP_PARAM;
import static org.onosproject.yms.app.yob.YobConstants.PERIOD;
import static org.onosproject.yms.app.yob.YobConstants.SELECT_LEAF;
import static org.onosproject.yms.app.yob.YobConstants.TYPE;
import static org.onosproject.yms.app.yob.YobConstants.VALUE_OF;
/**
* Utils to support object creation.
*/
public final class YobUtils {
private static final Logger log = LoggerFactory.getLogger(YobUtils.class);
// no instantiation
private YobUtils() {
}
/**
 * Sets data from a string value on the parent builder via its setter method.
 *
 * @param type                YANG data type of the leaf
 * @param leafValue           string value to be set through the setter method
 * @param parentSetterMethod  setter method to be invoked on the parent
 *                            builder object
 * @param parentBuilderObject parent builder object on which the setter
 *                            method is invoked
 * @param ydtExtendedContext  YDT context used to get application-related
 *                            information maintained in YDT
* @throws InvocationTargetException if failed to invoke method
* @throws IllegalAccessException if member cannot be accessed
* @throws NoSuchMethodException if method is not found
*/
static void setDataFromStringValue(YangDataTypes type, String leafValue,
Method parentSetterMethod,
Object parentBuilderObject,
YdtExtendedContext ydtExtendedContext)
throws InvocationTargetException, IllegalAccessException,
NoSuchMethodException {
switch (type) {
case INT8:
parentSetterMethod.invoke(parentBuilderObject,
Byte.parseByte(leafValue));
break;
case UINT8:
case INT16:
parentSetterMethod.invoke(parentBuilderObject,
Short.parseShort(leafValue));
break;
case UINT16:
case INT32:
parentSetterMethod.invoke(parentBuilderObject,
Integer.parseInt(leafValue));
break;
case UINT32:
case INT64:
parentSetterMethod.invoke(parentBuilderObject,
Long.parseLong(leafValue));
break;
case UINT64:
parentSetterMethod.invoke(parentBuilderObject,
new BigInteger(leafValue));
break;
case EMPTY:
if (leafValue == null || "".equals(leafValue)) {
parentSetterMethod.invoke(parentBuilderObject, true);
} else {
log.info(E_INVALID_EMPTY_DATA);
}
break;
case BOOLEAN:
parentSetterMethod.invoke(parentBuilderObject,
Boolean.parseBoolean(leafValue));
break;
case STRING:
parentSetterMethod.invoke(parentBuilderObject, leafValue);
break;
case BINARY:
byte[] value = Base64.getDecoder().decode(leafValue);
parentSetterMethod.invoke(parentBuilderObject, value);
break;
case BITS:
parseBitSetTypeInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue);
break;
case DECIMAL64:
parentSetterMethod.invoke(parentBuilderObject,
new BigDecimal(leafValue));
break;
case DERIVED:
parseDerivedTypeInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue, false);
break;
case IDENTITYREF:
parseIdentityRefInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue);
break;
case UNION:
parseDerivedTypeInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue, false);
break;
case LEAFREF:
parseLeafRefTypeInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue);
break;
case ENUMERATION:
parseDerivedTypeInfo(ydtExtendedContext, parentSetterMethod,
parentBuilderObject, leafValue, true);
break;
default:
log.error(E_DATA_TYPE_NOT_SUPPORT);
}
}
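    /* Minimal illustration of the call above for a simple built-in type; it reduces to a
       reflective setter invocation. The names "FooBuilder", "leafCount", "fooBuilder" and
       "ydtContext" are hypothetical placeholders, not part of any generated code:

           Method setter = FooBuilder.class.getMethod("leafCount", int.class);
           YobUtils.setDataFromStringValue(YangDataTypes.INT32, "42",
                                           setter, fooBuilder, ydtContext);
    */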
/**
* Sets the select leaf flag for leaf.
*
* @param builderClass builder in which the select leaf flag needs to be
* set
* @param leafNode YANG data tree leaf node
* @param schemaRegistry YANG schema registry
* @param builderObject the parent build object on which to invoke
* the method
* @throws InvocationTargetException if method could not be invoked
* @throws IllegalAccessException if method could not be accessed
* @throws NoSuchMethodException if method does not exist
*/
static void setSelectLeaf(Class builderClass,
YdtExtendedContext leafNode,
YangSchemaRegistry schemaRegistry,
Object builderObject) throws NoSuchMethodException,
InvocationTargetException, IllegalAccessException {
YangSchemaNode parentSchema = ((YdtExtendedContext) leafNode
.getParent()).getYangSchemaNode();
while (parentSchema.getReferredSchema() != null) {
parentSchema = parentSchema.getReferredSchema();
}
while (((YangNode) parentSchema).getParent() != null) {
parentSchema = ((YangNode) parentSchema).getParent();
}
String qualName = getQualifiedinterface(parentSchema);
Class<?> regClass = schemaRegistry.getRegisteredClass(parentSchema);
if (regClass == null) {
throw new YobException(E_FAIL_TO_LOAD_CLASS + qualName);
}
Class<?> interfaceClass = null;
try {
interfaceClass = regClass.getClassLoader().loadClass(qualName);
} catch (ClassNotFoundException e) {
log.info(E_FAIL_TO_LOAD_CLASS, qualName);
return;
}
Class<?>[] innerClasses = interfaceClass.getClasses();
for (Class<?> innerEnumClass : innerClasses) {
if (innerEnumClass.getSimpleName().equals(LEAF_IDENTIFIER)) {
Method valueOfMethod = innerEnumClass
.getDeclaredMethod(VALUE_OF, String.class);
String leafName = leafNode.getYangSchemaNode()
.getJavaAttributeName().toUpperCase();
Object obj = valueOfMethod.invoke(null, leafName);
Method selectLeafMethod = builderClass
.getDeclaredMethod(SELECT_LEAF, innerEnumClass);
selectLeafMethod.invoke(builderObject, obj);
break;
}
}
}
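    /*
     * Illustrative sketch (not part of the original source): setSelectLeaf() above
     * assumes the generated module interface declares a nested "LeafIdentifier"
     * enum and that the builder exposes selectLeaf(LeafIdentifier). Resolving an
     * enum constant by name reduces to the reflective pattern below; the constant
     * name "MTU" is hypothetical.
     */
    private static Object leafIdentifierSketch(Class<?> leafIdentifierEnum)
            throws NoSuchMethodException, InvocationTargetException,
            IllegalAccessException {
        Method valueOf = leafIdentifierEnum.getDeclaredMethod(VALUE_OF, String.class);
        return valueOf.invoke(null, "MTU"); // hypothetical leaf name in upper case
    }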
/**
     * Sets a derived (or union/enumeration) type leaf value, parsed from its
     * string form, via the parent setter method.
*
* @param leafValue value to be set in method
* @param parentSetterMethod the parent setter method to be invoked
* @param parentBuilderObject the parent build object on which to invoke the
* method
* @param ydtExtendedContext application context
* @param isEnum flag to check whether type is enum or derived
* @throws InvocationTargetException if failed to invoke method
* @throws IllegalAccessException if member cannot be accessed
* @throws NoSuchMethodException if the required method is not found
*/
private static void parseDerivedTypeInfo(YdtExtendedContext ydtExtendedContext,
Method parentSetterMethod,
Object parentBuilderObject,
String leafValue, boolean isEnum)
throws InvocationTargetException, IllegalAccessException,
NoSuchMethodException {
Class<?> childSetClass = null;
Constructor<?> childConstructor = null;
Object childValue = null;
Object childObject = null;
Method childMethod = null;
YangSchemaNode yangJavaModule = ydtExtendedContext.getYangSchemaNode();
while (yangJavaModule.getReferredSchema() != null) {
yangJavaModule = yangJavaModule.getReferredSchema();
}
String qualifiedClassName = yangJavaModule.getJavaPackage() + PERIOD +
getCapitalCase(yangJavaModule.getJavaClassNameOrBuiltInType());
ClassLoader classLoader = getClassLoader(null, qualifiedClassName,
ydtExtendedContext, null);
try {
childSetClass = classLoader.loadClass(qualifiedClassName);
} catch (ClassNotFoundException e) {
log.error(L_FAIL_TO_LOAD_CLASS, qualifiedClassName);
}
if (!isEnum) {
if (childSetClass != null) {
childConstructor = childSetClass.getDeclaredConstructor();
}
if (childConstructor != null) {
childConstructor.setAccessible(true);
}
try {
if (childConstructor != null) {
childObject = childConstructor.newInstance();
}
} catch (InstantiationException e) {
log.error(E_FAIL_TO_LOAD_CONSTRUCTOR, qualifiedClassName);
}
if (childSetClass != null) {
childMethod = childSetClass
.getDeclaredMethod(FROM_STRING, String.class);
}
} else {
if (childSetClass != null) {
childMethod = childSetClass.getDeclaredMethod(OF, String.class);
}
}
if (childMethod != null) {
childValue = childMethod.invoke(childObject, leafValue);
}
parentSetterMethod.invoke(parentBuilderObject, childValue);
}
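    /*
     * Illustrative sketch (not part of the original source): derived and union
     * types are expected to expose a "fromString" factory (enums expose "of").
     * The snippet below shows that assumed contract against a hypothetical
     * generated type class loaded by name; the method above additionally supports
     * invoking the factory on a freshly constructed instance.
     */
    private static Object fromStringSketch(ClassLoader loader, String typeClassName,
                                           String leafValue) throws Exception {
        Class<?> typeClass = loader.loadClass(typeClassName); // hypothetical class name
        Method factory = typeClass.getDeclaredMethod(FROM_STRING, String.class);
        return factory.invoke(null, leafValue); // assumes a static factory
    }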
/**
     * Sets a bits-type leaf value, parsed from its string form, via the parent
     * setter method.
*
* @param leafValue value to be set in method
* @param parentSetterMethod the parent setter method to be invoked
* @param parentBuilderObject the parent build object on which to invoke the
* method
* @param ydtExtendedContext application context
* @throws InvocationTargetException if failed to invoke method
* @throws IllegalAccessException if member cannot be accessed
* @throws NoSuchMethodException if the required method is not found
*/
private static void parseBitSetTypeInfo(YdtExtendedContext ydtExtendedContext,
Method parentSetterMethod,
Object parentBuilderObject,
String leafValue)
throws InvocationTargetException, IllegalAccessException,
NoSuchMethodException {
Class<?> childSetClass = null;
Object childValue = null;
Object childObject = null;
Method childMethod = null;
YangSchemaNode schemaNode = ydtExtendedContext.getYangSchemaNode();
while (schemaNode.getReferredSchema() != null) {
schemaNode = schemaNode.getReferredSchema();
}
YangSchemaNode parentSchema = ((YdtExtendedContext) ydtExtendedContext
.getParent()).getYangSchemaNode();
String qualifiedClassName = parentSchema.getJavaPackage() + PERIOD +
parentSchema.getJavaAttributeName().toLowerCase() +
PERIOD + getCapitalCase(schemaNode.getJavaAttributeName());
ClassLoader classLoader = getClassLoader(null, qualifiedClassName,
ydtExtendedContext, null);
try {
childSetClass = classLoader.loadClass(qualifiedClassName);
} catch (ClassNotFoundException e) {
log.error(L_FAIL_TO_LOAD_CLASS, qualifiedClassName);
}
if (childSetClass != null) {
childMethod = childSetClass.getDeclaredMethod(FROM_STRING, String.class);
}
if (childMethod != null) {
childValue = childMethod.invoke(childObject, leafValue);
}
parentSetterMethod.invoke(parentBuilderObject, childValue);
}
/**
     * Sets a leafref leaf value, parsed from its string form, via the parent
     * setter method after resolving the referred type.
*
* @param leafValue leaf value to be set
* @param parentSetterMethod the parent setter method to be invoked
* @param parentBuilderObject the parent build object on which to invoke
* the method
* @param ydtExtendedContext application context
* @throws InvocationTargetException if method could not be invoked
* @throws IllegalAccessException if method could not be accessed
* @throws NoSuchMethodException if method does not exist
*/
private static void parseLeafRefTypeInfo(YdtExtendedContext ydtExtendedContext,
Method parentSetterMethod,
Object parentBuilderObject,
String leafValue)
throws InvocationTargetException, IllegalAccessException,
NoSuchMethodException {
YangSchemaNode schemaNode = ydtExtendedContext.getYangSchemaNode();
while (schemaNode.getReferredSchema() != null) {
schemaNode = schemaNode.getReferredSchema();
}
YangLeafRef leafRef;
if (schemaNode instanceof YangLeaf) {
leafRef = (YangLeafRef) ((YangLeaf) schemaNode)
.getDataType().getDataTypeExtendedInfo();
} else {
leafRef = (YangLeafRef) ((YangLeafList) schemaNode)
.getDataType().getDataTypeExtendedInfo();
}
YangType type = leafRef.getEffectiveDataType();
if (type.getDataType() == YangDataTypes.DERIVED &&
schemaNode.getJavaPackage().equals(YobConstants.JAVA_LANG)) {
            /*
             * If the leaf is defined inside a grouping, its generated return
             * type is Object; when its actual type is a derived type, resolve
             * the effective built-in type and set the value from that.
             */
YangDerivedInfo derivedInfo = (YangDerivedInfo) leafRef
.getEffectiveDataType()
.getDataTypeExtendedInfo();
YobUtils.setDataFromStringValue(derivedInfo.getEffectiveBuiltInType(),
leafValue, parentSetterMethod,
parentBuilderObject,
ydtExtendedContext);
} else {
YobUtils.setDataFromStringValue(type.getDataType(),
leafValue, parentSetterMethod,
parentBuilderObject,
ydtExtendedContext);
}
}
    /**
     * Returns the class loader to be used for the given YDT node.
     *
     * @param registry           YANG schema registry
     * @param qualifiedClassName qualified class name
     * @param curNode            current YDT context node
     * @param rootNode           application root node
     * @return class loader for the current node
     */
static ClassLoader getClassLoader(YangSchemaRegistry registry,
String qualifiedClassName,
YdtExtendedContext curNode,
YdtExtendedContext rootNode) {
if (rootNode != null && curNode == rootNode) {
YangSchemaNode curSchemaNode = curNode.getYangSchemaNode();
while (!(curSchemaNode instanceof RpcNotificationContainer)) {
curNode = (YdtExtendedContext) curNode.getParent();
if (curNode == null) {
throw new YobException(E_INVALID_DATA_TREE);
}
curSchemaNode = curNode.getYangSchemaNode();
}
Class<?> regClass = registry.getRegisteredClass(curSchemaNode);
return regClass.getClassLoader();
}
YdtExtendedContext parent = (YdtExtendedContext) curNode.getParent();
YobWorkBench parentBuilderContainer = (YobWorkBench) parent.getAppInfo(YOB);
Object parentObj = parentBuilderContainer.getParentBuilder(curNode,
registry);
return parentObj.getClass().getClassLoader();
}
/**
* Returns the class loader to be used for the switched context schema node.
*
* @param curLoader current context class loader
* @param context switched context
* @param registry schema registry
* @return class loader to be used for the switched context schema node
*/
static ClassLoader getTargetClassLoader(ClassLoader curLoader,
YangSchemaNodeContextInfo context,
YangSchemaRegistry registry) {
YangSchemaNode augmentSchemaNode = context.getContextSwitchedNode();
if (augmentSchemaNode.getYangSchemaNodeType() == YANG_AUGMENT_NODE) {
YangSchemaNode moduleNode = ((YangNode) augmentSchemaNode).getParent();
Class<?> moduleClass = registry.getRegisteredClass(moduleNode);
if (moduleClass == null) {
throw new YobException(E_FAIL_TO_LOAD_CLASS + moduleNode
.getJavaClassNameOrBuiltInType());
}
return moduleClass.getClassLoader();
}
return curLoader;
}
/**
* Returns the schema node's module interface.
*
* @param schemaNode YANG schema node
* @param schemaRegistry YANG schema registry
* @return schema node's module interface
*/
public static Class<?> getModuleInterface(YangSchemaNode schemaNode,
YangSchemaRegistry schemaRegistry) {
YangNode yangNode = (YangNode) schemaNode;
while (yangNode.getReferredSchema() != null) {
yangNode = (YangNode) yangNode.getReferredSchema();
}
while (yangNode.getParent() != null) {
yangNode = yangNode.getParent();
}
String qualName = getQualifiedinterface(yangNode);
Class<?> regClass = schemaRegistry.getRegisteredClass(yangNode);
if (regClass == null) {
throw new YobException(E_FAIL_TO_LOAD_CLASS + qualName);
}
try {
return regClass.getClassLoader().loadClass(qualName);
} catch (ClassNotFoundException e) {
log.error(L_FAIL_TO_LOAD_CLASS, qualName);
}
return null;
}
/**
* Returns the qualified default / op param class.
*
* @param schemaNode schema node of the required class
* @return qualified default / op param class name
*/
static String getQualifiedDefaultClass(YangSchemaNode schemaNode) {
String packageName = schemaNode.getJavaPackage();
String className = getCapitalCase(
schemaNode.getJavaClassNameOrBuiltInType());
if (schemaNode instanceof RpcNotificationContainer) {
return packageName + PERIOD + className + OP_PARAM;
}
return packageName + PERIOD + DEFAULT + className;
}
/**
* Returns the qualified interface name.
*
* @param schemaNode schema node of the required class
* @return qualified interface name
*/
static String getQualifiedinterface(YangSchemaNode schemaNode) {
String packageName = schemaNode.getJavaPackage();
String className = getCapitalCase(
schemaNode.getJavaClassNameOrBuiltInType());
return packageName + PERIOD + className;
}
    /**
     * Returns the given string with its first letter capitalized.
     *
     * @param name string to be capitalized
     * @return capitalized string
     */
public static String getCapitalCase(String name) {
// TODO: It will be removed if common util is committed.
return name.substring(0, 1).toUpperCase() +
name.substring(1);
}
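    /*
     * Illustrative sketch (not part of the original source): the helpers above
     * encode the generated-code naming convention. The package and node name
     * below are hypothetical placeholders.
     */
    private static void namingConventionSketch() {
        String pkg = "org.example.rev20200101";   // hypothetical java package
        String name = "interfaces";               // hypothetical YANG node name
        String iface = pkg + PERIOD + getCapitalCase(name);              // ...Interfaces
        String impl = pkg + PERIOD + DEFAULT + getCapitalCase(name);     // ...DefaultInterfaces
        String opParam = pkg + PERIOD + getCapitalCase(name) + OP_PARAM; // ...InterfacesOpParam
        log.debug("interface={}, default={}, opParam={}", iface, impl, opParam);
    }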
/**
     * Sets an identityref leaf value, parsed from its string form, via the
     * parent setter method.
*
* @param leafValue leaf value to be set
* @param parentSetterMethod the parent setter method to be invoked
* @param parentBuilderObject the parent build object on which to invoke
* the method
* @param ydtExtendedContext application context
* @throws InvocationTargetException if method could not be invoked
* @throws IllegalAccessException if method could not be accessed
* @throws NoSuchMethodException if method does not exist
*/
private static void parseIdentityRefInfo(YdtExtendedContext
ydtExtendedContext,
Method parentSetterMethod,
Object parentBuilderObject,
String leafValue)
throws InvocationTargetException, IllegalAccessException,
NoSuchMethodException {
Class<?> childSetClass = null;
Object childValue = null;
Method childMethod = null;
YangSchemaNode yangJavaModule = ydtExtendedContext.getYangSchemaNode();
while (yangJavaModule.getReferredSchema() != null) {
yangJavaModule = yangJavaModule.getReferredSchema();
}
String qualifiedClassName = null;
YangType type;
if (yangJavaModule instanceof YangLeaf) {
type = ((YangLeaf) yangJavaModule).getDataType();
} else {
type = ((YangLeafList) yangJavaModule).getDataType();
}
if (type.getDataType() == YangDataTypes.LEAFREF && yangJavaModule
.getJavaPackage().equals(YobConstants.JAVA_LANG)) {
YangLeafRef leafref = ((YangLeafRef) type.getDataTypeExtendedInfo());
YangType effectiveType = leafref.getEffectiveDataType();
if (effectiveType.getDataType() == YangDataTypes.IDENTITYREF) {
YangIdentityRef identityref = ((YangIdentityRef) effectiveType
.getDataTypeExtendedInfo());
YangIdentity identity = identityref.getReferredIdentity();
qualifiedClassName = identity.getJavaPackage() + PERIOD +
getCapitalCase(identity.getJavaClassNameOrBuiltInType());
}
} else {
qualifiedClassName = yangJavaModule.getJavaPackage() + PERIOD +
getCapitalCase(yangJavaModule.getJavaClassNameOrBuiltInType());
}
ClassLoader classLoader = getClassLoader(null, qualifiedClassName,
ydtExtendedContext, null);
try {
childSetClass = classLoader.loadClass(qualifiedClassName);
} catch (ClassNotFoundException e) {
log.error(L_FAIL_TO_LOAD_CLASS, qualifiedClassName);
}
if (childSetClass != null) {
childMethod = childSetClass
.getDeclaredMethod(FROM_STRING, String.class);
}
if (childMethod != null) {
childValue = childMethod.invoke(null, leafValue);
}
parentSetterMethod.invoke(parentBuilderObject, childValue);
}
/**
* Creates and sets default notification object in event subject object.
*
* @param defaultObj default notification object
* @param curNode application context
* @param registry YANG schema registry
* @return notification event subject object
*/
public static Object createAndSetInEventSubjectInstance(Object defaultObj,
YdtExtendedContext curNode,
YangSchemaRegistry registry) {
YangSchemaNode childSchema = ((YdtExtendedContext) curNode
.getFirstChild()).getYangSchemaNode();
String packageName = childSchema.getJavaPackage();
String className = getCapitalCase(curNode.getYangSchemaNode()
.getJavaClassNameOrBuiltInType());
String qualName = packageName + PERIOD + className + EVENT_SUBJECT;
ClassLoader classLoader = YobUtils.getClassLoader(registry, qualName,
curNode, curNode);
Object eventSubObj;
Class<?> eventSubjectClass = null;
try {
eventSubjectClass = classLoader.loadClass(qualName);
eventSubObj = eventSubjectClass.newInstance();
} catch (ClassNotFoundException e) {
log.error(E_FAIL_TO_LOAD_CLASS, className);
throw new YobException(E_FAIL_TO_LOAD_CLASS +
qualName);
} catch (InstantiationException e) {
log.error(E_FAIL_TO_CREATE_OBJ, className);
throw new YobException(E_FAIL_TO_CREATE_OBJ +
eventSubjectClass.getName());
} catch (IllegalAccessException e) {
log.error(L_FAIL_TO_INVOKE_METHOD, className);
throw new YobException(E_FAIL_TO_INVOKE_METHOD +
eventSubjectClass.getName());
}
setInEventSubject(((YdtExtendedContext) curNode.getFirstChild()),
eventSubObj, defaultObj);
return eventSubObj;
}
/**
* Sets the default notification object in event subject class.
*
* @param ydtNode application context
* @param eventSubObj notification event subject instance
* @param defaultObj default notification instance
*/
public static void setInEventSubject(YdtExtendedContext ydtNode,
Object eventSubObj,
Object defaultObj) {
Class<?> eventSubjectClass = eventSubObj.getClass();
String className = eventSubjectClass.getName();
String setter = ydtNode.getYangSchemaNode().getJavaAttributeName();
try {
Class<?> type = null;
Field fieldName = eventSubjectClass.getDeclaredField(setter);
if (fieldName != null) {
type = fieldName.getType();
}
Method method;
method = eventSubjectClass.getDeclaredMethod(setter, type);
method.invoke(eventSubObj, defaultObj);
} catch (NoSuchFieldException e) {
log.error(L_FAIL_TO_GET_FIELD, className);
throw new YobException(E_FAIL_TO_GET_FIELD + className);
} catch (NoSuchMethodException e) {
log.error(L_FAIL_TO_GET_METHOD, className);
throw new YobException(E_FAIL_TO_GET_METHOD + className);
} catch (InvocationTargetException | IllegalAccessException e) {
log.error(L_FAIL_TO_INVOKE_METHOD, className);
throw new YobException(E_FAIL_TO_INVOKE_METHOD + className);
}
}
/**
* Creates an object of notification event class and sets event subject
* in event class.
*
* @param eventSubObj instance of event subject class
* @param curNode current YDT node
* @param registry YANG schema registry
* @return notification event object
*/
public static Object createAndSetInEventInstance(Object eventSubObj,
YdtExtendedContext curNode,
YangSchemaRegistry registry) {
YangSchemaNode childSchema = ((YdtExtendedContext) curNode
.getFirstChild()).getYangSchemaNode();
String packageName = childSchema.getJavaPackage();
String className = getCapitalCase(curNode.getYangSchemaNode()
.getJavaClassNameOrBuiltInType());
String qualName = packageName + PERIOD + className + EVENT;
try {
ClassLoader classLoader = YobUtils.getClassLoader(registry, qualName,
curNode, curNode);
Class<?> eventClass = classLoader.loadClass(qualName);
Class<?>[] innerClasses = eventClass.getClasses();
Object typeObj = null;
for (Class<?> innerEnumClass : innerClasses) {
if (innerEnumClass.getSimpleName().equals(TYPE)) {
Method valueOfMethod = innerEnumClass
.getDeclaredMethod(VALUE_OF, String.class);
String eventType = getEnumJavaAttribute(childSchema.getName())
.toUpperCase();
typeObj = valueOfMethod.invoke(null, eventType);
break;
}
}
Constructor constructor = eventClass
.getDeclaredConstructor(typeObj.getClass(),
eventSubObj.getClass());
constructor.setAccessible(true);
return constructor.newInstance(typeObj, eventSubObj);
} catch (ClassNotFoundException e) {
log.error(L_FAIL_TO_INVOKE_METHOD, className);
throw new YobException(E_FAIL_TO_INVOKE_METHOD + className);
} catch (InstantiationException e) {
log.error(E_FAIL_TO_CREATE_OBJ, className);
throw new YobException(E_FAIL_TO_CREATE_OBJ + className);
} catch (NoSuchMethodException e) {
log.error(L_FAIL_TO_GET_METHOD, className);
throw new YobException(E_FAIL_TO_GET_METHOD + className);
} catch (InvocationTargetException | IllegalAccessException e) {
log.error(L_FAIL_TO_INVOKE_METHOD, className);
throw new YobException(E_FAIL_TO_INVOKE_METHOD + className);
}
}
}
|
|
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.server.reactive;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.netty.channel.AbortedException;
import reactor.test.StepVerifier;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DefaultDataBuffer;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.core.testfixture.io.buffer.LeakAwareDataBufferFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseCookie;
import org.springframework.http.codec.EncoderHttpMessageWriter;
import org.springframework.http.codec.HttpMessageWriter;
import org.springframework.http.codec.json.Jackson2JsonEncoder;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpRequest;
import org.springframework.web.testfixture.http.server.reactive.MockServerHttpResponse;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@link AbstractServerHttpResponse}.
*
* @author Rossen Stoyanchev
* @author Sebastien Deleuze
* @author Brian Clozel
*/
public class ServerHttpResponseTests {
@Test
void writeWith() {
TestServerHttpResponse response = new TestServerHttpResponse();
response.writeWith(Flux.just(wrap("a"), wrap("b"), wrap("c"))).block();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.body.size()).isEqualTo(3);
assertThat(new String(response.body.get(0).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("a");
assertThat(new String(response.body.get(1).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("b");
assertThat(new String(response.body.get(2).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("c");
}
@Test // SPR-14952
void writeAndFlushWithFluxOfDefaultDataBuffer() {
TestServerHttpResponse response = new TestServerHttpResponse();
Flux<Flux<DefaultDataBuffer>> flux = Flux.just(Flux.just(wrap("foo")));
response.writeAndFlushWith(flux).block();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.body.size()).isEqualTo(1);
assertThat(new String(response.body.get(0).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("foo");
}
@Test
void writeWithFluxError() {
IllegalStateException error = new IllegalStateException("boo");
writeWithError(Flux.error(error));
}
@Test
void writeWithMonoError() {
IllegalStateException error = new IllegalStateException("boo");
writeWithError(Mono.error(error));
}
void writeWithError(Publisher<DataBuffer> body) {
TestServerHttpResponse response = new TestServerHttpResponse();
HttpHeaders headers = response.getHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.set(HttpHeaders.CONTENT_ENCODING, "gzip");
headers.setContentLength(12);
response.writeWith(body).onErrorResume(ex -> Mono.empty()).block();
assertThat(response.statusCodeWritten).isFalse();
assertThat(response.headersWritten).isFalse();
assertThat(response.cookiesWritten).isFalse();
assertThat(headers).doesNotContainKeys(HttpHeaders.CONTENT_TYPE, HttpHeaders.CONTENT_LENGTH,
HttpHeaders.CONTENT_ENCODING);
assertThat(response.body.isEmpty()).isTrue();
}
@Test
void setComplete() {
TestServerHttpResponse response = new TestServerHttpResponse();
response.setComplete().block();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.body.isEmpty()).isTrue();
}
@Test
void beforeCommitWithComplete() {
ResponseCookie cookie = ResponseCookie.from("ID", "123").build();
TestServerHttpResponse response = new TestServerHttpResponse();
response.beforeCommit(() -> Mono.fromRunnable(() -> response.getCookies().add(cookie.getName(), cookie)));
response.writeWith(Flux.just(wrap("a"), wrap("b"), wrap("c"))).block();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.getCookies().getFirst("ID")).isSameAs(cookie);
assertThat(response.body.size()).isEqualTo(3);
assertThat(new String(response.body.get(0).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("a");
assertThat(new String(response.body.get(1).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("b");
assertThat(new String(response.body.get(2).asByteBuffer().array(), StandardCharsets.UTF_8)).isEqualTo("c");
}
@Test
void beforeCommitActionWithSetComplete() {
ResponseCookie cookie = ResponseCookie.from("ID", "123").build();
TestServerHttpResponse response = new TestServerHttpResponse();
response.beforeCommit(() -> {
response.getCookies().add(cookie.getName(), cookie);
return Mono.empty();
});
response.setComplete().block();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.body.isEmpty()).isTrue();
assertThat(response.getCookies().getFirst("ID")).isSameAs(cookie);
}
@Test // gh-24186, gh-25753
void beforeCommitErrorShouldLeaveResponseNotCommitted() {
Consumer<Supplier<Mono<Void>>> tester = preCommitAction -> {
TestServerHttpResponse response = new TestServerHttpResponse();
response.getHeaders().setContentType(MediaType.APPLICATION_JSON);
response.getHeaders().setContentLength(3);
response.beforeCommit(preCommitAction);
StepVerifier.create(response.writeWith(Flux.just(wrap("body"))))
.expectErrorMessage("Max sessions")
.verify();
assertThat(response.statusCodeWritten).isFalse();
assertThat(response.headersWritten).isFalse();
assertThat(response.cookiesWritten).isFalse();
assertThat(response.isCommitted()).isFalse();
assertThat(response.getHeaders()).isEmpty();
// Handle the error
response.setStatusCode(HttpStatus.SERVICE_UNAVAILABLE);
StepVerifier.create(response.setComplete()).verifyComplete();
assertThat(response.statusCodeWritten).isTrue();
assertThat(response.headersWritten).isTrue();
assertThat(response.cookiesWritten).isTrue();
assertThat(response.isCommitted()).isTrue();
};
tester.accept(() -> Mono.error(new IllegalStateException("Max sessions")));
tester.accept(() -> {
throw new IllegalStateException("Max sessions");
});
}
@Test // gh-26232
void monoResponseShouldNotLeakIfCancelled() {
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();
MockServerHttpRequest request = MockServerHttpRequest.get("/").build();
MockServerHttpResponse response = new MockServerHttpResponse(bufferFactory);
response.setWriteHandler(flux -> {
throw AbortedException.beforeSend();
});
HttpMessageWriter<Object> messageWriter = new EncoderHttpMessageWriter<>(new Jackson2JsonEncoder());
Mono<Void> result = messageWriter.write(Mono.just(Collections.singletonMap("foo", "bar")),
ResolvableType.forClass(Mono.class), ResolvableType.forClass(Map.class), null,
request, response, Collections.emptyMap());
StepVerifier.create(result).expectError(AbortedException.class).verify();
bufferFactory.checkForLeaks();
}
private DefaultDataBuffer wrap(String a) {
return DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(a.getBytes(StandardCharsets.UTF_8)));
}
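	/*
	 * Illustrative helper (not part of the original test): the assertions above
	 * read buffer content via asByteBuffer().array(), which assumes a heap-backed
	 * buffer. A copy-based variant like the sketch below works for any DataBuffer
	 * (note that it advances the buffer's read position); it is shown only as an
	 * alternative, the original assertions are kept as-is.
	 */
	private static String dumpString(DataBuffer buffer) {
		byte[] bytes = new byte[buffer.readableByteCount()];
		buffer.read(bytes);
		return new String(bytes, StandardCharsets.UTF_8);
	}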
private static class TestServerHttpResponse extends AbstractServerHttpResponse {
private boolean statusCodeWritten;
private boolean headersWritten;
private boolean cookiesWritten;
private final List<DataBuffer> body = new ArrayList<>();
public TestServerHttpResponse() {
super(DefaultDataBufferFactory.sharedInstance);
}
@Override
public <T> T getNativeResponse() {
throw new IllegalStateException("This is a mock. No running server, no native response.");
}
@Override
public void applyStatusCode() {
assertThat(this.statusCodeWritten).isFalse();
this.statusCodeWritten = true;
}
@Override
protected void applyHeaders() {
assertThat(this.headersWritten).isFalse();
this.headersWritten = true;
}
@Override
protected void applyCookies() {
assertThat(this.cookiesWritten).isFalse();
this.cookiesWritten = true;
}
@Override
protected Mono<Void> writeWithInternal(Publisher<? extends DataBuffer> body) {
return Flux.from(body).map(b -> {
this.body.add(b);
return b;
}).then();
}
@Override
protected Mono<Void> writeAndFlushWithInternal(
Publisher<? extends Publisher<? extends DataBuffer>> bodyWithFlush) {
return Flux.from(bodyWithFlush).flatMap(body ->
Flux.from(body).map(b -> {
this.body.add(b);
return b;
})
).then();
}
}
}
|
|
/*
* Copyright (c) 2015-present, Parse, LLC.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.parse;
import android.net.SSLCertificateSocketFactory;
import android.net.SSLSessionCache;
import android.net.http.AndroidHttpClient;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.params.HttpClientParams;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.params.ConnPerRouteBean;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
 * An implementation of ParseHttpClient based on the Apache HttpClient library.
*/
@SuppressWarnings("deprecation")
/** package */ class ParseApacheHttpClient extends ParseHttpClient<HttpUriRequest, HttpResponse> {
private DefaultHttpClient apacheClient;
public ParseApacheHttpClient(int socketOperationTimeout, SSLSessionCache sslSessionCache) {
// Most of this is from AndroidHttpClient#newInstance() except [1] and [2]
HttpParams params = new BasicHttpParams();
// Turn off stale checking. Our connections break all the time anyway,
// and it's not worth it to pay the penalty of checking every time.
HttpConnectionParams.setStaleCheckingEnabled(params, false);
HttpConnectionParams.setConnectionTimeout(params, socketOperationTimeout);
HttpConnectionParams.setSoTimeout(params, socketOperationTimeout);
HttpConnectionParams.setSocketBufferSize(params, 8192);
// Don't handle redirects. We copy the setting from AndroidHttpClient.
// For detail, check https://quip.com/Px8jAxnaun2r
HttpClientParams.setRedirecting(params, false);
// Register standard protocols.
SchemeRegistry schemeRegistry = new SchemeRegistry();
schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
schemeRegistry.register(new Scheme("https", SSLCertificateSocketFactory.getHttpSocketFactory(
socketOperationTimeout, sslSessionCache), 443));
// [1] AndroidHttpClient defaults to 2 connections per route. Not fun. AND you can't set these
// properties after AndroidHttpClient#newInstance(context)
String maxConnectionsStr = System.getProperty("http.maxConnections");
if (maxConnectionsStr != null) {
int maxConnections = Integer.parseInt(maxConnectionsStr);
ConnManagerParams.setMaxConnectionsPerRoute(params, new ConnPerRouteBean(maxConnections));
ConnManagerParams.setMaxTotalConnections(params, maxConnections);
}
// [2] Originally from ParseCommand, check proxy
String host = System.getProperty("http.proxyHost");
String portString = System.getProperty("http.proxyPort");
if (host != null && host.length() != 0 && portString != null && portString.length() != 0) {
int port = Integer.parseInt(portString);
HttpHost proxy = new HttpHost(host, port, "http");
params.setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
}
ClientConnectionManager manager = new ThreadSafeClientConnManager(params, schemeRegistry);
apacheClient = new DefaultHttpClient(manager, params);
}
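  /*
   * Illustrative sketch (not part of the original Parse source): the constructor
   * above reads standard JVM networking properties, so setting them before the
   * client is created routes requests through a proxy and raises the connection
   * limit. Host, port and limit values here are hypothetical.
   */
  private static void configureProxySketch() {
    System.setProperty("http.proxyHost", "proxy.example.com");
    System.setProperty("http.proxyPort", "8080");
    System.setProperty("http.maxConnections", "20");
  }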
@Override
/* package */ ParseHttpResponse executeInternal(ParseHttpRequest parseRequest) throws IOException {
HttpUriRequest apacheRequest = getRequest(parseRequest);
HttpResponse apacheResponse = apacheClient.execute(apacheRequest);
return getResponse(apacheResponse);
}
@Override
/* package */ ParseHttpResponse getResponse(HttpResponse apacheResponse)
throws IOException {
if (apacheResponse == null) {
throw new IllegalArgumentException(
"HttpResponse passed to getResponse should not be null."
);
}
// Status code
int statusCode = apacheResponse.getStatusLine().getStatusCode();
// Content
InputStream content = AndroidHttpClient.getUngzippedContent(apacheResponse.getEntity());
// Total size
int totalSize = -1;
Header[] contentLengthHeader = apacheResponse.getHeaders("Content-Length");
// Some encodings, such as chunked encoding, forbid the
// content-length header.
if (contentLengthHeader.length > 0) {
totalSize = Integer.parseInt(contentLengthHeader[0].getValue());
}
// Reason phrase
String reasonPhrase = apacheResponse.getStatusLine().getReasonPhrase();
// Headers
Map<String, String> headers = new HashMap<>();
for (Header header : apacheResponse.getAllHeaders()) {
headers.put(header.getName(), header.getValue());
}
// Content type
String contentType = null;
HttpEntity entity = apacheResponse.getEntity();
if (entity != null && entity.getContentType() != null) {
contentType = entity.getContentType().getValue();
}
return new ParseHttpResponse.Builder()
.setStatusCode(statusCode)
.setContent(content)
.setTotalSize(totalSize)
.setReasonPhase(reasonPhrase)
.setHeaders(headers)
.setContentType(contentType)
.build();
}
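  /*
   * Illustrative sketch (not part of the original source): getResponse() above
   * assumes a well-formed Content-Length value whenever the header is present.
   * A defensive variant could fall back to -1 (unknown size) on malformed input.
   */
  private static int parseContentLengthSketch(Header[] contentLengthHeader) {
    if (contentLengthHeader == null || contentLengthHeader.length == 0) {
      return -1; // e.g. chunked transfer encoding carries no Content-Length
    }
    try {
      return Integer.parseInt(contentLengthHeader[0].getValue().trim());
    } catch (NumberFormatException e) {
      return -1;
    }
  }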
@Override
/* package */ HttpUriRequest getRequest(ParseHttpRequest parseRequest)
throws IOException {
if (parseRequest == null) {
throw new IllegalArgumentException(
"ParseHttpRequest passed to getApacheRequest should not be null."
);
}
HttpUriRequest apacheRequest;
ParseRequest.Method method = parseRequest.getMethod();
String url = parseRequest.getUrl();
switch (method) {
case GET:
apacheRequest = new HttpGet(url);
break;
case DELETE:
apacheRequest = new HttpDelete(url);
break;
case POST:
apacheRequest = new HttpPost(url);
break;
case PUT:
apacheRequest = new HttpPut(url);
break;
default:
// This case will never be reached since we have already handled this case in
// ParseRequest.newRequest().
throw new IllegalStateException("Unsupported http method " + method.toString());
}
// Set header
for (Map.Entry<String, String> entry : parseRequest.getAllHeaders().entrySet()) {
apacheRequest.setHeader(entry.getKey(), entry.getValue());
}
AndroidHttpClient.modifyRequestToAcceptGzipResponse(apacheRequest);
// Set entity
ParseHttpBody body = parseRequest.getBody();
switch (method) {
case POST:
((HttpPost) apacheRequest).setEntity(new ParseApacheHttpEntity(body));
break;
case PUT:
((HttpPut) apacheRequest).setEntity(new ParseApacheHttpEntity(body));
break;
default:
break;
}
return apacheRequest;
}
/**
   * A wrapper around Apache's InputStreamEntity. It takes a ParseHttpBody
   * and adapts it to an HttpEntity.
*/
private static class ParseApacheHttpEntity extends InputStreamEntity {
private ParseHttpBody parseBody;
public ParseApacheHttpEntity(ParseHttpBody parseBody) {
super(parseBody.getContent(), parseBody.getContentLength());
super.setContentType(parseBody.getContentType());
this.parseBody = parseBody;
}
@Override
public void writeTo(OutputStream out) throws IOException {
parseBody.writeTo(out);
}
}
}
|
|
/**
*
*/
package SearchAlgorithms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Comparator;
import java.util.PriorityQueue;
import java.util.Queue;
import MazeReadIn.Maze;
import MazeReadIn.Pair;
import MazeReadIn.ReadMaze;
import MazeReadIn.WriteMaze;
import static java.lang.System.*;
/**
 * @author zhenchengwang
 * Data structures: 1, frontier -> PriorityQueue<Pair<Integer, Integer>> myQueue, ordered by AstarComparator
 *                  2, table to mark visited -> int[][] canTravel
 *                  3, dictionary to remember the path -> Map<String, String> myMap ("x,y" -> parent "x,y")
 *                  4, path -> List<String> myPath
 *                  5, table of path cost so far -> double[][] compareCost (updated through AstarComparator)
 * Results:
 * 1, solution path
 * 2, number of nodes expanded -> counter.get(0)
 * 3, maximum tree depth -> path length
 * 4, maximum size of frontier -> counter.get(1)
 *
 */
public class Astar {
	/*
	 * pushAdjacent: push the unvisited neighbours of curr into the frontier and
	 * record their parent (in myMap) and path cost (in the comparator).
	 */
public static int pushAdjacent(PriorityQueue<Pair<Integer, Integer>> myQueue, int[][] canTravel, Pair<Integer, Integer> curr, Map<String, String> myMap, AstarComparator comparator){
int currX = curr.getFirst();
int currY = curr.getSecond();
if(currX+1<canTravel.length && canTravel[currX+1][currY]==0){
Pair<Integer, Integer> rightChild = new Pair(currX+1, currY);
String parent = ""+currX+","+currY+"";
String child = ""+(currX+1)+","+currY+"";
myMap.put(child, parent);
comparator.setCost(currX+1, currY, comparator.getCost(currX, currY)+getCost(currX + 1));
myQueue.offer(rightChild);
}
if(currY+1<canTravel[0].length && canTravel[currX][currY+1]==0){
Pair<Integer, Integer> upChild = new Pair(currX, currY+1);
String parent = ""+currX+","+currY+"";
String child = ""+(currX)+","+(currY+1)+"";
myMap.put(child, parent);
comparator.setCost(currX, currY+1, comparator.getCost(currX, currY)+getCost(currX));
myQueue.offer(upChild);
}
if(currX-1>=0 && canTravel[currX-1][currY]==0){
Pair<Integer, Integer> leftChild = new Pair(currX-1, currY);
String parent = ""+currX+","+currY+"";
String child = ""+(currX-1)+","+currY+"";
myMap.put(child, parent);
comparator.setCost(currX-1, currY, comparator.getCost(currX, currY)+getCost(currX - 1));
myQueue.offer(leftChild);
}
if(currY-1>=0 && canTravel[currX][currY-1]==0){
Pair<Integer, Integer> downChild = new Pair(currX, currY-1);
String parent = ""+currX+","+currY+"";
String child = ""+(currX)+","+(currY-1)+"";
myMap.put(child, parent);
comparator.setCost(currX, currY-1, comparator.getCost(currX, currY)+getCost(currX));
myQueue.offer(downChild);
}
return myQueue.size();
}
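	/*
	 * Illustrative alternative (not part of the original code): the four neighbour
	 * blocks above differ only in their (dx, dy) offset, so a direction table
	 * removes the duplication. This sketch assumes the same AstarComparator
	 * signatures used above and is not called anywhere.
	 */
	private static void pushAdjacentSketch(PriorityQueue<Pair<Integer, Integer>> queue,
			int[][] canTravel, int x, int y,
			Map<String, String> parents,
			AstarComparator comparator) {
		int[][] directions = {{1, 0}, {0, 1}, {-1, 0}, {0, -1}};
		for (int[] d : directions) {
			int nx = x + d[0];
			int ny = y + d[1];
			if (nx >= 0 && ny >= 0 && nx < canTravel.length && ny < canTravel[0].length
					&& canTravel[nx][ny] == 0) {
				parents.put(nx + "," + ny, x + "," + y);
				comparator.setCost(nx, ny, comparator.getCost(x, y) + getCost(nx));
				queue.offer(new Pair<Integer, Integer>(nx, ny));
			}
		}
	}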
private static double getCost(int x)
{
return 1.0; //unit step cost
//return Math.exp(x); //prefer left
//return 1.0 / Math.exp(x); //right
}
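	/*
	 * Illustrative sketch (not part of the original code): A* orders its frontier
	 * by f(n) = g(n) + h(n), where g is the accumulated step cost above and h is a
	 * heuristic estimate to the goal. A typical admissible heuristic for a grid
	 * maze is the Manhattan distance; the helper below is independent of
	 * AstarComparator and is not called anywhere.
	 */
	private static int manhattanDistanceSketch(int x, int y, int goalX, int goalY) {
		return Math.abs(x - goalX) + Math.abs(y - goalY);
	}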
	/* A* main search function (the method keeps the historical name BFS);
	 * goal checking is done inline when a node is popped from the frontier. */
public static ArrayList<String> BFS(Maze myMaze, ArrayList<Integer> counter, ArrayList<Pair<Integer, Integer>> reachedGoals){
ArrayList<String> path = new ArrayList<String>();
int mazeWidth = myMaze.maze.length;
int mazeHeight = myMaze.maze[0].length;
ArrayList<Pair<Integer, Integer>> myGoal = myMaze.goals;
Pair<Integer, Integer> myStart = new Pair<Integer, Integer>(myMaze.start[0], myMaze.start[1]);
double[][] compareCost = new double[mazeWidth][mazeHeight];
AstarComparator comparator = new AstarComparator(myStart,myGoal,compareCost);
PriorityQueue<Pair<Integer, Integer>> myQueue = new PriorityQueue<Pair<Integer, Integer>>(10,comparator);
int[][] canTravel = new int[mazeWidth][mazeHeight];
Map<String, String> myMap = new HashMap<String, String>();
List<String> myPath = new ArrayList<String>();
// initialize queue/pathcost, push the start point into queue, initial canTravel, initialize pathCost:
Pair<Integer, Integer> curr = new Pair(myMaze.start[0], myMaze.start[1]);
for(int i=0; i<mazeWidth; i++){
for(int j=0; j<mazeHeight; j++){
if(myMaze.maze[i][j] == 1)
canTravel[i][j] = 1;
else
canTravel[i][j] = 0;
}
}
for(int i=0; i<mazeWidth; i++){
for(int j=0; j<mazeHeight; j++){
comparator.setCost(i, j, Integer.MAX_VALUE);
}
}
comparator.setCost(myStart.getFirst(), myStart.getSecond(), 0);
myQueue.offer( curr );
while(myQueue.size() != 0){
// pop one element from queue, mark it visited
Pair checkCurr = myQueue.poll();
int currX = (int) checkCurr.getFirst();
int currY = (int) checkCurr.getSecond();
canTravel[currX][currY] = 1;
Integer nVisited = counter.get(0);
nVisited++;
counter.set(0, nVisited);
// check if we reach the goal(s)
Iterator<Pair<Integer, Integer>> goalsIterator = myMaze.goals.iterator();
boolean found = false;
while (goalsIterator.hasNext()) {
Pair<Integer, Integer> g = goalsIterator.next();
int goalX = g.getFirst();
int goalY = g.getSecond();
if(currX == goalX && currY == goalY ){
reachedGoals.add(new Pair<Integer, Integer>(goalX, goalY));
myMaze.start[0] = g.getFirst();
myMaze.start[1] = g.getSecond();
System.out.println("COST:" + compareCost[goalX][goalY]);
goalsIterator.remove();
found = true;
break;
}
}
// if so terminate
if(found){
break;
}
// if not push unvisited adjacents into queue, update dictionary, so we can find our path when we reach the goal:
else{
int currExpand = pushAdjacent(myQueue, canTravel, checkCurr, myMap, comparator);
Integer maxExpand = counter.get(1);
if( maxExpand < currExpand){
maxExpand = currExpand;
counter.set(1, maxExpand);
}
}
}
// we are out of the loop, now report the path we found
		// start from the reached goal (now stored in myMaze.start) and walk back through the parent map
String currKey = ""+myMaze.start[0]+"," + myMaze.start[1]+"";
path.add(currKey);
while(myMap.containsKey(currKey)){
currKey = myMap.get(currKey);
path.add(currKey);
}
return path;
}
/**
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
// TODO Auto-generated method stub
out.println("Please enter the maze you would like to run:");
String input = "src/MazeReadIn/trickysearch2";//console().readLine();
String output = input+"Solution";
Maze myMaze = ReadMaze.parseMaze(input); // className.methodName
ArrayList<String> result = new ArrayList<String>();
boolean firstLoop = true;
ArrayList<Integer> counter = new ArrayList<Integer>();
		counter.add(0);
		counter.add(0);
ArrayList<Pair<Integer, Integer>> reachedGoals = new ArrayList<Pair<Integer, Integer>>();
while(myMaze.goals.size()>0){
ArrayList<String> partialResult = BFS(myMaze, counter, reachedGoals);
if(firstLoop){
result = ArrayListHelper.add(partialResult, result);
firstLoop = false;
}
else
{
result = ArrayListHelper.add(partialResult.subList(0, partialResult.size()-1), result);
}
}
out.println("SOLUTION:");
double cost = 0;
for(int i = result.size() - 1; i >= 0; i--){
out.println(result.get(i));
}
for(int i = result.size() - 2; i >= 0; i--){
int currFirst = Integer.parseInt(result.get(i).split(",")[0]);
double thisCost = getCost(currFirst);
//System.out.println("2^" + currFirst + " = " + thisCost);
cost += thisCost;
}
out.println("PATH COST: " + cost);
//out.println("PATH LEN:" + (result.size() - 1));
out.println("MAX TREE DEPTH:"+ (result.size() - 1));
out.println("VISITED:"+ counter.get(0));
out.println("FRONTIER COUNT:"+ counter.get(1));
WriteMaze.writeSolution(input, result, output);
//WriteMaze.writeSolution(input, result, output, reachedGoals);
}
}
|
|
package com.github.hm1rafael.statement;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
public abstract class IntegrationTestPreparedStatementDoesNothingOperations implements CallableStatement, PreparedStatement {
@Override
public int getMaxFieldSize() throws SQLException {
return 0;
}
@Override
public void setMaxFieldSize(int max) throws SQLException {
}
@Override
public int getMaxRows() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setMaxRows(int max) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int getQueryTimeout() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setQueryTimeout(int seconds) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void cancel() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public SQLWarning getWarnings() throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public void clearWarnings() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCursorName(String name) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public boolean getMoreResults() throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public void setFetchDirection(int direction) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int getFetchDirection() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setFetchSize(int rows) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int getFetchSize() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public int getResultSetConcurrency() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public int getResultSetType() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public void addBatch(String sql) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void clearBatch() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBlob(String parameterName, Blob x) throws SQLException {
}
@Override
public void setClob(String parameterName, Clob x) throws SQLException {
}
@Override
public void setAsciiStream(String parameterName, InputStream x, long length) throws SQLException {
}
@Override
public void setBinaryStream(String parameterName, InputStream x, long length) throws SQLException {
}
@Override
public void setCharacterStream(String parameterName, Reader reader, long length) throws SQLException {
}
@Override
public void setAsciiStream(String parameterName, InputStream x) throws SQLException {
}
@Override
public void setBinaryStream(String parameterName, InputStream x) throws SQLException {
}
@Override
public void setCharacterStream(String parameterName, Reader reader) throws SQLException {
}
@Override
public void setNCharacterStream(String parameterName, Reader value) throws SQLException {
}
@Override
public void setClob(String parameterName, Reader reader) throws SQLException {
}
@Override
public void setBlob(String parameterName, InputStream inputStream) throws SQLException {
}
@Override
public void setNClob(String parameterName, Reader reader) throws SQLException {
}
@Override
public void setRowId(String parameterName, RowId x) throws SQLException {
}
@Override
public void setNString(String parameterName, String value) throws SQLException {
}
@Override
public void setNCharacterStream(String parameterName, Reader value, long length) throws SQLException {
}
@Override
public void setNClob(String parameterName, NClob value) throws SQLException {
}
@Override
public void setClob(String parameterName, Reader reader, long length) throws SQLException {
}
@Override
public void setBlob(String parameterName, InputStream inputStream, long length) throws SQLException {
}
@Override
public void setNClob(String parameterName, Reader reader, long length) throws SQLException {
}
@Override
public void setURL(String parameterName, URL val) throws SQLException {
}
@Override
public void setNull(String parameterName, int sqlType) throws SQLException {
}
@Override
public void setBoolean(String parameterName, boolean x) throws SQLException {
}
@Override
public void setByte(String parameterName, byte x) throws SQLException {
}
@Override
public void setShort(String parameterName, short x) throws SQLException {
}
@Override
public void setInt(String parameterName, int x) throws SQLException {
}
@Override
public void setLong(String parameterName, long x) throws SQLException {
}
@Override
public void setFloat(String parameterName, float x) throws SQLException {
}
@Override
public void setDouble(String parameterName, double x) throws SQLException {
}
@Override
public void setBigDecimal(String parameterName, BigDecimal x) throws SQLException {
}
@Override
public void setString(String parameterName, String x) throws SQLException {
}
@Override
public void setBytes(String parameterName, byte[] x) throws SQLException {
}
@Override
public void setDate(String parameterName, Date x) throws SQLException {
}
@Override
public void setTime(String parameterName, Time x) throws SQLException {
}
@Override
public void setTimestamp(String parameterName, Timestamp x) throws SQLException {
}
@Override
public void setAsciiStream(String parameterName, InputStream x, int length) throws SQLException {
}
@Override
public void setBinaryStream(String parameterName, InputStream x, int length) throws SQLException {
}
@Override
public void setObject(String parameterName, Object x, int targetSqlType, int scale) throws SQLException {
}
@Override
public void setObject(String parameterName, Object x, int targetSqlType) throws SQLException {
}
@Override
public void setObject(String parameterName, Object x) throws SQLException {
}
@Override
public void setCharacterStream(String parameterName, Reader reader, int length) throws SQLException {
}
@Override
public void setDate(String parameterName, Date x, Calendar cal) throws SQLException {
}
@Override
public void setTime(String parameterName, Time x, Calendar cal) throws SQLException {
}
@Override
public void setTimestamp(String parameterName, Timestamp x, Calendar cal) throws SQLException {
}
@Override
public void setNull(String parameterName, int sqlType, String typeName) throws SQLException {
}
@Override
public void setRowId(int parameterIndex, RowId x) throws SQLException {
}
@Override
public void setNString(int parameterIndex, String value) throws SQLException {
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException {
}
@Override
public void setNClob(int parameterIndex, NClob value) throws SQLException {
}
@Override
public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException {
}
@Override
public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
}
@Override
public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException {
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
}
@Override
public void setClob(int parameterIndex, Reader reader) throws SQLException {
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
}
@Override
public void setNClob(int parameterIndex, Reader reader) throws SQLException {
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException {
}
@Override
public void setRef(int parameterIndex, Ref x) throws SQLException {
}
@Override
public void setBlob(int parameterIndex, Blob x) throws SQLException {
}
@Override
public void setClob(int parameterIndex, Clob x) throws SQLException {
}
@Override
public void setArray(int parameterIndex, Array x) throws SQLException {
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
return null;
}
@Override
public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
}
@Override
public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
}
@Override
public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
}
@Override
public void setURL(int parameterIndex, URL x) throws SQLException {
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
}
@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
}
@Override
public void setNull(int parameterIndex, int sqlType) throws SQLException {
}
@Override
public void setBoolean(int parameterIndex, boolean x) throws SQLException {
}
@Override
public void setByte(int parameterIndex, byte x) throws SQLException {
}
@Override
public void setShort(int parameterIndex, short x) throws SQLException {
}
@Override
public void setInt(int parameterIndex, int x) throws SQLException {
}
@Override
public void setLong(int parameterIndex, long x) throws SQLException {
}
@Override
public void setFloat(int parameterIndex, float x) throws SQLException {
}
@Override
public void setDouble(int parameterIndex, double x) throws SQLException {
}
@Override
public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
}
@Override
public void setString(int parameterIndex, String x) throws SQLException {
}
@Override
public void setBytes(int parameterIndex, byte[] x) throws SQLException {
}
@Override
public void setDate(int parameterIndex, Date x) throws SQLException {
}
@Override
public void setTime(int parameterIndex, Time x) throws SQLException {
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
}
@Override
public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
}
@Override
public int getResultSetHoldability() throws SQLException {
return 0;
}
@Override
public boolean isClosed() throws SQLException {
return false;
}
@Override
public void setPoolable(boolean poolable) throws SQLException {
}
@Override
public boolean isPoolable() throws SQLException {
return false;
}
@Override
public void closeOnCompletion() throws SQLException {
}
@Override
public boolean isCloseOnCompletion() throws SQLException {
return false;
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return null;
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return false;
}
@Override
public boolean getMoreResults(int current) throws SQLException {
return false;
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
return null;
}
@Override
public int getUpdateCount() throws SQLException {
return 0;
}
@Override
public void close() throws SQLException {
}
@Override
public void clearParameters() throws SQLException {
}
@Override
public void addBatch() throws SQLException {
}
@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
return null;
}
@Override
public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException {
}
@Override
public void registerOutParameter(int parameterIndex, int sqlType, int scale) throws SQLException {
}
@Override
public boolean wasNull() throws SQLException {
return false;
}
@Override
public void registerOutParameter(String parameterName, int sqlType, String typeName) throws SQLException {
}
@Override
public void setSQLXML(String parameterName, SQLXML xmlObject) throws SQLException {
}
@Override
public SQLXML getSQLXML(int parameterIndex) throws SQLException {
return null;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.Vector;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import org.apache.jasper.JasperException;
import org.apache.jasper.JspCompilationContext;
import org.xml.sax.Attributes;
/**
* This class contains all the utility methods. Ideally, all the bean
* containers should also be moved here.
*
* @author Mandar Raje.
* @author Rajiv Mordani.
* @author Danno Ferrin
* @author Pierre Delisle
* @author Shawn Bayern
* @author Mark Roth
*/
public class JspUtil {
private static final String WEB_INF_TAGS = "/WEB-INF/tags/";
private static final String META_INF_TAGS = "/META-INF/tags/";
// Delimiters for request-time expressions (JSP and XML syntax)
private static final String OPEN_EXPR = "<%=";
private static final String CLOSE_EXPR = "%>";
private static final String javaKeywords[] = { "abstract", "assert",
"boolean", "break", "byte", "case", "catch", "char", "class",
"const", "continue", "default", "do", "double", "else", "enum",
"extends", "final", "finally", "float", "for", "goto", "if",
"implements", "import", "instanceof", "int", "interface", "long",
"native", "new", "package", "private", "protected", "public",
"return", "short", "static", "strictfp", "super", "switch",
"synchronized", "this", "throw", "throws", "transient", "try",
"void", "volatile", "while" };
public static final int CHUNKSIZE = 1024;
/**
* Takes a potential expression and converts it into XML form
*/
public static String getExprInXml(String expression) {
String returnString;
int length = expression.length();
if (expression.startsWith(OPEN_EXPR) &&
expression.endsWith(CLOSE_EXPR)) {
returnString = expression.substring(1, length - 1);
} else {
returnString = expression;
}
return escapeXml(returnString);
}
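// Illustrative behavior (a minimal sketch derived from the method above; values are hypothetical):
//   getExprInXml("<%= user %>")  ->  "%= user %"   (leading '<' and trailing '>' stripped, then XML-escaped)
//   getExprInXml("a & b")        ->  "a &amp; b"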
/**
* Checks to see if the given scope is valid.
*
* @param scope
* The scope to be checked
* @param n
* The Node containing the 'scope' attribute whose value is to be
* checked
* @param err
* error dispatcher
*
* @throws JasperException
* if scope is not null and different from "page",
* "request", "session", and
* "application"
*/
public static void checkScope(String scope, Node n, ErrorDispatcher err)
throws JasperException {
if (scope != null && !scope.equals("page") && !scope.equals("request")
&& !scope.equals("session") && !scope.equals("application")) {
err.jspError(n, "jsp.error.invalid.scope", scope);
}
}
/**
* Checks if all mandatory attributes are present and if all attributes
* present have valid names. Checks attributes specified as XML-style
* attributes as well as attributes specified using the jsp:attribute
* standard action.
*/
public static void checkAttributes(String typeOfTag, Node n,
ValidAttribute[] validAttributes, ErrorDispatcher err)
throws JasperException {
Attributes attrs = n.getAttributes();
Mark start = n.getStart();
boolean valid = true;
// AttributesImpl.removeAttribute is broken, so we do this...
int tempLength = (attrs == null) ? 0 : attrs.getLength();
Vector<String> temp = new Vector<String>(tempLength, 1);
for (int i = 0; i < tempLength; i++) {
@SuppressWarnings("null") // If attrs==null, tempLength == 0
String qName = attrs.getQName(i);
if ((!qName.equals("xmlns")) && (!qName.startsWith("xmlns:"))) {
temp.addElement(qName);
}
}
// Add names of attributes specified using jsp:attribute
Node.Nodes tagBody = n.getBody();
if (tagBody != null) {
int numSubElements = tagBody.size();
for (int i = 0; i < numSubElements; i++) {
Node node = tagBody.getNode(i);
if (node instanceof Node.NamedAttribute) {
String attrName = node.getAttributeValue("name");
temp.addElement(attrName);
// Check if this value appears in the attributes of the node
if (n.getAttributeValue(attrName) != null) {
err.jspError(n,
"jsp.error.duplicate.name.jspattribute",
attrName);
}
} else {
// Nothing can come before jsp:attribute, and only
// jsp:body can come after it.
break;
}
}
}
/*
* First check to see if all the mandatory attributes are present. If so
* only then proceed to see if the other attributes are valid for the
* particular tag.
*/
String missingAttribute = null;
for (int i = 0; i < validAttributes.length; i++) {
int attrPos;
if (validAttributes[i].mandatory) {
attrPos = temp.indexOf(validAttributes[i].name);
if (attrPos != -1) {
temp.remove(attrPos);
valid = true;
} else {
valid = false;
missingAttribute = validAttributes[i].name;
break;
}
}
}
// If a mandatory attribute is missing then an error is reported
if (!valid) {
err.jspError(start, "jsp.error.mandatory.attribute", typeOfTag,
missingAttribute);
}
// Check to see if there are any more attributes for the specified tag.
int attrLeftLength = temp.size();
if (attrLeftLength == 0) {
return;
}
// Now check to see if the rest of the attributes are valid too.
String attribute = null;
for (int j = 0; j < attrLeftLength; j++) {
valid = false;
attribute = temp.elementAt(j);
for (int i = 0; i < validAttributes.length; i++) {
if (attribute.equals(validAttributes[i].name)) {
valid = true;
break;
}
}
if (!valid) {
err.jspError(start, "jsp.error.invalid.attribute", typeOfTag,
attribute);
}
}
// XXX *could* move EL-syntax validation here... (sb)
}
/**
* Escape the 5 entities defined by XML.
*/
public static String escapeXml(String s) {
if (s == null) {
return null;
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '<') {
sb.append("&lt;");
} else if (c == '>') {
sb.append("&gt;");
} else if (c == '\'') {
sb.append("&#039;"); // numeric form of &apos;
} else if (c == '&') {
sb.append("&amp;");
} else if (c == '"') {
sb.append("&#034;"); // numeric form of &quot;
} else {
sb.append(c);
}
}
return sb.toString();
}
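// Illustrative behavior (a minimal sketch based on the escaping above; the inputs are hypothetical):
//   escapeXml("a < b & \"c\"")  ->  "a &lt; b &amp; &#034;c&#034;"
//   escapeXml(null)             ->  null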
/**
* Replaces any occurrences of the character <tt>replace</tt> with the
* string <tt>with</tt>.
*/
public static String replace(String name, char replace, String with) {
StringBuilder buf = new StringBuilder();
int begin = 0;
int end;
int last = name.length();
while (true) {
end = name.indexOf(replace, begin);
if (end < 0) {
end = last;
}
buf.append(name.substring(begin, end));
if (end == last) {
break;
}
buf.append(with);
begin = end + 1;
}
return buf.toString();
}
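// Illustrative behavior (a minimal sketch; the arguments are hypothetical):
//   replace("a.b.c", '.', "/")  ->  "a/b/c"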
public static class ValidAttribute {
String name;
boolean mandatory;
public ValidAttribute(String name, boolean mandatory) {
this.name = name;
this.mandatory = mandatory;
}
public ValidAttribute(String name) {
this(name, false);
}
}
/**
* Convert a String value to 'boolean'. Besides the standard conversions
* done by Boolean.valueOf(s).booleanValue(), the value "yes" (ignoring case)
* is also converted to 'true'. If 's' is null, then 'false' is returned.
*
* @param s
* the string to be converted
* @return the boolean value associated with the string s
*/
public static boolean booleanValue(String s) {
boolean b = false;
if (s != null) {
if (s.equalsIgnoreCase("yes")) {
b = true;
} else {
b = Boolean.valueOf(s).booleanValue();
}
}
return b;
}
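// Illustrative behavior (a minimal sketch of the conversion above):
//   booleanValue("yes")  -> true
//   booleanValue("TRUE") -> true
//   booleanValue("no")   -> false
//   booleanValue(null)   -> false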
/**
* Returns the <tt>Class</tt> object associated with the class or
* interface with the given string name.
*
* <p>
* The <tt>Class</tt> object is determined by passing the given string
* name to the <tt>Class.forName()</tt> method, unless the given string
* name represents a primitive type, in which case it is converted to a
* <tt>Class</tt> object by appending ".class" to it (e.g., "int.class").
*/
public static Class<?> toClass(String type, ClassLoader loader)
throws ClassNotFoundException {
Class<?> c = null;
int i0 = type.indexOf('[');
int dims = 0;
if (i0 > 0) {
// This is an array. Count the dimensions
for (int i = 0; i < type.length(); i++) {
if (type.charAt(i) == '[') {
dims++;
}
}
type = type.substring(0, i0);
}
if ("boolean".equals(type)) {
c = boolean.class;
} else if ("char".equals(type)) {
c = char.class;
} else if ("byte".equals(type)) {
c = byte.class;
} else if ("short".equals(type)) {
c = short.class;
} else if ("int".equals(type)) {
c = int.class;
} else if ("long".equals(type)) {
c = long.class;
} else if ("float".equals(type)) {
c = float.class;
} else if ("double".equals(type)) {
c = double.class;
} else if ("void".equals(type)) {
c = void.class;
} else if (type.indexOf('[') < 0) {
c = loader.loadClass(type);
}
if (dims == 0) {
return c;
}
if (dims == 1) {
return java.lang.reflect.Array.newInstance(c, 1).getClass();
}
// Array of more than one dimension
return java.lang.reflect.Array.newInstance(c, new int[dims]).getClass();
}
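// Illustrative behavior (a minimal sketch; 'loader' stands for whatever ClassLoader is passed in):
//   toClass("int", loader)              ->  int.class
//   toClass("int[][]", loader)          ->  int[][].class
//   toClass("java.lang.String", loader) ->  the String class as loaded via loader.loadClass(...)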
/**
* Produces a String representing a call to the EL interpreter.
*
* @param expression
* a String containing zero or more "${}" expressions
* @param expectedType
* the expected type of the interpreted result
* @param fnmapvar
* Variable pointing to a function map.
* @param XmlEscape
* True if the result should do XML escaping
* @return a String representing a call to the EL interpreter.
*/
public static String interpreterCall(boolean isTagFile, String expression,
Class<?> expectedType, String fnmapvar, boolean XmlEscape) {
/*
* Determine which context object to use.
*/
String jspCtxt = null;
if (isTagFile) {
jspCtxt = "this.getJspContext()";
} else {
jspCtxt = "_jspx_page_context";
}
/*
* Determine whether to use the expected type's textual name or, if it's
* a primitive, the name of its correspondent boxed type.
*/
String targetType = expectedType.getCanonicalName();
String primitiveConverterMethod = null;
if (expectedType.isPrimitive()) {
if (expectedType.equals(Boolean.TYPE)) {
targetType = Boolean.class.getName();
primitiveConverterMethod = "booleanValue";
} else if (expectedType.equals(Byte.TYPE)) {
targetType = Byte.class.getName();
primitiveConverterMethod = "byteValue";
} else if (expectedType.equals(Character.TYPE)) {
targetType = Character.class.getName();
primitiveConverterMethod = "charValue";
} else if (expectedType.equals(Short.TYPE)) {
targetType = Short.class.getName();
primitiveConverterMethod = "shortValue";
} else if (expectedType.equals(Integer.TYPE)) {
targetType = Integer.class.getName();
primitiveConverterMethod = "intValue";
} else if (expectedType.equals(Long.TYPE)) {
targetType = Long.class.getName();
primitiveConverterMethod = "longValue";
} else if (expectedType.equals(Float.TYPE)) {
targetType = Float.class.getName();
primitiveConverterMethod = "floatValue";
} else if (expectedType.equals(Double.TYPE)) {
targetType = Double.class.getName();
primitiveConverterMethod = "doubleValue";
}
}
if (primitiveConverterMethod != null) {
XmlEscape = false;
}
/*
* Build up the base call to the interpreter.
*/
// XXX - We use a proprietary call to the interpreter for now
// as the current standard machinery is inefficient and requires
// lots of wrappers and adapters. This should all clear up once
// the EL interpreter moves out of JSTL and into its own project.
// In the future, this should be replaced by code that calls
// ExpressionEvaluator.parseExpression() and then cache the resulting
// expression objects. The interpreterCall would simply select
// one of the pre-cached expressions and evaluate it.
// Note that PageContextImpl implements VariableResolver and
// the generated Servlet/SimpleTag implements FunctionMapper, so
// that machinery is already in place (mroth).
targetType = toJavaSourceType(targetType);
StringBuilder call = new StringBuilder(
"("
+ targetType
+ ") "
+ "org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate"
+ "(" + Generator.quote(expression) + ", " + targetType
+ ".class, " + "(javax.servlet.jsp.PageContext)" + jspCtxt + ", "
+ fnmapvar + ", " + XmlEscape + ")");
/*
* Add the primitive converter method if we need to.
*/
if (primitiveConverterMethod != null) {
call.insert(0, "(");
call.append(")." + primitiveConverterMethod + "()");
}
return call.toString();
}
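// Illustrative output (a rough sketch; the exact quoting comes from Generator.quote, which is not shown here):
//   interpreterCall(false, "${name}", String.class, "_jspx_fnmap", false) produces roughly
//   (java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate(
//       "${name}", java.lang.String.class, (javax.servlet.jsp.PageContext)_jspx_page_context, _jspx_fnmap, false)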
public static String coerceToPrimitiveBoolean(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToBoolean("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "false";
} else {
return Boolean.valueOf(s).toString();
}
}
}
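// Illustrative output (a minimal sketch; "_jspx_temp0" is a hypothetical generated variable name):
//   coerceToPrimitiveBoolean("true", false)       ->  "true"
//   coerceToPrimitiveBoolean(null, false)         ->  "false"
//   coerceToPrimitiveBoolean("_jspx_temp0", true) ->  "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToBoolean(_jspx_temp0)"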
public static String coerceToBoolean(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Boolean) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Boolean.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Boolean(false)";
} else {
// Detect format error at translation time
return "new java.lang.Boolean(" + Boolean.valueOf(s).toString() + ")";
}
}
}
public static String coerceToPrimitiveByte(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToByte("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(byte) 0";
} else {
return "((byte)" + Byte.valueOf(s).toString() + ")";
}
}
}
public static String coerceToByte(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Byte) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Byte.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Byte((byte) 0)";
} else {
// Detect format error at translation time
return "new java.lang.Byte((byte)" + Byte.valueOf(s).toString() + ")";
}
}
}
public static String coerceToChar(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToChar("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(char) 0";
} else {
char ch = s.charAt(0);
// this trick avoids escaping issues
return "((char) " + (int) ch + ")";
}
}
}
public static String coerceToCharacter(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Character) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Character.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Character((char) 0)";
} else {
char ch = s.charAt(0);
// this trick avoids escaping issues
return "new java.lang.Character((char) " + (int) ch + ")";
}
}
}
public static String coerceToPrimitiveDouble(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToDouble("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(double) 0";
} else {
return Double.valueOf(s).toString();
}
}
}
public static String coerceToDouble(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Double) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", Double.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Double(0)";
} else {
// Detect format error at translation time
return "new java.lang.Double(" + Double.valueOf(s).toString() + ")";
}
}
}
public static String coerceToPrimitiveFloat(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToFloat("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(float) 0";
} else {
return Float.valueOf(s).toString() + "f";
}
}
}
public static String coerceToFloat(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Float) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Float.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Float(0)";
} else {
// Detect format error at translation time
return "new java.lang.Float(" + Float.valueOf(s).toString() + "f)";
}
}
}
public static String coerceToInt(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToInt("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "0";
} else {
return Integer.valueOf(s).toString();
}
}
}
public static String coerceToInteger(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Integer) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Integer.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Integer(0)";
} else {
// Detect format error at translation time
return "new java.lang.Integer(" + Integer.valueOf(s).toString() + ")";
}
}
}
public static String coerceToPrimitiveShort(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToShort("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(short) 0";
} else {
return "((short) " + Short.valueOf(s).toString() + ")";
}
}
}
public static String coerceToShort(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Short) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Short.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Short((short) 0)";
} else {
// Detect format error at translation time
return "new java.lang.Short(\"" + Short.valueOf(s).toString() + "\")";
}
}
}
public static String coerceToPrimitiveLong(String s,
boolean isNamedAttribute) {
if (isNamedAttribute) {
return "org.apache.jasper.runtime.JspRuntimeLibrary.coerceToLong("
+ s + ")";
} else {
if (s == null || s.length() == 0) {
return "(long) 0";
} else {
return Long.valueOf(s).toString() + "l";
}
}
}
public static String coerceToLong(String s, boolean isNamedAttribute) {
if (isNamedAttribute) {
return "(java.lang.Long) org.apache.jasper.runtime.JspRuntimeLibrary.coerce("
+ s + ", java.lang.Long.class)";
} else {
if (s == null || s.length() == 0) {
return "new java.lang.Long(0)";
} else {
// Detect format error at translation time
return "new java.lang.Long(" + Long.valueOf(s).toString() + "l)";
}
}
}
public static InputStream getInputStream(String fname, JarFile jarFile,
JspCompilationContext ctxt, ErrorDispatcher err)
throws JasperException, IOException {
InputStream in = null;
if (jarFile != null) {
String jarEntryName = fname.substring(1, fname.length());
ZipEntry jarEntry = jarFile.getEntry(jarEntryName);
if (jarEntry == null) {
throw new FileNotFoundException(Localizer.getMessage(
"jsp.error.file.not.found", fname));
}
in = jarFile.getInputStream(jarEntry);
} else {
in = ctxt.getResourceAsStream(fname);
}
if (in == null) {
throw new FileNotFoundException(Localizer.getMessage(
"jsp.error.file.not.found", fname));
}
return in;
}
/**
* Gets the fully-qualified class name of the tag handler corresponding to
* the given tag file path.
*
* @param path
* Tag file path
* @param err
* Error dispatcher
*
* @return Fully-qualified class name of the tag handler corresponding to
* the given tag file path
*/
public static String getTagHandlerClassName(String path, String urn,
ErrorDispatcher err) throws JasperException {
String className = null;
int begin = 0;
int index;
index = path.lastIndexOf(".tag");
if (index == -1) {
err.jspError("jsp.error.tagfile.badSuffix", path);
}
// It's tempting to remove the ".tag" suffix here, but we can't.
// If we remove it, the fully-qualified class name of this tag
// could conflict with the package name of other tags.
// For instance, the tag file
// /WEB-INF/tags/foo.tag
// would have fully-qualified class name
// org.apache.jsp.tag.web.foo
// which would conflict with the package name of the tag file
// /WEB-INF/tags/foo/bar.tag
index = path.indexOf(WEB_INF_TAGS);
if (index != -1) {
className = "org.apache.jsp.tag.web.";
begin = index + WEB_INF_TAGS.length();
} else {
index = path.indexOf(META_INF_TAGS);
if (index != -1) {
className = getClassNameBase(urn);
begin = index + META_INF_TAGS.length();
} else {
err.jspError("jsp.error.tagfile.illegalPath", path);
}
}
className += makeJavaPackage(path.substring(begin));
return className;
}
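// Illustrative behavior (a minimal sketch; the tag file path is hypothetical):
//   getTagHandlerClassName("/WEB-INF/tags/foo/bar.tag", null, err)
//     ->  "org.apache.jsp.tag.web.foo.bar_tag"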
private static String getClassNameBase(String urn) {
StringBuilder base = new StringBuilder("org.apache.jsp.tag.meta.");
if (urn != null) {
base.append(makeJavaPackage(urn));
base.append('.');
}
return base.toString();
}
/**
* Converts the given path to a Java package or fully-qualified class name
*
* @param path
* Path to convert
*
* @return Java package corresponding to the given path
*/
public static final String makeJavaPackage(String path) {
String classNameComponents[] = split(path, "/");
StringBuilder legalClassNames = new StringBuilder();
for (int i = 0; i < classNameComponents.length; i++) {
legalClassNames.append(makeJavaIdentifier(classNameComponents[i]));
if (i < classNameComponents.length - 1) {
legalClassNames.append('.');
}
}
return legalClassNames.toString();
}
/**
* Splits a string into its components.
*
* @param path
* String to split
* @param pat
* Pattern to split at
* @return the components of the path
*/
private static final String[] split(String path, String pat) {
Vector<String> comps = new Vector<String>();
int pos = path.indexOf(pat);
int start = 0;
while (pos >= 0) {
if (pos > start) {
String comp = path.substring(start, pos);
comps.add(comp);
}
start = pos + pat.length();
pos = path.indexOf(pat, start);
}
if (start < path.length()) {
comps.add(path.substring(start));
}
String[] result = new String[comps.size()];
for (int i = 0; i < comps.size(); i++) {
result[i] = comps.elementAt(i);
}
return result;
}
/**
* Converts the given identifier to a legal Java identifier
*
* @param identifier
* Identifier to convert
*
* @return Legal Java identifier corresponding to the given identifier
*/
public static final String makeJavaIdentifier(String identifier) {
return makeJavaIdentifier(identifier, true);
}
/**
* Converts the given identifier to a legal Java identifier
* to be used for JSP Tag file attribute names.
*
* @param identifier
* Identifier to convert
*
* @return Legal Java identifier corresponding to the given identifier
*/
public static final String makeJavaIdentifierForAttribute(String identifier) {
return makeJavaIdentifier(identifier, false);
}
/**
* Converts the given identifier to a legal Java identifier.
*
* @param identifier
* Identifier to convert
*
* @return Legal Java identifier corresponding to the given identifier
*/
private static final String makeJavaIdentifier(String identifier,
boolean periodToUnderscore) {
StringBuilder modifiedIdentifier = new StringBuilder(identifier.length());
if (!Character.isJavaIdentifierStart(identifier.charAt(0))) {
modifiedIdentifier.append('_');
}
for (int i = 0; i < identifier.length(); i++) {
char ch = identifier.charAt(i);
if (Character.isJavaIdentifierPart(ch) &&
(ch != '_' || !periodToUnderscore)) {
modifiedIdentifier.append(ch);
} else if (ch == '.' && periodToUnderscore) {
modifiedIdentifier.append('_');
} else {
modifiedIdentifier.append(mangleChar(ch));
}
}
if (isJavaKeyword(modifiedIdentifier.toString())) {
modifiedIdentifier.append('_');
}
return modifiedIdentifier.toString();
}
/**
* Mangle the specified character to create a legal Java class name.
*/
public static final String mangleChar(char ch) {
char[] result = new char[5];
result[0] = '_';
result[1] = Character.forDigit((ch >> 12) & 0xf, 16);
result[2] = Character.forDigit((ch >> 8) & 0xf, 16);
result[3] = Character.forDigit((ch >> 4) & 0xf, 16);
result[4] = Character.forDigit(ch & 0xf, 16);
return new String(result);
}
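// Illustrative behavior (a minimal sketch of the hex mangling above):
//   mangleChar('-')  ->  "_002d"
//   mangleChar(' ')  ->  "_0020"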
/**
* Test whether the argument is a Java keyword
*/
public static boolean isJavaKeyword(String key) {
int i = 0;
int j = javaKeywords.length;
while (i < j) {
int k = (i + j) / 2;
int result = javaKeywords[k].compareTo(key);
if (result == 0) {
return true;
}
if (result < 0) {
i = k + 1;
} else {
j = k;
}
}
return false;
}
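// Illustrative behavior (a minimal sketch; the binary search relies on javaKeywords being sorted):
//   isJavaKeyword("for")   -> true
//   isJavaKeyword("forty") -> false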
public static boolean isJavaIdentifier(String key) {
// Should not be the case but check to be sure
if (key == null || key.length() == 0) {
return false;
}
if (isJavaKeyword(key)) {
return false;
}
// Check the start character that has more restrictions
if (!Character.isJavaIdentifierStart(key.charAt(0))) {
return false;
}
// Check each remaining character used is permitted
for (int idx = 1; idx < key.length(); idx++) {
if (!Character.isJavaIdentifierPart(key.charAt(idx))) {
return false;
}
}
return true;
}
static InputStreamReader getReader(String fname, String encoding,
JarFile jarFile, JspCompilationContext ctxt, ErrorDispatcher err)
throws JasperException, IOException {
return getReader(fname, encoding, jarFile, ctxt, err, 0);
}
static InputStreamReader getReader(String fname, String encoding,
JarFile jarFile, JspCompilationContext ctxt, ErrorDispatcher err,
int skip) throws JasperException, IOException {
InputStreamReader reader = null;
InputStream in = getInputStream(fname, jarFile, ctxt, err);
for (int i = 0; i < skip; i++) {
in.read();
}
try {
reader = new InputStreamReader(in, encoding);
} catch (UnsupportedEncodingException ex) {
err.jspError("jsp.error.unsupported.encoding", encoding);
}
return reader;
}
/**
* Handles taking input from TLDs:
*   'java.lang.Object' -> 'java.lang.Object.class'
*   'int'              -> 'int.class'
*   'void'             -> 'java.lang.Void.TYPE'
*   'int[]'            -> 'int[].class'
*
* @param type the type name as written in the TLD
*/
public static String toJavaSourceTypeFromTld(String type) {
if (type == null || "void".equals(type)) {
return "java.lang.Void.TYPE";
}
return type + ".class";
}
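// Illustrative behavior (a minimal sketch of the TLD type handling above):
//   toJavaSourceTypeFromTld(null)    ->  "java.lang.Void.TYPE"
//   toJavaSourceTypeFromTld("void")  ->  "java.lang.Void.TYPE"
//   toJavaSourceTypeFromTld("int[]") ->  "int[].class"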
/**
* Class.getName() returns arrays in the form "[[[<et>", where <et>, the
* element type, can be one of ZBCDFIJS or L<classname>;. This method converts
* such names into forms that javac can understand.
*/
public static String toJavaSourceType(String type) {
if (type.charAt(0) != '[') {
return type;
}
int dims = 1;
String t = null;
for (int i = 1; i < type.length(); i++) {
if (type.charAt(i) == '[') {
dims++;
} else {
switch (type.charAt(i)) {
case 'Z': t = "boolean"; break;
case 'B': t = "byte"; break;
case 'C': t = "char"; break;
case 'D': t = "double"; break;
case 'F': t = "float"; break;
case 'I': t = "int"; break;
case 'J': t = "long"; break;
case 'S': t = "short"; break;
case 'L': t = type.substring(i+1, type.indexOf(';')); break;
}
break;
}
}
StringBuilder resultType = new StringBuilder(t);
for (; dims > 0; dims--) {
resultType.append("[]");
}
return resultType.toString();
}
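// Illustrative behavior (a minimal sketch of the descriptor conversion above):
//   toJavaSourceType("[[I")                 ->  "int[][]"
//   toJavaSourceType("[Ljava.lang.String;") ->  "java.lang.String[]"
//   toJavaSourceType("int")                 ->  "int" (non-array names are returned unchanged)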
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server.group.impl;
import javax.management.ObjectName;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.artemis.api.core.BroadcastGroupConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.management.ManagementHelper;
import org.apache.activemq.artemis.api.core.management.ObjectNameBuilder;
import org.apache.activemq.artemis.core.config.BridgeConfiguration;
import org.apache.activemq.artemis.core.config.ClusterConnectionConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.DivertConfiguration;
import org.apache.activemq.artemis.core.management.impl.ActiveMQServerControlImpl;
import org.apache.activemq.artemis.core.messagecounter.MessageCounterManager;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.postoffice.PostOffice;
import org.apache.activemq.artemis.core.remoting.server.RemotingService;
import org.apache.activemq.artemis.core.security.Role;
import org.apache.activemq.artemis.core.security.SecurityStore;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.Divert;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.QueueFactory;
import org.apache.activemq.artemis.core.server.RoutingType;
import org.apache.activemq.artemis.core.server.ServerMessage;
import org.apache.activemq.artemis.core.server.cluster.Bridge;
import org.apache.activemq.artemis.core.server.cluster.BroadcastGroup;
import org.apache.activemq.artemis.core.server.cluster.ClusterConnection;
import org.apache.activemq.artemis.core.server.impl.AddressInfo;
import org.apache.activemq.artemis.core.server.management.ManagementService;
import org.apache.activemq.artemis.core.server.management.Notification;
import org.apache.activemq.artemis.core.server.management.NotificationListener;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.transaction.ResourceManager;
import org.apache.activemq.artemis.spi.core.remoting.Acceptor;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.ConcurrentHashSet;
import org.apache.activemq.artemis.utils.ReusableLatch;
import org.junit.Assert;
import org.junit.Test;
/**
* This tests the case of resending notifications from RemoteGroupingHandler.
* There is a small window in which notifications could be received incorrectly;
* this test makes sure the component plays well with such notifications.
*/
public class ClusteredResetMockTest extends ActiveMQTestBase {
public static final SimpleString ANYCLUSTER = SimpleString.toSimpleString("anycluster");
@Test
public void testMultipleSenders() throws Throwable {
int NUMBER_OF_SENDERS = 100;
ReusableLatch latchSends = new ReusableLatch(NUMBER_OF_SENDERS);
FakeManagement fake = new FakeManagement(latchSends);
RemoteGroupingHandler handler = new RemoteGroupingHandler(fake, SimpleString.toSimpleString("tst1"), SimpleString.toSimpleString("tst2"), 50000, 499);
handler.start();
Sender[] sn = new Sender[NUMBER_OF_SENDERS];
for (int i = 0; i < sn.length; i++) {
sn[i] = new Sender("grp" + i, handler);
sn[i].start();
}
try {
// Wait for all the senders' proposals to arrive
Assert.assertTrue(latchSends.await(1, TimeUnit.MINUTES));
// we will ask for a resend.. the latch needs to be counted back up first
for (int i = 0; i < NUMBER_OF_SENDERS; i++) {
// There is no countUp(NUMBER_OF_SENDERS); count the reusable latch back up once per sender
latchSends.countUp();
}
fake.pendingNotifications.clear();
handler.resendPending();
assertTrue(latchSends.await(10, TimeUnit.SECONDS));
HashSet<SimpleString> codesAsked = new HashSet<>();
for (Notification notification : fake.pendingNotifications) {
codesAsked.add(notification.getProperties().getSimpleStringProperty(ManagementHelper.HDR_PROPOSAL_GROUP_ID));
}
for (Sender snItem : sn) {
assertTrue(codesAsked.contains(snItem.code));
}
for (int i = NUMBER_OF_SENDERS - 1; i >= 0; i--) {
// Sending back the response as Notifications would be doing
Response response = new Response(sn[i].code, ANYCLUSTER);
handler.proposed(response);
}
for (Sender sni : sn) {
sni.join();
if (sni.ex != null) {
throw sni.ex;
}
}
} finally {
for (Sender sni : sn) {
sni.interrupt();
}
}
}
class Sender extends Thread {
SimpleString code;
public RemoteGroupingHandler handler;
Throwable ex;
Sender(String code, RemoteGroupingHandler handler) {
super("Sender::" + code);
this.code = SimpleString.toSimpleString(code);
this.handler = handler;
}
@Override
public void run() {
Proposal proposal = new Proposal(code, ANYCLUSTER);
try {
Response response = handler.propose(proposal);
if (response == null) {
ex = new NullPointerException("expected value on " + getName());
} else if (!response.getGroupId().equals(code)) {
ex = new IllegalStateException("expected code=" + code + " but it was " + response.getGroupId());
}
} catch (Throwable ex) {
ex.printStackTrace();
this.ex = ex;
}
}
}
class FakeManagement implements ManagementService {
public ConcurrentHashSet<Notification> pendingNotifications = new ConcurrentHashSet<>();
final ReusableLatch latch;
FakeManagement(ReusableLatch latch) {
this.latch = latch;
}
@Override
public MessageCounterManager getMessageCounterManager() {
return null;
}
@Override
public SimpleString getManagementAddress() {
return null;
}
@Override
public SimpleString getManagementNotificationAddress() {
return null;
}
@Override
public ObjectNameBuilder getObjectNameBuilder() {
return null;
}
@Override
public void setStorageManager(StorageManager storageManager) {
}
@Override
public ActiveMQServerControlImpl registerServer(PostOffice postOffice,
SecurityStore securityStore,
StorageManager storageManager,
Configuration configuration,
HierarchicalRepository<AddressSettings> addressSettingsRepository,
HierarchicalRepository<Set<Role>> securityRepository,
ResourceManager resourceManager,
RemotingService remotingService,
ActiveMQServer messagingServer,
QueueFactory queueFactory,
ScheduledExecutorService scheduledThreadPool,
PagingManager pagingManager,
boolean backup) throws Exception {
return null;
}
@Override
public void unregisterServer() throws Exception {
}
@Override
public void registerInJMX(ObjectName objectName, Object managedResource) throws Exception {
}
@Override
public void unregisterFromJMX(ObjectName objectName) throws Exception {
}
@Override
public void registerInRegistry(String resourceName, Object managedResource) {
}
@Override
public void unregisterFromRegistry(String resourceName) {
}
@Override
public void registerAddress(AddressInfo addressInfo) throws Exception {
}
@Override
public void unregisterAddress(SimpleString address) throws Exception {
}
@Override
public void registerQueue(Queue queue, SimpleString address, StorageManager storageManager) throws Exception {
}
@Override
public void unregisterQueue(SimpleString name, SimpleString address, RoutingType routingType) throws Exception {
}
@Override
public void registerAcceptor(Acceptor acceptor, TransportConfiguration configuration) throws Exception {
}
@Override
public void unregisterAcceptors() {
}
@Override
public void registerDivert(Divert divert, DivertConfiguration config) throws Exception {
}
@Override
public void unregisterDivert(SimpleString name, SimpleString address) throws Exception {
}
@Override
public void registerBroadcastGroup(BroadcastGroup broadcastGroup,
BroadcastGroupConfiguration configuration) throws Exception {
}
@Override
public void unregisterBroadcastGroup(String name) throws Exception {
}
@Override
public void registerBridge(Bridge bridge, BridgeConfiguration configuration) throws Exception {
}
@Override
public void unregisterBridge(String name) throws Exception {
}
@Override
public void registerCluster(ClusterConnection cluster,
ClusterConnectionConfiguration configuration) throws Exception {
}
@Override
public void unregisterCluster(String name) throws Exception {
}
@Override
public Object getResource(String resourceName) {
return null;
}
@Override
public Object[] getResources(Class<?> resourceType) {
return new Object[0];
}
@Override
public ServerMessage handleMessage(ServerMessage message) throws Exception {
return null;
}
@Override
public void start() throws Exception {
}
@Override
public void stop() throws Exception {
}
@Override
public boolean isStarted() {
return false;
}
@Override
public void sendNotification(Notification notification) throws Exception {
pendingNotifications.add(notification);
latch.countDown();
}
@Override
public void enableNotifications(boolean enable) {
}
@Override
public void addNotificationListener(NotificationListener listener) {
}
@Override
public void removeNotificationListener(NotificationListener listener) {
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.conf;
import org.junit.Assert;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.junit.Test;
import java.net.InetSocketAddress;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
public class TestYarnConfiguration {
@Test
public void testDefaultRMWebUrl() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
String rmWebUrl = WebAppUtils.getRMWebAppURLWithScheme(conf);
// shouldn't have a "/" on the end of the url as all the other URI routines
// specifically add slashes and Jetty doesn't handle double slashes.
Assert.assertNotSame("RM Web Url is not correct", "http://0.0.0.0:8088",
rmWebUrl);
// test it in HA scenario
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_IDS, "rm1, rm2");
conf.set("yarn.resourcemanager.webapp.address.rm1", "10.10.10.10:18088");
conf.set("yarn.resourcemanager.webapp.address.rm2", "20.20.20.20:28088");
String rmWebUrlinHA = WebAppUtils.getRMWebAppURLWithScheme(conf);
Assert.assertEquals("http://10.10.10.10:18088", rmWebUrlinHA);
YarnConfiguration conf2 = new YarnConfiguration();
conf2.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf2.set(YarnConfiguration.RM_HA_IDS, "rm1, rm2");
conf2.set("yarn.resourcemanager.hostname.rm1", "30.30.30.30");
conf2.set("yarn.resourcemanager.hostname.rm2", "40.40.40.40");
String rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2);
Assert.assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2);
rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2, 0);
Assert.assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2);
rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2, 1);
Assert.assertEquals("http://40.40.40.40:8088", rmWebUrlinHA2);
}
@Test
public void testRMWebUrlSpecified() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
// seems a bit odd, but right now we force the RM webapp to use the host from
// RM_ADDRESS and the port from RM_WEBAPP_ADDRESS
conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "fortesting:24543");
conf.set(YarnConfiguration.RM_ADDRESS, "rmtesting:9999");
String rmWebUrl = WebAppUtils.getRMWebAppURLWithScheme(conf);
String[] parts = rmWebUrl.split(":");
Assert.assertEquals("RM Web URL Port is incorrect", 24543,
Integer.parseInt(parts[parts.length - 1]));
Assert.assertNotSame(
"RM Web Url not resolved correctly. Should not be rmtesting",
"http://rmtesting:24543", rmWebUrl);
}
@Test
public void testGetSocketAddressForNMWithHA() {
YarnConfiguration conf = new YarnConfiguration();
// Set NM address
conf.set(YarnConfiguration.NM_ADDRESS, "0.0.0.0:1234");
// Set HA
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_ID, "rm1");
assertTrue(HAUtil.isHAEnabled(conf));
InetSocketAddress addr = conf.getSocketAddr(YarnConfiguration.NM_ADDRESS,
YarnConfiguration.DEFAULT_NM_ADDRESS,
YarnConfiguration.DEFAULT_NM_PORT);
assertEquals(1234, addr.getPort());
}
@Test
public void testGetSocketAddr() throws Exception {
YarnConfiguration conf;
InetSocketAddress resourceTrackerAddress;
//all default
conf = new YarnConfiguration();
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[0],
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
resourceTrackerAddress);
//with address
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "10.0.0.1");
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
"10.0.0.1",
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
resourceTrackerAddress);
//address and socket
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "10.0.0.2:5001");
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
"10.0.0.2",
5001),
resourceTrackerAddress);
//bind host only
conf = new YarnConfiguration();
conf.set(YarnConfiguration.RM_BIND_HOST, "10.0.0.3");
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
"10.0.0.3",
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
resourceTrackerAddress);
//bind host and address no port
conf.set(YarnConfiguration.RM_BIND_HOST, "0.0.0.0");
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "10.0.0.2");
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
"0.0.0.0",
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
resourceTrackerAddress);
//bind host and address with port
conf.set(YarnConfiguration.RM_BIND_HOST, "0.0.0.0");
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "10.0.0.2:5003");
resourceTrackerAddress = conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
new InetSocketAddress(
"0.0.0.0",
5003),
resourceTrackerAddress);
}
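// Summary of the precedence exercised above (a sketch of the observed behavior, not an API guarantee):
//   host: RM_BIND_HOST if set, otherwise the host part of RM_RESOURCE_TRACKER_ADDRESS (or its default)
//   port: the port of RM_RESOURCE_TRACKER_ADDRESS if one is given, otherwise the default tracker port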
@Test
public void testUpdateConnectAddr() throws Exception {
YarnConfiguration conf;
InetSocketAddress resourceTrackerConnectAddress;
InetSocketAddress serverAddress;
//no override, old behavior. Won't work on a host named "yo.yo.yo"
conf = new YarnConfiguration();
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "yo.yo.yo");
serverAddress = new InetSocketAddress(
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[0],
Integer.parseInt(YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[1]));
resourceTrackerConnectAddress = conf.updateConnectAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
serverAddress);
assertFalse(resourceTrackerConnectAddress.toString().startsWith("yo.yo.yo"));
//cause override with address
conf = new YarnConfiguration();
conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, "yo.yo.yo");
conf.set(YarnConfiguration.RM_BIND_HOST, "0.0.0.0");
serverAddress = new InetSocketAddress(
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[0],
Integer.parseInt(YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[1]));
resourceTrackerConnectAddress = conf.updateConnectAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
serverAddress);
assertTrue(resourceTrackerConnectAddress.toString().startsWith("yo.yo.yo"));
// tests that updateConnectAddr won't add an HA suffix to NM service address configurations
conf = new YarnConfiguration();
conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS, "yo.yo.yo");
conf.set(YarnConfiguration.NM_BIND_HOST, "0.0.0.0");
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_ID, "rm1");
serverAddress = new InetSocketAddress(
YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS.split(":")[0],
Integer.parseInt(YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS.split(":")[1]));
InetSocketAddress localizerAddress = conf.updateConnectAddr(
YarnConfiguration.NM_BIND_HOST,
YarnConfiguration.NM_LOCALIZER_ADDRESS,
YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS,
serverAddress);
assertTrue(localizerAddress.toString().startsWith("yo.yo.yo"));
assertNull(conf.get(
HAUtil.addSuffix(YarnConfiguration.NM_LOCALIZER_ADDRESS, "rm1")));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.metadata;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
import org.apache.druid.indexer.TaskInfo;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.joda.time.DateTime;
import org.skife.jdbi.v2.FoldController;
import org.skife.jdbi.v2.Folder3;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.exceptions.CallbackFailedException;
import org.skife.jdbi.v2.exceptions.StatementException;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.tweak.ResultSetMapper;
import org.skife.jdbi.v2.util.ByteArrayMapper;
import javax.annotation.Nullable;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
public abstract class SQLMetadataStorageActionHandler<EntryType, StatusType, LogType, LockType>
implements MetadataStorageActionHandler<EntryType, StatusType, LogType, LockType>
{
private static final EmittingLogger log = new EmittingLogger(SQLMetadataStorageActionHandler.class);
private final SQLMetadataConnector connector;
private final ObjectMapper jsonMapper;
private final TypeReference entryType;
private final TypeReference statusType;
private final TypeReference logType;
private final TypeReference lockType;
private final String entryTypeName;
private final String entryTable;
private final String logTable;
private final String lockTable;
private final TaskInfoMapper<EntryType, StatusType> taskInfoMapper;
public SQLMetadataStorageActionHandler(
final SQLMetadataConnector connector,
final ObjectMapper jsonMapper,
final MetadataStorageActionHandlerTypes<EntryType, StatusType, LogType, LockType> types,
final String entryTypeName,
final String entryTable,
final String logTable,
final String lockTable
)
{
this.connector = connector;
this.jsonMapper = jsonMapper;
this.entryType = types.getEntryType();
this.statusType = types.getStatusType();
this.logType = types.getLogType();
this.lockType = types.getLockType();
this.entryTypeName = entryTypeName;
this.entryTable = entryTable;
this.logTable = logTable;
this.lockTable = lockTable;
this.taskInfoMapper = new TaskInfoMapper<>(jsonMapper, entryType, statusType);
}
protected SQLMetadataConnector getConnector()
{
return connector;
}
protected ObjectMapper getJsonMapper()
{
return jsonMapper;
}
protected TypeReference getStatusType()
{
return statusType;
}
protected String getEntryTable()
{
return entryTable;
}
public TypeReference getEntryType()
{
return entryType;
}
@Override
public void insert(
final String id,
final DateTime timestamp,
final String dataSource,
final EntryType entry,
final boolean active,
final StatusType status
) throws EntryExistsException
{
try {
getConnector().retryWithHandle(
(HandleCallback<Void>) handle -> {
final String sql = StringUtils.format(
"INSERT INTO %s (id, created_date, datasource, payload, active, status_payload) "
+ "VALUES (:id, :created_date, :datasource, :payload, :active, :status_payload)",
getEntryTable()
);
handle.createStatement(sql)
.bind("id", id)
.bind("created_date", timestamp.toString())
.bind("datasource", dataSource)
.bind("payload", jsonMapper.writeValueAsBytes(entry))
.bind("active", active)
.bind("status_payload", jsonMapper.writeValueAsBytes(status))
.execute();
return null;
},
e -> getConnector().isTransientException(e) && !(isStatementException(e) && getEntry(id).isPresent())
);
}
catch (Exception e) {
if (isStatementException(e) && getEntry(id).isPresent()) {
throw new EntryExistsException(id, e);
} else {
throw new RuntimeException(e);
}
}
}
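// Illustrative rendering (a sketch; "druid_tasks" is a hypothetical entry table name):
//   INSERT INTO druid_tasks (id, created_date, datasource, payload, active, status_payload)
//   VALUES (:id, :created_date, :datasource, :payload, :active, :status_payload)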
@VisibleForTesting
protected static boolean isStatementException(Throwable e)
{
return e instanceof StatementException ||
(e instanceof CallbackFailedException && e.getCause() instanceof StatementException);
}
@Override
public boolean setStatus(final String entryId, final boolean active, final StatusType status)
{
return connector.retryWithHandle(
new HandleCallback<Boolean>()
{
@Override
public Boolean withHandle(Handle handle) throws Exception
{
return handle.createStatement(
StringUtils.format(
"UPDATE %s SET active = :active, status_payload = :status_payload WHERE id = :id AND active = TRUE",
entryTable
)
)
.bind("id", entryId)
.bind("active", active)
.bind("status_payload", jsonMapper.writeValueAsBytes(status))
.execute() == 1;
}
}
);
}
@Override
public Optional<EntryType> getEntry(final String entryId)
{
return connector.retryWithHandle(
new HandleCallback<Optional<EntryType>>()
{
@Override
public Optional<EntryType> withHandle(Handle handle) throws Exception
{
byte[] res = handle.createQuery(
StringUtils.format("SELECT payload FROM %s WHERE id = :id", entryTable)
)
.bind("id", entryId)
.map(ByteArrayMapper.FIRST)
.first();
return Optional.fromNullable(
res == null ? null : jsonMapper.readValue(res, entryType)
);
}
}
);
}
@Override
public Optional<StatusType> getStatus(final String entryId)
{
return connector.retryWithHandle(
new HandleCallback<Optional<StatusType>>()
{
@Override
public Optional<StatusType> withHandle(Handle handle) throws Exception
{
byte[] res = handle.createQuery(
StringUtils.format("SELECT status_payload FROM %s WHERE id = :id", entryTable)
)
.bind("id", entryId)
.map(ByteArrayMapper.FIRST)
.first();
return Optional.fromNullable(
res == null ? null : jsonMapper.readValue(res, statusType)
);
}
}
);
}
@Override
@Nullable
public TaskInfo<EntryType, StatusType> getTaskInfo(String entryId)
{
return connector.retryWithHandle(handle -> {
final String query = StringUtils.format(
"SELECT id, status_payload, payload, datasource, created_date FROM %s WHERE id = :id",
entryTable
);
return handle.createQuery(query)
.bind("id", entryId)
.map(taskInfoMapper)
.first();
});
}
@Override
public List<TaskInfo<EntryType, StatusType>> getCompletedTaskInfo(
DateTime timestamp,
@Nullable Integer maxNumStatuses,
@Nullable String dataSource
)
{
return getConnector().retryWithHandle(
handle -> {
final Query<Map<String, Object>> query = createCompletedTaskInfoQuery(
handle,
timestamp,
maxNumStatuses,
dataSource
);
return query.map(taskInfoMapper).list();
}
);
}
@Override
public List<TaskInfo<EntryType, StatusType>> getActiveTaskInfo(@Nullable String dataSource)
{
return getConnector().retryWithHandle(
handle -> {
final Query<Map<String, Object>> query = createActiveTaskInfoQuery(
handle,
dataSource
);
return query.map(taskInfoMapper).list();
}
);
}
private Query<Map<String, Object>> createActiveTaskInfoQuery(Handle handle, @Nullable String dataSource)
{
String sql = StringUtils.format(
"SELECT "
+ " id, "
+ " status_payload, "
+ " payload, "
+ " datasource, "
+ " created_date "
+ "FROM "
+ " %s "
+ "WHERE "
+ getWhereClauseForActiveStatusesQuery(dataSource)
+ "ORDER BY created_date",
entryTable
);
Query<Map<String, Object>> query = handle.createQuery(sql);
if (dataSource != null) {
query = query.bind("ds", dataSource);
}
return query;
}
private String getWhereClauseForActiveStatusesQuery(String dataSource)
{
String sql = StringUtils.format("active = TRUE ");
if (dataSource != null) {
sql += " AND datasource = :ds ";
}
return sql;
}
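// Illustrative output of the clause above (a minimal sketch):
//   getWhereClauseForActiveStatusesQuery(null)   ->  "active = TRUE "
//   getWhereClauseForActiveStatusesQuery("wiki") ->  "active = TRUE  AND datasource = :ds "  (":ds" is bound by the caller)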
static class TaskInfoMapper<EntryType, StatusType> implements ResultSetMapper<TaskInfo<EntryType, StatusType>>
{
private final ObjectMapper objectMapper;
private final TypeReference<EntryType> entryType;
private final TypeReference<StatusType> statusType;
TaskInfoMapper(ObjectMapper objectMapper, TypeReference<EntryType> entryType, TypeReference<StatusType> statusType)
{
this.objectMapper = objectMapper;
this.entryType = entryType;
this.statusType = statusType;
}
@Override
public TaskInfo<EntryType, StatusType> map(int index, ResultSet resultSet, StatementContext context)
throws SQLException
{
final TaskInfo<EntryType, StatusType> taskInfo;
EntryType task;
StatusType status;
try {
task = objectMapper.readValue(resultSet.getBytes("payload"), entryType);
}
catch (IOException e) {
log.error(e, "Encountered exception while deserializing task payload, setting task to null");
task = null;
}
try {
status = objectMapper.readValue(resultSet.getBytes("status_payload"), statusType);
}
catch (IOException e) {
log.error(e, "Encountered exception while deserializing task status_payload");
throw new SQLException(e);
}
taskInfo = new TaskInfo<>(
resultSet.getString("id"),
DateTimes.of(resultSet.getString("created_date")),
status,
resultSet.getString("datasource"),
task
);
return taskInfo;
}
}
protected abstract Query<Map<String, Object>> createCompletedTaskInfoQuery(
Handle handle,
DateTime timestamp,
@Nullable Integer maxNumStatuses,
@Nullable String dataSource
);
@Override
public boolean addLock(final String entryId, final LockType lock)
{
return connector.retryWithHandle(
new HandleCallback<Boolean>()
{
@Override
public Boolean withHandle(Handle handle) throws Exception
{
return addLock(handle, entryId, lock);
}
}
);
}
private boolean addLock(Handle handle, String entryId, LockType lock) throws JsonProcessingException
{
final String statement = StringUtils.format(
"INSERT INTO %1$s (%2$s_id, lock_payload) VALUES (:entryId, :payload)",
lockTable, entryTypeName
);
return handle.createStatement(statement)
.bind("entryId", entryId)
.bind("payload", jsonMapper.writeValueAsBytes(lock))
.execute() == 1;
}
@Override
public boolean replaceLock(final String entryId, final long oldLockId, final LockType newLock)
{
return connector.retryTransaction(
(handle, transactionStatus) -> {
int numDeletedRows = removeLock(handle, oldLockId);
if (numDeletedRows != 1) {
transactionStatus.setRollbackOnly();
final String message = numDeletedRows == 0 ?
StringUtils.format("Cannot find lock[%d]", oldLockId) :
StringUtils.format("Found multiple locks for lockId[%d]", oldLockId);
throw new RuntimeException(message);
}
return addLock(handle, entryId, newLock);
},
3,
SQLMetadataConnector.DEFAULT_MAX_TRIES
);
}
@Override
public void removeLock(final long lockId)
{
connector.retryWithHandle(
new HandleCallback<Void>()
{
@Override
public Void withHandle(Handle handle)
{
removeLock(handle, lockId);
return null;
}
}
);
}
private int removeLock(Handle handle, long lockId)
{
return handle.createStatement(StringUtils.format("DELETE FROM %s WHERE id = :id", lockTable))
.bind("id", lockId)
.execute();
}
@Override
public boolean addLog(final String entryId, final LogType log)
{
return connector.retryWithHandle(
new HandleCallback<Boolean>()
{
@Override
public Boolean withHandle(Handle handle) throws Exception
{
return handle.createStatement(
StringUtils.format(
"INSERT INTO %1$s (%2$s_id, log_payload) VALUES (:entryId, :payload)",
logTable, entryTypeName
)
)
.bind("entryId", entryId)
.bind("payload", jsonMapper.writeValueAsBytes(log))
.execute() == 1;
}
}
);
}
@Override
public List<LogType> getLogs(final String entryId)
{
return connector.retryWithHandle(
new HandleCallback<List<LogType>>()
{
@Override
public List<LogType> withHandle(Handle handle)
{
return handle
.createQuery(
StringUtils.format(
"SELECT log_payload FROM %1$s WHERE %2$s_id = :entryId",
logTable, entryTypeName
)
)
.bind("entryId", entryId)
.map(ByteArrayMapper.FIRST)
.fold(
new ArrayList<>(),
(List<LogType> list, byte[] bytes, FoldController control, StatementContext ctx) -> {
try {
list.add(jsonMapper.readValue(bytes, logType));
return list;
}
catch (IOException e) {
log.makeAlert(e, "Failed to deserialize log")
.addData("entryId", entryId)
.addData("payload", StringUtils.fromUtf8(bytes))
.emit();
throw new SQLException(e);
}
}
);
}
}
);
}
@Override
public Map<Long, LockType> getLocks(final String entryId)
{
return connector.retryWithHandle(
new HandleCallback<Map<Long, LockType>>()
{
@Override
public Map<Long, LockType> withHandle(Handle handle)
{
return handle.createQuery(
StringUtils.format(
"SELECT id, lock_payload FROM %1$s WHERE %2$s_id = :entryId",
lockTable, entryTypeName
)
)
.bind("entryId", entryId)
.map(
new ResultSetMapper<Pair<Long, LockType>>()
{
@Override
public Pair<Long, LockType> map(int index, ResultSet r, StatementContext ctx)
throws SQLException
{
try {
return Pair.of(
r.getLong("id"),
jsonMapper.readValue(
r.getBytes("lock_payload"),
lockType
)
);
}
catch (IOException e) {
log.makeAlert(e, "Failed to deserialize " + lockType.getType())
.addData("id", r.getLong("id"))
.addData(
"lockPayload", StringUtils.fromUtf8(r.getBytes("lock_payload"))
)
.emit();
throw new SQLException(e);
}
}
}
)
.fold(
Maps.newLinkedHashMap(),
new Folder3<Map<Long, LockType>, Pair<Long, LockType>>()
{
@Override
public Map<Long, LockType> fold(
Map<Long, LockType> accumulator,
Pair<Long, LockType> lock,
FoldController control,
StatementContext ctx
)
{
accumulator.put(lock.lhs, lock.rhs);
return accumulator;
}
}
);
}
}
);
}
@Override
@Nullable
public Long getLockId(String entryId, LockType lock)
{
return getLocks(entryId).entrySet().stream()
.filter(entry -> entry.getValue().equals(lock))
.map(Entry::getKey)
.findAny()
.orElse(null);
}
}
|
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.portfolio.ui.artefacts.collect;
import java.util.Date;
import org.olat.core.CoreSpringFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.closablewrapper.CloseableCalloutWindowController;
import org.olat.core.gui.control.generic.wizard.Step;
import org.olat.core.gui.control.generic.wizard.StepRunnerCallback;
import org.olat.core.gui.control.generic.wizard.StepsMainRunController;
import org.olat.core.util.vfs.VFSContainer;
import org.olat.modules.webFeed.portfolio.EPCreateLiveBlogArtefactStep00;
import org.olat.modules.webFeed.portfolio.LiveBlogArtefact;
import org.olat.portfolio.EPArtefactHandler;
import org.olat.portfolio.PortfolioModule;
import org.olat.portfolio.manager.EPFrontendManager;
import org.olat.portfolio.model.artefacts.AbstractArtefact;
import org.olat.portfolio.model.artefacts.EPTextArtefact;
import org.olat.portfolio.model.artefacts.FileArtefact;
import org.olat.portfolio.model.structel.PortfolioStructure;
/**
* Description:<br>
 * Overlay controller that offers links for the different kinds of artefact creation
 * and triggers the corresponding collection workflows.
 *
 * Fires a Done event once an artefact has been added.
* <P>
* Initial Date: 26.07.2010 <br>
*
* @author Roman Haag, roman.haag@frentix.com, http://www.frentix.com
*/
public class EPAddArtefactController extends BasicController {
private Link uploadBtn;
private Link liveBlogBtn;
private EPFrontendManager ePFMgr;
private VelocityContainer addPage = null;
private Link textBtn;
private Link addBtn;
private StepsMainRunController collectStepsCtrl;
private PortfolioModule portfolioModule;
private VFSContainer vfsTemp;
private VelocityContainer addLinkVC;
private CloseableCalloutWindowController calloutCtr;
private PortfolioStructure preSelectedStruct;
public EPAddArtefactController(UserRequest ureq, WindowControl wControl) {
super(ureq, wControl);
ePFMgr = (EPFrontendManager) CoreSpringFactory.getBean("epFrontendManager");
portfolioModule = (PortfolioModule) CoreSpringFactory.getBean("portfolioModule");
addLinkVC = createVelocityContainer("addLink");
addBtn = LinkFactory.createButton("add.artefact", addLinkVC, this);
addBtn.setElementCssClass("o_sel_add_artfeact");
putInitialPanel(addLinkVC);
}
private void initAddPageVC(){
addPage = createVelocityContainer("addpanel");
EPArtefactHandler<?> textHandler = portfolioModule.getArtefactHandler(EPTextArtefact.TEXT_ARTEFACT_TYPE);
if (textHandler != null && textHandler.isEnabled()) {
textBtn = LinkFactory.createLink("add.text.artefact", addPage, this);
textBtn.setElementCssClass("o_sel_add_text_artfeact");
}
EPArtefactHandler<?> fileHandler = portfolioModule.getArtefactHandler(FileArtefact.FILE_ARTEFACT_TYPE);
if (fileHandler != null && fileHandler.isEnabled()) {
uploadBtn = LinkFactory.createLink("add.artefact.upload", addPage, this);
uploadBtn.setElementCssClass("o_sel_add_upload_artfeact");
}
EPArtefactHandler<?> liveblogHandler = portfolioModule.getArtefactHandler(LiveBlogArtefact.TYPE);
if (liveblogHandler != null && liveblogHandler.isEnabled()) {
liveBlogBtn = LinkFactory.createLink("add.artefact.liveblog", addPage, this);
liveBlogBtn.setCustomDisplayText(translate("add.artefact.blog"));
liveBlogBtn.setElementCssClass("o_sel_add_liveblog_artfeact");
}
}
private void initAddLinkPopup(UserRequest ureq) {
if (addPage == null) initAddPageVC();
String title = translate("add.artefact");
removeAsListenerAndDispose(calloutCtr);
calloutCtr = new CloseableCalloutWindowController(ureq, getWindowControl(), addPage, addBtn, title, true, null);
listenTo(calloutCtr);
calloutCtr.activate();
}
public PortfolioStructure getPreSelectedStruct() {
return preSelectedStruct;
}
public void setPreSelectedStruct(PortfolioStructure preSelectedStruct) {
this.preSelectedStruct = preSelectedStruct;
}
private void closeAddLinkPopup(){
if (calloutCtr != null) {
calloutCtr.deactivate();
removeAsListenerAndDispose(calloutCtr);
calloutCtr = null;
}
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
* org.olat.core.gui.components.Component,
* org.olat.core.gui.control.Event)
*/
@Override
protected void event(UserRequest ureq, Component source, Event event) {
if (source == addBtn) {
if (calloutCtr==null){
initAddLinkPopup(ureq);
addBtn.setDirty(false);
} else {
closeAddLinkPopup();
}
} else {
// close on all clicked links in the popup
closeAddLinkPopup();
if (source == textBtn) {
prepareNewTextArtefactWizzard(ureq);
} else if (source == uploadBtn) {
prepareFileArtefactWizzard(ureq);
} else if (source == liveBlogBtn) {
prepareNewLiveBlogArtefactWizzard(ureq);
}
}
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
* org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
*/
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
if (source == collectStepsCtrl && event == Event.CANCELLED_EVENT) {
disposeTempDir();
getWindowControl().pop();
removeAsListenerAndDispose(collectStepsCtrl);
}
if (source == collectStepsCtrl && event == Event.CHANGED_EVENT) {
getWindowControl().pop();
removeAsListenerAndDispose(collectStepsCtrl);
			// manually dispose the temp vfsContainer here :: FXOLAT-386
			// this EPAddArtefactController gets disposed "too late"
			// (vfsTemp can change in between, so only the last one gets deleted)
disposeTempDir();
showInfo("collect.success.text.artefact");
fireEvent(ureq, Event.DONE_EVENT);
}
if (source == calloutCtr && event == CloseableCalloutWindowController.CLOSE_WINDOW_EVENT) {
removeAsListenerAndDispose(calloutCtr);
calloutCtr = null;
}
}
/**
	 * Prepare a new text artefact and open the wizard, initialized with a
	 * special first step for text artefacts.
*
* @param ureq
*/
private void prepareNewTextArtefactWizzard(UserRequest ureq) {
EPArtefactHandler<?> artHandler = portfolioModule.getArtefactHandler(EPTextArtefact.TEXT_ARTEFACT_TYPE);
AbstractArtefact artefact1 = artHandler.createArtefact();
artefact1.setAuthor(getIdentity());
artefact1.setSource(translate("text.artefact.source.info"));
artefact1.setCollectionDate(new Date());
artefact1.setSignature(-20);
vfsTemp = ePFMgr.getArtefactsTempContainer(getIdentity());
Step start = new EPCreateTextArtefactStep00(ureq, artefact1, preSelectedStruct, vfsTemp);
StepRunnerCallback finish = new EPArtefactWizzardStepCallback(vfsTemp);
collectStepsCtrl = new StepsMainRunController(ureq, getWindowControl(), start, finish, null,
translate("create.text.artefact.wizzard.title"), "o_sel_artefact_add_wizard o_sel_artefact_add_text_wizard");
listenTo(collectStepsCtrl);
getWindowControl().pushAsModalDialog(collectStepsCtrl.getInitialComponent());
}
/**
	 * Prepare a file artefact and open the wizard, initialized with a special
	 * first step for file artefacts.
*
* @param ureq
*/
private void prepareFileArtefactWizzard(UserRequest ureq) {
EPArtefactHandler<?> artHandler = portfolioModule.getArtefactHandler(FileArtefact.FILE_ARTEFACT_TYPE);
AbstractArtefact artefact1 = artHandler.createArtefact();
artefact1.setAuthor(getIdentity());
artefact1.setSource(translate("file.artefact.source.info"));
artefact1.setCollectionDate(new Date());
artefact1.setSignature(-30);
vfsTemp = ePFMgr.getArtefactsTempContainer(getIdentity());
Step start = new EPCreateFileArtefactStep00(ureq, artefact1, preSelectedStruct, vfsTemp);
StepRunnerCallback finish = new EPArtefactWizzardStepCallback(vfsTemp);
collectStepsCtrl = new StepsMainRunController(ureq, getWindowControl(), start, finish, null,
translate("create.file.artefact.wizzard.title"), "o_sel_artefact_add_wizard o_sel_artefact_add_file_wizard");
listenTo(collectStepsCtrl);
getWindowControl().pushAsModalDialog(collectStepsCtrl.getInitialComponent());
}
private void prepareNewLiveBlogArtefactWizzard(UserRequest ureq) {
EPArtefactHandler<?> artHandler = portfolioModule.getArtefactHandler(LiveBlogArtefact.TYPE);
AbstractArtefact artefact1 = artHandler.createArtefact();
artefact1.setAuthor(getIdentity());
artefact1.setCollectionDate(new Date());
artefact1.setSignature(60); // preset as signed by 60%
Step start = new EPCreateLiveBlogArtefactStep00(ureq, preSelectedStruct, artefact1);
		StepRunnerCallback finish = new EPArtefactWizzardStepCallback(); // no vfsTemp needed; a blog doesn't require a temp directory
collectStepsCtrl = new StepsMainRunController(ureq, getWindowControl(), start, finish, null,
translate("create.blog.artefact.wizzard.title"), "o_sel_artefact_add_wizard o_sel_artefact_add_blog_wizard");
listenTo(collectStepsCtrl);
getWindowControl().pushAsModalDialog(collectStepsCtrl.getInitialComponent());
}
/**
* FXOLAT-386
	 * Disposes the temp vfsContainer from a file artefact upload.
*/
private void disposeTempDir(){
if(vfsTemp != null ) {
vfsTemp.delete();
vfsTemp = null;
}
}
/**
* @see org.olat.core.gui.control.DefaultController#doDispose()
*/
@Override
protected void doDispose() {
disposeTempDir();
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.evaluate;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.components.BorderLayoutPanel;
import com.intellij.xdebugger.*;
import com.intellij.xdebugger.evaluation.EvaluationMode;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.impl.actions.XDebuggerActions;
import com.intellij.xdebugger.impl.breakpoints.XExpressionImpl;
import com.intellij.xdebugger.impl.settings.XDebuggerSettingManagerImpl;
import com.intellij.xdebugger.impl.ui.XDebugSessionTab;
import com.intellij.xdebugger.impl.ui.XDebuggerEditorBase;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTreePanel;
import com.intellij.xdebugger.impl.ui.tree.nodes.EvaluatingExpressionRootNode;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.TreeNode;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
/**
* @author nik
*/
public class XDebuggerEvaluationDialog extends DialogWrapper {
public static final DataKey<XDebuggerEvaluationDialog> KEY = DataKey.create("DEBUGGER_EVALUATION_DIALOG");
private final JPanel myMainPanel;
private final JPanel myResultPanel;
private final XDebuggerTreePanel myTreePanel;
private EvaluationInputComponent myInputComponent;
private final XDebugSession mySession;
private final XDebuggerEditorsProvider myEditorsProvider;
private EvaluationMode myMode;
private XSourcePosition mySourcePosition;
private final SwitchModeAction mySwitchModeAction;
private final boolean myIsCodeFragmentEvaluationSupported;
public XDebuggerEvaluationDialog(@NotNull XDebugSession session,
@NotNull XDebuggerEditorsProvider editorsProvider,
@NotNull XDebuggerEvaluator evaluator,
@NotNull XExpression text,
@Nullable XSourcePosition sourcePosition) {
super(WindowManager.getInstance().getFrame(session.getProject()), true);
mySession = session;
myEditorsProvider = editorsProvider;
mySourcePosition = sourcePosition;
setModal(false);
setOKButtonText(XDebuggerBundle.message("xdebugger.button.evaluate"));
setCancelButtonText(XDebuggerBundle.message("xdebugger.evaluate.dialog.close"));
mySession.addSessionListener(new XDebugSessionAdapter() {
@Override
public void sessionStopped() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
close(CANCEL_EXIT_CODE);
}
});
}
@Override
public void stackFrameChanged() {
updateSourcePosition();
}
@Override
public void sessionPaused() {
updateSourcePosition();
}
}, myDisposable);
myTreePanel = new XDebuggerTreePanel(session.getProject(), editorsProvider, myDisposable, sourcePosition, XDebuggerActions.EVALUATE_DIALOG_TREE_POPUP_GROUP,
((XDebugSessionImpl)session).getValueMarkers());
myResultPanel = JBUI.Panels.simplePanel()
.addToTop(new JLabel(XDebuggerBundle.message("xdebugger.evaluate.label.result")))
.addToCenter(myTreePanel.getMainPanel());
myMainPanel = new EvaluationMainPanel();
mySwitchModeAction = new SwitchModeAction();
new AnAction(){
@Override
public void actionPerformed(AnActionEvent e) {
doOKAction();
addToWatches();
}
}.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.CTRL_DOWN_MASK | InputEvent.SHIFT_DOWN_MASK)), getRootPane(), myDisposable);
new AnAction() {
@Override
public void actionPerformed(AnActionEvent e) {
IdeFocusManager.getInstance(mySession.getProject()).requestFocus(myTreePanel.getTree(), true);
}
}.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.ALT_DOWN_MASK)), getRootPane(),
myDisposable);
Condition<TreeNode> rootFilter = new Condition<TreeNode>() {
@Override
public boolean value(TreeNode node) {
return node.getParent() instanceof EvaluatingExpressionRootNode;
}
};
myTreePanel.getTree().expandNodesOnLoad(rootFilter);
myTreePanel.getTree().selectNodeOnLoad(rootFilter);
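    // Added note on mode selection: start from the persisted dialog mode, fall back to EXPRESSION
    // when code-fragment evaluation is unsupported, and prefer CODE_FRAGMENT when the incoming
    // expression is itself a code fragment and fragments are supported.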
EvaluationMode mode = XDebuggerSettingManagerImpl.getInstanceImpl().getGeneralSettings().getEvaluationDialogMode();
myIsCodeFragmentEvaluationSupported = evaluator.isCodeFragmentEvaluationSupported();
if (mode == EvaluationMode.CODE_FRAGMENT && !myIsCodeFragmentEvaluationSupported) {
mode = EvaluationMode.EXPRESSION;
}
if (mode == EvaluationMode.EXPRESSION && text.getMode() == EvaluationMode.CODE_FRAGMENT && myIsCodeFragmentEvaluationSupported) {
mode = EvaluationMode.CODE_FRAGMENT;
}
switchToMode(mode, text);
init();
}
@Override
protected void dispose() {
super.dispose();
myMainPanel.removeAll();
}
private void updateSourcePosition() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
mySourcePosition = mySession.getCurrentPosition();
getInputEditor().setSourcePosition(mySourcePosition);
}
});
}
@Override
protected void doOKAction() {
evaluate();
}
@Override
protected void createDefaultActions() {
super.createDefaultActions();
myOKAction = new OkAction(){
@Override
public void actionPerformed(ActionEvent e) {
super.actionPerformed(e);
if ((e.getModifiers() & (InputEvent.SHIFT_MASK | InputEvent.CTRL_MASK)) == (InputEvent.SHIFT_MASK | InputEvent.CTRL_MASK)) {
addToWatches();
}
}
};
}
private void addToWatches() {
if (myMode == EvaluationMode.EXPRESSION) {
XExpression expression = getInputEditor().getExpression();
if (!XDebuggerUtilImpl.isEmptyExpression(expression)) {
XDebugSessionTab tab = ((XDebugSessionImpl)mySession).getSessionTab();
if (tab != null) {
tab.getWatchesView().addWatchExpression(expression, -1, true);
getInputEditor().requestFocusInEditor();
}
}
}
}
@NotNull
@Override
protected Action[] createActions() {
if (myIsCodeFragmentEvaluationSupported) {
return new Action[]{getOKAction(), mySwitchModeAction, getCancelAction()};
}
return super.createActions();
}
@Override
protected String getHelpId() {
return "debugging.debugMenu.evaluate";
}
@Override
protected JButton createJButtonForAction(Action action) {
final JButton button = super.createJButtonForAction(action);
if (action == mySwitchModeAction) {
int width1 = new JButton(getSwitchButtonText(EvaluationMode.EXPRESSION)).getPreferredSize().width;
int width2 = new JButton(getSwitchButtonText(EvaluationMode.CODE_FRAGMENT)).getPreferredSize().width;
final Dimension size = new Dimension(Math.max(width1, width2), button.getPreferredSize().height);
button.setMinimumSize(size);
button.setPreferredSize(size);
}
return button;
}
public XExpression getExpression() {
return getInputEditor().getExpression();
}
private static String getSwitchButtonText(EvaluationMode mode) {
return mode != EvaluationMode.EXPRESSION
? XDebuggerBundle.message("button.text.expression.mode")
: XDebuggerBundle.message("button.text.code.fragment.mode");
}
private void switchToMode(EvaluationMode mode, XExpression text) {
if (myMode == mode) return;
myMode = mode;
if (mode == EvaluationMode.EXPRESSION) {
text = new XExpressionImpl(StringUtil.convertLineSeparators(text.getExpression(), " "), text.getLanguage(), text.getCustomInfo());
}
myInputComponent = createInputComponent(mode, text);
myMainPanel.removeAll();
myInputComponent.addComponent(myMainPanel, myResultPanel);
setTitle(myInputComponent.getTitle());
mySwitchModeAction.putValue(Action.NAME, getSwitchButtonText(mode));
getInputEditor().requestFocusInEditor();
}
private XDebuggerEditorBase getInputEditor() {
return myInputComponent.getInputEditor();
}
private EvaluationInputComponent createInputComponent(EvaluationMode mode, XExpression text) {
final Project project = mySession.getProject();
text = XExpressionImpl.changeMode(text, mode);
if (mode == EvaluationMode.EXPRESSION) {
return new ExpressionInputComponent(project, myEditorsProvider, mySourcePosition, text, myDisposable);
}
else {
return new CodeFragmentInputComponent(project, myEditorsProvider, mySourcePosition, text,
getDimensionServiceKey() + ".splitter", myDisposable);
}
}
private void evaluate() {
final XDebuggerEditorBase inputEditor = getInputEditor();
int offset = -1;
//try to save caret position
Editor editor = inputEditor.getEditor();
if (editor != null) {
offset = editor.getCaretModel().getOffset();
}
final XDebuggerTree tree = myTreePanel.getTree();
tree.markNodesObsolete();
tree.setRoot(new EvaluatingExpressionRootNode(this, tree), false);
myResultPanel.invalidate();
//editor is already changed
editor = inputEditor.getEditor();
//selectAll puts focus back
inputEditor.selectAll();
//try to restore caret position and clear selection
if (offset >= 0 && editor != null) {
offset = Math.min(editor.getDocument().getTextLength(), offset);
editor.getCaretModel().moveToOffset(offset);
editor.getSelectionModel().setSelection(offset, offset);
}
}
@Override
protected String getDimensionServiceKey() {
return "#xdebugger.evaluate";
}
@Override
protected JComponent createCenterPanel() {
return myMainPanel;
}
public void startEvaluation(@NotNull XDebuggerEvaluator.XEvaluationCallback evaluationCallback) {
final XDebuggerEditorBase inputEditor = getInputEditor();
inputEditor.saveTextInHistory();
XExpression expression = inputEditor.getExpression();
XDebuggerEvaluator evaluator = mySession.getDebugProcess().getEvaluator();
if (evaluator == null) {
evaluationCallback.errorOccurred(XDebuggerBundle.message("xdebugger.evaluate.stack.frame.has.not.evaluator"));
}
else {
evaluator.evaluate(expression, evaluationCallback, null);
}
}
public void evaluationDone() {
mySession.rebuildViews();
}
@Override
public JComponent getPreferredFocusedComponent() {
return getInputEditor().getPreferredFocusedComponent();
}
private class SwitchModeAction extends AbstractAction {
@Override
public void actionPerformed(ActionEvent e) {
XExpression text = getInputEditor().getExpression();
EvaluationMode newMode = (myMode == EvaluationMode.EXPRESSION) ? EvaluationMode.CODE_FRAGMENT : EvaluationMode.EXPRESSION;
// remember only on user selection
XDebuggerSettingManagerImpl.getInstanceImpl().getGeneralSettings().setEvaluationDialogMode(newMode);
switchToMode(newMode, text);
}
}
private class EvaluationMainPanel extends BorderLayoutPanel implements DataProvider {
@Nullable
@Override
public Object getData(@NonNls String dataId) {
if (KEY.is(dataId)) {
return XDebuggerEvaluationDialog.this;
}
return null;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.comm;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import org.apache.giraph.ImmutableClassesGiraphConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.security.token.Token;
import org.apache.log4j.Logger;
import org.apache.giraph.bsp.CentralizedServiceWorker;
import org.apache.giraph.graph.GraphState;
import org.apache.giraph.hadoop.BspPolicyProvider;
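// Added note: the /*if[...]*/ ... /*end[...]*/ blocks below appear to be conditional-compilation
// markers for building against different Hadoop versions; only one branch is active in any build.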
/*if[HADOOP_NON_INTERVERSIONED_RPC]
else[HADOOP_NON_INTERVERSIONED_RPC]*/
import org.apache.hadoop.ipc.ProtocolSignature;
/*end[HADOOP_NON_INTERVERSIONED_RPC]*/
/**
* Used to implement abstract {@link BasicRPCCommunications} methods.
*
* @param <I> Vertex id
* @param <V> Vertex data
* @param <E> Edge data
* @param <M> Message data
*/
public class SecureRPCCommunications<I extends WritableComparable,
V extends Writable, E extends Writable, M extends Writable>
extends BasicRPCCommunications<I, V, E, M, Token<JobTokenIdentifier>> {
/** Class logger */
public static final Logger LOG =
Logger.getLogger(SecureRPCCommunications.class);
/**
* Constructor.
*
* @param context Context to be saved.
* @param service Server worker.
* @param configuration Configuration.
* @param graphState Graph state from infrastructure.
* @throws IOException
* @throws InterruptedException
*/
public SecureRPCCommunications(Mapper<?, ?, ?, ?>.Context context,
CentralizedServiceWorker<I, V, E, M> service,
ImmutableClassesGiraphConfiguration configuration,
GraphState<I, V, E, M> graphState) throws
IOException, InterruptedException {
super(context, configuration, service);
}
/**
* Create the job token.
*
* @return Job token.
*/
protected Token<JobTokenIdentifier> createJobToken() throws IOException {
String localJobTokenFile = System.getenv().get(
UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
if (localJobTokenFile != null) {
JobConf jobConf = new JobConf(conf);
Credentials credentials =
TokenCache.loadTokens(localJobTokenFile, jobConf);
return TokenCache.getJobToken(credentials);
}
return null;
}
/*if[HADOOP_NON_INTERVERSIONED_RPC]
else[HADOOP_NON_INTERVERSIONED_RPC]*/
/**
* Get the Protocol Signature for the given protocol,
* client version and method.
*
* @param protocol Protocol.
* @param clientVersion Version of Client.
* @param clientMethodsHash Hash of Client methods.
* @return ProtocolSignature for input parameters.
*/
public ProtocolSignature getProtocolSignature(
String protocol,
long clientVersion,
int clientMethodsHash) throws IOException {
return new ProtocolSignature(VERSION_ID, null);
}
/*end[HADOOP_NON_INTERVERSIONED_RPC]*/
/**
* Get the RPC server.
*
* @param myAddress My address.
* @param numHandlers Number of handler threads.
* @param jobId Job id.
   * @param jt Job token identifier.
* @return RPC server.
*/
@Override
protected RPC.Server getRPCServer(
InetSocketAddress myAddress, int numHandlers, String jobId,
Token<JobTokenIdentifier> jt) throws IOException {
@SuppressWarnings("deprecation")
JobTokenSecretManager jobTokenSecretManager =
new JobTokenSecretManager();
if (jt != null) { //could be null in the case of some unit tests:
// TODO: unit tests should use SecureRPCCommunications or
// RPCCommunications
// TODO: remove jt from RPCCommunications.
jobTokenSecretManager.addTokenForJob(jobId, jt);
if (LOG.isInfoEnabled()) {
LOG.info("getRPCServer: Added jobToken " + jt);
}
}
/*if[HADOOP_1_SECURITY]
// Hadoop 1-style authorization.
Server server = RPC.getServer(this,
myAddress.getHostName(), myAddress.getPort(),
numHandlers, false, conf, jobTokenSecretManager);
String hadoopSecurityAuthorization =
ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG;
if (conf.getBoolean(hadoopSecurityAuthorization, false)) {
ServiceAuthorizationManager.refresh(conf, new BspPolicyProvider());
}
else[HADOOP_1_SECURITY]*/
// Hadoop 2+-style authorization.
Server server = RPC.getServer(this,
myAddress.getHostName(), myAddress.getPort(),
numHandlers, false, conf);
String hadoopSecurityAuthorization =
ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG;
if (conf.getBoolean(hadoopSecurityAuthorization, false)) {
ServiceAuthorizationManager sam = new ServiceAuthorizationManager();
sam.refresh(conf, new BspPolicyProvider());
}
/*end[HADOOP_1_SECURITY]*/
return server;
}
/**
* Get the RPC proxy.
*
* @param addr Address of the RPC server.
* @param jobId Job id.
* @param jt Job token.
* @return Proxy of the RPC server.
*/
@Override
@SuppressWarnings("unchecked")
protected CommunicationsInterface<I, V, E, M> getRPCProxy(
final InetSocketAddress addr,
String jobId,
Token<JobTokenIdentifier> jt)
throws IOException, InterruptedException {
final Configuration config = new Configuration(conf);
if (jt == null) {
return (CommunicationsInterface<I, V, E, M>) RPC.getProxy(
CommunicationsInterface.class, VERSION_ID, addr, config);
}
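    // Added note: when a job token is present, bind it to this server's address, attach it to both
    // the current user and a remote "owner" user created for the job, and build the proxy inside
    // owner.doAs() so the RPC connection authenticates with the job token.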
jt.setService(new Text(addr.getAddress().getHostAddress() + ":" +
addr.getPort()));
UserGroupInformation current = UserGroupInformation.getCurrentUser();
current.addToken(jt);
UserGroupInformation owner =
UserGroupInformation.createRemoteUser(jobId);
owner.addToken(jt);
return
owner.doAs(new PrivilegedExceptionAction<
CommunicationsInterface<I, V, E, M>>() {
@Override
@SuppressWarnings("unchecked")
public CommunicationsInterface<I, V, E, M> run() throws Exception {
// All methods in CommunicationsInterface will be used for RPC
return (CommunicationsInterface<I, V, E, M>) RPC.getProxy(
CommunicationsInterface.class, VERSION_ID, addr, config);
}
});
}
}
|
|
package weka.classifiers.lazy.AM.label;
import com.google.common.annotations.VisibleForTesting;
import weka.core.Instance;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.StringJoiner;
/**
* Analogical Modeling uses labels composed of boolean vectors in order to group
 * instances into subcontexts and subcontexts into supracontexts. Training set
* instances are assigned labels by comparing them with the instance to be
* classified and encoding matched attributes and mismatched attributes in a
* boolean vector.
*
* This class is used to assign context labels to training instances by
* comparison with the instance being classified.
*
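 * For illustration (an assumption based on the encoding read back by
 * {@link #getContextString(Label)}, where a 0 bit marks a matching attribute):
 * comparing a test instance "A B C" with a training instance "A X C" would
 * produce the label 010, since only the middle attribute differs.
 *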
* @author Nathan Glenn
*/
public abstract class Labeler {
private final boolean ignoreUnknowns;
private final MissingDataCompare mdc;
private final Instance testInstance;
private final Set<Integer> ignoreSet;
/**
* The default (max) size of a label partition
*/
private static final int PARTITION_SIZE = 5;
/**
* @param test Instance being classified
* @param ignoreUnknowns true if attributes with undefined values in the test item should be ignored; false if not.
* @param mdc Specifies how to compare missing attributes
*/
public Labeler(Instance test, boolean ignoreUnknowns, MissingDataCompare mdc) {
this.mdc = mdc;
this.testInstance = test;
this.ignoreUnknowns = ignoreUnknowns;
Set<Integer> ignoreSet = new HashSet<>();
if (ignoreUnknowns) {
int length = testInstance.numAttributes() - 1;
for (int i = 0; i < length; i++) {
if (testInstance.isMissing(i)) ignoreSet.add(i);
}
}
this.ignoreSet = Collections.unmodifiableSet(ignoreSet);
}
/**
* @return The cardinality of the generated labels, or how many instance attributes are considered during labeling.
*/
public int getCardinality() {
return testInstance.numAttributes() - ignoreSet.size() - 1;
}
/**
* Calculate the label cardinality for a given test instance
*
* @param testInstance instance to assign labels
* @param ignoreUnknowns true if unknown values are ignored; false otherwise
* @return the cardinality of labels generated from testInstance and ignoreUnknowns
*/
public static int getCardinality(Instance testInstance, boolean ignoreUnknowns) {
int cardinality = 0;
for (int i = 0; i < testInstance.numAttributes(); i++) {
if (i != testInstance.classIndex() && !(testInstance.isMissing(i) && ignoreUnknowns)) cardinality++;
}
return cardinality;
}
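    // Worked example (illustrative): an instance with 5 attributes, one of which is the class
    // attribute and one of which is missing, has cardinality 3 when ignoreUnknowns is true and
    // cardinality 4 when it is false.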
/**
* @return true if attributes with undefined values in the test item are ignored during labeling; false if not.
*/
public boolean getIgnoreUnknowns() {
return ignoreUnknowns;
}
/**
* @return the MissingDataCompare strategy in use by this labeler
*/
public MissingDataCompare getMissingDataCompare() {
return mdc;
}
/**
* @return the test instance being used to label other instances
*/
public Instance getTestInstance() {
return testInstance;
}
/**
 * Determine whether the attribute at the given index is ignored during labeling. The
* default behavior is to ignore the attributes with unknown values in the
* test instance if {@link #getIgnoreUnknowns()} is true.
*
* @param index Index of the attribute being queried
* @return True if the given attribute is ignored during labeling; false otherwise.
*/
public boolean isIgnored(int index) {
return ignoreSet.contains(index);
}
/**
* Create a context label for the input instance by comparing it with the
* test instance.
*
* @param data Instance to be labeled
* @return the label for the context that the instance belongs to. The cardinality of the label will be the same as
* the test and data items. At any given index i, {@link Label#matches(int) label.matches(i)} will return true if
* that feature is the same in the test and data instances.
* @throws IllegalArgumentException if the test and data instances are not from the same data set.
*/
public abstract Label label(Instance data);
/**
* Returns a string representing the context. If the input test instance attributes are "A C D Z R",
* and the {@code label} is {@code 00101}, then the return string will be "A C * Z *".
*/
public String getContextString(Label label) {
String contextBitString = label.toString();
StringJoiner joiner = new StringJoiner(" ");
int labelIndex = 0;
for (int i = 0; i < testInstance.numAttributes(); i++) {
// skip the class attribute and ignored attributes
if (i == testInstance.classIndex() || isIgnored(i)) continue;
if (contextBitString.charAt(labelIndex) == '0') {
joiner.add(testInstance.stringValue(i));
} else {
joiner.add("*");
}
labelIndex++;
}
return joiner.toString();
}
/**
* Returns a string containing the attributes of the input instance (minus the class
* attribute and ignored attributes).
*/
public String getInstanceAttsString(Instance instance) {
StringJoiner joiner = new StringJoiner(" ");
for(int i = 0; i < instance.numAttributes(); i++) {
if (i == instance.classIndex() || isIgnored(i)) {
continue;
}
joiner.add(instance.stringValue(i));
}
return joiner.toString();
}
/**
* Creates and returns the label which belongs at the top of the boolean
* lattice formed by the subcontexts labeled by this labeler, i.e. the one for
* which every feature is a match.
*
* @return A label with all matches
*/
public abstract Label getLatticeTop();
/**
* Creates and returns the label which belongs at the bottom of the boolean
* lattice formed by the subcontexts labeled by this labeler, i.e. the one for
* which every feature is a mismatch.
*
* @return A label with all mismatches
*/
public abstract Label getLatticeBottom();
/**
* For testing purposes, this method allows the client to directly specify the label using
 * the bits of an integer.
*/
@VisibleForTesting
public abstract Label fromBits(int labelBits);
/**
* In distributed processing, it is necessary to split labels into
* partitions. This method returns a partition for the given label. A full
* label is partitioned into pieces 0 through {@link #numPartitions()}, so
* code to process labels in pieces should look like this:
*
* <pre>
* Label myLabel = myLabeler.label(myInstance);
* for(int i = 0; i < myLabeler.numPartitions(); i++)
 *     process(myLabeler.partition(myLabel, i));
* </pre>
*
* @param partitionIndex index of the partition to return
* @return a new label representing a portion of the attributes represented by the input label.
* @throws IllegalArgumentException if the partitionIndex is greater than {@link #numPartitions()} or less than
* zero.
* @throws IllegalArgumentException if the input label is not compatible with this labeler.
*/
public abstract Label partition(Label label, int partitionIndex);
/**
* @return The number of label partitions available via {@link #partition}
*/
public int numPartitions() {
if (getCardinality() < PARTITION_SIZE) return 1;
else return (int) Math.ceil(getCardinality() / (double) PARTITION_SIZE);
}
/**
* This provides a default partitioning implementation which is overridable
* by child classes.
*
* @return An array of partitions indicating how labels can be split into partitions.
*/
Partition[] partitions() {
Partition[] spans = new Partition[numPartitions()];
int spanSize = (int) Math.floor((double) getCardinality() / numPartitions());
        // the first `remainder` partitions receive one extra feature, since numPartitions()
        // may not divide the cardinality evenly
int remainder = getCardinality() % numPartitions();
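        // Worked example (illustrative): cardinality 11 with 3 partitions gives spanSize = 3 and
        // remainder = 2, producing the spans [0,4], [4,4] and [8,3] (startIndex, cardinality).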
int index = 0;
for (int i = 0; i < numPartitions(); i++) {
int inc = (i < remainder) ? spanSize + 1 : spanSize;
spans[i] = new Partition(index, inc);
index += inc;
}
return spans;
}
/**
* Simple class for storing index spans.
*/
protected static class Partition {
private final int startIndex;
private final int cardinality;
Partition(int s, int l) {
startIndex = s;
cardinality = l;
}
/**
* @return The beginning of the span
*/
int getStartIndex() {
return startIndex;
}
/**
* @return The cardinality of the partition, or number of represented features.
*/
int getCardinality() {
return cardinality;
}
@Override
public String toString() {
return "[" + startIndex + "," + cardinality + "]";
}
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.popup.list;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeEventQueue;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.statistics.StatisticsInfo;
import com.intellij.psi.statistics.StatisticsManager;
import com.intellij.ui.JBListWithHintProvider;
import com.intellij.ui.ScrollingUtil;
import com.intellij.ui.SeparatorWithText;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.popup.ClosableByLeftArrow;
import com.intellij.ui.popup.WizardPopup;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.awt.event.*;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
public class ListPopupImpl extends WizardPopup implements ListPopup {
private static final Logger LOG = Logger.getInstance("#com.intellij.ui.popup.list.ListPopupImpl");
private MyList myList;
private MyMouseMotionListener myMouseMotionListener;
private MyMouseListener myMouseListener;
private ListPopupModel myListModel;
private int myIndexForShowingChild = -1;
private int myMaxRowCount = 20;
private boolean myAutoHandleBeforeShow;
public ListPopupImpl(@NotNull ListPopupStep aStep, int maxRowCount) {
super(aStep);
if (maxRowCount != -1){
myMaxRowCount = maxRowCount;
}
}
public ListPopupImpl(@NotNull ListPopupStep aStep) {
this(aStep, -1);
}
public ListPopupImpl(WizardPopup aParent, @NotNull ListPopupStep aStep, Object parentValue) {
this(aParent, aStep, parentValue, -1);
}
public ListPopupImpl(WizardPopup aParent, @NotNull ListPopupStep aStep, Object parentValue, int maxRowCount) {
super(aParent, aStep);
setParentValue(parentValue);
if (maxRowCount != -1){
myMaxRowCount = maxRowCount;
}
}
public void showUnderneathOfLabel(@NotNull JLabel label) {
int offset = -UIUtil.getListCellHPadding() - UIUtil.getListViewportPadding().left;
if (label.getIcon() != null) {
offset += label.getIcon().getIconWidth() + label.getIconTextGap();
}
show(new RelativePoint(label, new Point(offset, label.getHeight() + 1)));
}
protected ListPopupModel getListModel() {
return myListModel;
}
@Override
protected boolean beforeShow() {
myList.addMouseMotionListener(myMouseMotionListener);
myList.addMouseListener(myMouseListener);
myList.setVisibleRowCount(Math.min(myMaxRowCount, myListModel.getSize()));
boolean shouldShow = super.beforeShow();
if (myAutoHandleBeforeShow) {
final boolean toDispose = tryToAutoSelect(true);
shouldShow &= !toDispose;
}
return shouldShow;
}
@Override
public void goBack() {
myList.clearSelection();
super.goBack();
}
@Override
protected void afterShow() {
tryToAutoSelect(false);
}
private boolean tryToAutoSelect(boolean handleFinalChoices) {
ListPopupStep<Object> listStep = getListStep();
boolean selected = false;
if (listStep instanceof MultiSelectionListPopupStep<?>) {
int[] indices = ((MultiSelectionListPopupStep)listStep).getDefaultOptionIndices();
if (indices.length > 0) {
ScrollingUtil.ensureIndexIsVisible(myList, indices[0], 0);
myList.setSelectedIndices(indices);
selected = true;
}
}
else {
final int defaultIndex = listStep.getDefaultOptionIndex();
if (defaultIndex >= 0 && defaultIndex < myList.getModel().getSize()) {
ScrollingUtil.selectItem(myList, defaultIndex);
selected = true;
}
}
if (!selected) {
selectFirstSelectableItem();
}
if (listStep.isAutoSelectionEnabled()) {
if (!isVisible() && getSelectableCount() == 1) {
return _handleSelect(handleFinalChoices, null);
} else if (isVisible() && hasSingleSelectableItemWithSubmenu()) {
return _handleSelect(handleFinalChoices, null);
}
}
return false;
}
private boolean autoSelectUsingStatistics() {
final String filter = getSpeedSearch().getFilter();
if (!StringUtil.isEmpty(filter)) {
int maxUseCount = -1;
int mostUsedValue = -1;
int elementsCount = myListModel.getSize();
for (int i = 0; i < elementsCount; i++) {
Object value = myListModel.getElementAt(i);
final String text = getListStep().getTextFor(value);
final int count =
StatisticsManager.getInstance().getUseCount(new StatisticsInfo("#list_popup:" + myStep.getTitle() + "#" + filter, text));
if (count > maxUseCount) {
maxUseCount = count;
mostUsedValue = i;
}
}
if (mostUsedValue > 0) {
ScrollingUtil.selectItem(myList, mostUsedValue);
return true;
}
}
return false;
}
private void selectFirstSelectableItem() {
for (int i = 0; i < myListModel.getSize(); i++) {
if (getListStep().isSelectable(myListModel.getElementAt(i))) {
myList.setSelectedIndex(i);
break;
}
}
}
private boolean hasSingleSelectableItemWithSubmenu() {
boolean oneSubmenuFound = false;
int countSelectables = 0;
for (int i = 0; i < myListModel.getSize(); i++) {
Object elementAt = myListModel.getElementAt(i);
if (getListStep().isSelectable(elementAt) ) {
countSelectables ++;
if (getStep().hasSubstep(elementAt)) {
if (oneSubmenuFound) {
return false;
}
oneSubmenuFound = true;
}
}
}
return oneSubmenuFound && countSelectables == 1;
}
private int getSelectableCount() {
int count = 0;
for (int i = 0; i < myListModel.getSize(); i++) {
final Object each = myListModel.getElementAt(i);
if (getListStep().isSelectable(each)) {
count++;
}
}
return count;
}
public JList getList() {
return myList;
}
@Override
protected JComponent createContent() {
myMouseMotionListener = new MyMouseMotionListener();
myMouseListener = new MyMouseListener();
ListPopupStep<Object> step = getListStep();
myListModel = new ListPopupModel(this, getSpeedSearch(), step);
myList = new MyList();
if (myStep.getTitle() != null) {
myList.getAccessibleContext().setAccessibleName(myStep.getTitle());
}
if (step instanceof ListPopupStepEx) {
((ListPopupStepEx)step).setEmptyText(myList.getEmptyText());
}
myList.setSelectionMode(isMultiSelectionEnabled() ? ListSelectionModel.MULTIPLE_INTERVAL_SELECTION : ListSelectionModel.SINGLE_SELECTION);
myList.setSelectedIndex(0);
Insets padding = UIUtil.getListViewportPadding();
myList.setBorder(new EmptyBorder(padding));
ScrollingUtil.installActions(myList);
myList.setCellRenderer(getListElementRenderer());
myList.getActionMap().get("selectNextColumn").setEnabled(false);
myList.getActionMap().get("selectPreviousColumn").setEnabled(false);
registerAction("handleSelection1", KeyEvent.VK_ENTER, 0, new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
handleSelect(true);
}
});
registerAction("handleSelection2", KeyEvent.VK_RIGHT, 0, new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
handleSelect(false);
}
});
registerAction("goBack2", KeyEvent.VK_LEFT, 0, new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
if (isClosableByLeftArrow()) {
goBack();
}
}
});
myList.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
return myList;
}
private boolean isMultiSelectionEnabled() {
return getListStep() instanceof MultiSelectionListPopupStep<?>;
}
private boolean isClosableByLeftArrow() {
return getParent() != null || myStep instanceof ClosableByLeftArrow;
}
@Override
protected ActionMap getActionMap() {
return myList.getActionMap();
}
@Override
protected InputMap getInputMap() {
return myList.getInputMap();
}
protected ListCellRenderer getListElementRenderer() {
return new PopupListElementRenderer(this);
}
@Override
public ListPopupStep<Object> getListStep() {
return (ListPopupStep<Object>) myStep;
}
@Override
public void dispose() {
myList.removeMouseMotionListener(myMouseMotionListener);
myList.removeMouseListener(myMouseListener);
super.dispose();
}
protected int getSelectedIndex() {
return myList.getSelectedIndex();
}
protected Rectangle getCellBounds(int i) {
return myList.getCellBounds(i, i);
}
@Override
public void disposeChildren() {
setIndexForShowingChild(-1);
super.disposeChildren();
}
@Override
protected void onAutoSelectionTimer() {
if (myList.getModel().getSize() > 0 && !myList.isSelectionEmpty() ) {
handleSelect(false);
}
else {
disposeChildren();
setIndexForShowingChild(-1);
}
}
@Override
public void handleSelect(boolean handleFinalChoices) {
_handleSelect(handleFinalChoices, null);
}
@Override
public void handleSelect(boolean handleFinalChoices, InputEvent e) {
_handleSelect(handleFinalChoices, e);
}
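  // Added summary: _handleSelect validates the current selection, disposes any child popup,
  // records speed-search statistics for the chosen values, asks the step for the next PopupStep
  // via onChosen(), and then either shows a child popup or finishes the whole popup chain.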
private boolean _handleSelect(final boolean handleFinalChoices, @Nullable InputEvent e) {
if (myList.getSelectedIndex() == -1) return false;
if (getSpeedSearch().isHoldingFilter() && myList.getModel().getSize() == 0) return false;
if (myList.getSelectedIndex() == getIndexForShowingChild()) {
if (myChild != null && !myChild.isVisible()) setIndexForShowingChild(-1);
return false;
}
final Object[] selectedValues = myList.getSelectedValues();
final ListPopupStep<Object> listStep = getListStep();
if (!listStep.isSelectable(selectedValues[0])) return false;
if ((listStep instanceof MultiSelectionListPopupStep<?> && !((MultiSelectionListPopupStep<Object>)listStep).hasSubstep(Arrays.asList(selectedValues))
|| !listStep.hasSubstep(selectedValues[0])) && !handleFinalChoices) return false;
disposeChildren();
if (myListModel.getSize() == 0) {
setFinalRunnable(myStep.getFinalRunnable());
setOk(true);
disposeAllParents(e);
setIndexForShowingChild(-1);
return true;
}
valuesSelected(selectedValues);
AtomicBoolean insideOnChosen = new AtomicBoolean(true);
ApplicationManager.getApplication().invokeLater(() -> {
if (insideOnChosen.get()) {
LOG.error("Showing dialogs from popup onChosen can result in focus issues. Please put the handler into BaseStep.doFinalStep or PopupStep.getFinalRunnable.");
}
}, ModalityState.any());
final PopupStep nextStep;
try {
if (listStep instanceof MultiSelectionListPopupStep<?>) {
nextStep = ((MultiSelectionListPopupStep<Object>)listStep).onChosen(Arrays.asList(selectedValues), handleFinalChoices);
}
else if (e != null && listStep instanceof ListPopupStepEx<?>) {
nextStep = ((ListPopupStepEx<Object>)listStep).onChosen(selectedValues[0], handleFinalChoices, e.getModifiers());
}
else {
nextStep = listStep.onChosen(selectedValues[0], handleFinalChoices);
}
}
finally {
insideOnChosen.set(false);
}
return handleNextStep(nextStep, selectedValues.length == 1 ? selectedValues[0] : null, e);
}
private void valuesSelected(final Object[] values) {
final String filter = getSpeedSearch().getFilter();
if (!StringUtil.isEmpty(filter)) {
for (Object value : values) {
final String text = getListStep().getTextFor(value);
StatisticsManager.getInstance().incUseCount(new StatisticsInfo("#list_popup:" + getListStep().getTitle() + "#" + filter, text));
}
}
}
private boolean handleNextStep(final PopupStep nextStep, Object parentValue, InputEvent e) {
if (nextStep != PopupStep.FINAL_CHOICE) {
final Point point = myList.indexToLocation(myList.getSelectedIndex());
SwingUtilities.convertPointToScreen(point, myList);
myChild = createPopup(this, nextStep, parentValue);
if (myChild instanceof ListPopupImpl) {
for (ListSelectionListener listener : myList.getListSelectionListeners()) {
((ListPopupImpl)myChild).addListSelectionListener(listener);
}
}
final JComponent container = getContent();
assert container != null : "container == null";
int y = point.y;
if (parentValue != null && getListModel().isSeparatorAboveOf(parentValue)) {
SeparatorWithText swt = new SeparatorWithText();
swt.setCaption(getListModel().getCaptionAboveOf(parentValue));
y += swt.getPreferredSize().height - 1;
}
myChild.show(container, point.x + container.getWidth() - STEP_X_PADDING, y, true);
setIndexForShowingChild(myList.getSelectedIndex());
return false;
}
else {
setOk(true);
setFinalRunnable(myStep.getFinalRunnable());
disposeAllParents(e);
setIndexForShowingChild(-1);
return true;
}
}
@Override
public void addListSelectionListener(ListSelectionListener listSelectionListener) {
myList.addListSelectionListener(listSelectionListener);
}
private class MyMouseMotionListener extends MouseMotionAdapter {
private int myLastSelectedIndex = -2;
private Point myLastMouseLocation;
private boolean isMouseMoved(Point location) {
if (myLastMouseLocation == null) {
myLastMouseLocation = location;
return false;
}
return !myLastMouseLocation.equals(location);
}
@Override
public void mouseMoved(MouseEvent e) {
if (!isMouseMoved(e.getLocationOnScreen())) return;
Point point = e.getPoint();
int index = myList.locationToIndex(point);
if (index != myLastSelectedIndex) {
if (!isMultiSelectionEnabled() || !UIUtil.isSelectionButtonDown(e) && myList.getSelectedIndices().length <= 1) {
myList.setSelectedIndex(index);
}
restartTimer();
myLastSelectedIndex = index;
}
notifyParentOnChildSelection();
}
}
protected boolean isActionClick(MouseEvent e) {
return UIUtil.isActionClick(e, MouseEvent.MOUSE_RELEASED, true);
}
public Object[] getSelectedValues() {
return myList.getSelectedValues();
}
private class MyMouseListener extends MouseAdapter {
@Override
public void mouseReleased(MouseEvent e) {
if (!isActionClick(e) || isMultiSelectionEnabled() && UIUtil.isSelectionButtonDown(e)) return;
      IdeEventQueue.getInstance().blockNextEvents(e); // sometimes, after the popup closes, the MOUSE_RELEASED event is delivered to other components
final Object selectedValue = myList.getSelectedValue();
final ListPopupStep<Object> listStep = getListStep();
handleSelect(handleFinalChoices(e, selectedValue, listStep), e);
stopTimer();
}
}
protected boolean handleFinalChoices(MouseEvent e, Object selectedValue, ListPopupStep<Object> listStep) {
return selectedValue == null || !listStep.hasSubstep(selectedValue) || !listStep.isSelectable(selectedValue) || !isOnNextStepButton(e);
}
private boolean isOnNextStepButton(MouseEvent e) {
final int index = myList.getSelectedIndex();
final Rectangle bounds = myList.getCellBounds(index, index);
final Point point = e.getPoint();
return bounds != null && point.getX() > bounds.width + bounds.getX() - AllIcons.Icons.Ide.NextStep.getIconWidth();
}
@Override
protected void process(KeyEvent aEvent) {
myList.processKeyEvent(aEvent);
}
private int getIndexForShowingChild() {
return myIndexForShowingChild;
}
private void setIndexForShowingChild(int aIndexForShowingChild) {
myIndexForShowingChild = aIndexForShowingChild;
}
private class MyList extends JBListWithHintProvider implements DataProvider {
public MyList() {
super(myListModel);
}
@Override
protected PsiElement getPsiElementForHint(Object selectedValue) {
return selectedValue instanceof PsiElement ? (PsiElement)selectedValue : null;
}
@Override
public Dimension getPreferredScrollableViewportSize() {
Dimension result = super.getPreferredScrollableViewportSize();
result.width += JBUI.scale(14); // support possible scroll bar
int rowCount = getVisibleRowCount();
int size = getModel().getSize();
if (rowCount < size) {
// Note: labeled separators are not counted in this branch
return result;
}
result.height = getPreferredSize().height;
return result;
}
@Override
public void processKeyEvent(KeyEvent e) {
e.setSource(this);
super.processKeyEvent(e);
}
@Override
protected void processMouseEvent(MouseEvent e) {
if (!isMultiSelectionEnabled() &&
(e.getModifiers() & Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()) != 0) {
// do not toggle selection with ctrl+click event in single-selection mode
e.consume();
}
if (UIUtil.isActionClick(e, MouseEvent.MOUSE_PRESSED) && isOnNextStepButton(e)) {
e.consume();
}
super.processMouseEvent(e);
}
@Override
public Object getData(String dataId) {
if (PlatformDataKeys.SELECTED_ITEM.is(dataId)){
return myList.getSelectedValue();
}
if (PlatformDataKeys.SELECTED_ITEMS.is(dataId)){
return myList.getSelectedValues();
}
return null;
}
}
@Override
protected void onSpeedSearchPatternChanged() {
myListModel.refilter();
if (myListModel.getSize() > 0) {
if (!autoSelectUsingStatistics()) {
int fullMatchIndex = myListModel.getClosestMatchIndex();
if (fullMatchIndex != -1) {
myList.setSelectedIndex(fullMatchIndex);
}
if (myListModel.getSize() <= myList.getSelectedIndex() || !myListModel.isVisible(myList.getSelectedValue())) {
myList.setSelectedIndex(0);
}
}
}
}
@Override
protected void onSelectByMnemonic(Object value) {
if (myListModel.isVisible(value)) {
myList.setSelectedValue(value, true);
myList.repaint();
handleSelect(true);
}
}
@Override
protected JComponent getPreferredFocusableComponent() {
return myList;
}
@Override
protected void onChildSelectedFor(Object value) {
if (myList.getSelectedValue() != value) {
myList.setSelectedValue(value, false);
}
}
@Override
public void setHandleAutoSelectionBeforeShow(final boolean autoHandle) {
myAutoHandleBeforeShow = autoHandle;
}
@Override
public boolean isModalContext() {
return true;
}
@Override
public void showInBestPositionFor(@NotNull Editor editor) {
if (ApplicationManager.getApplication().isUnitTestMode()) {
handleSelect(true);
}
else {
super.showInBestPositionFor(editor);
}
}
}
|
|
package org.nybatis.core.model;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.nybatis.core.exception.unchecked.ParseException;
import org.nybatis.core.reflection.Reflector;
import org.nybatis.core.util.ClassUtil;
public class PrimitiveConverter {
private static final Map<Class<?>, Class<?>> TO_PRIMITIVE = new HashMap<>( 16 );
private static final Map<Class<?>, Class<?>> TO_WRAPPER = new HashMap<>( 16 );
static {
add( void.class, Void.class );
add( String.class, String.class );
add( char.class, Character.class );
add( int.class, Integer.class );
add( long.class, Long.class );
add( float.class, Float.class );
add( double.class, Double.class );
add( BigDecimal.class, BigDecimal.class );
add( BigInteger.class, BigInteger.class );
add( byte.class, Byte.class );
add( short.class, Short.class );
add( boolean.class, Boolean.class );
add( Date.class, Date.class );
add( Calendar.class, Calendar.class );
add( NDate.class, NDate.class );
add( Void.class, Void.class );
add( URI.class, URI.class );
add( URL.class, URL.class );
add( UUID.class, UUID.class );
add( Pattern.class, Pattern.class );
}
private static void add( Class<?> primitiveClass, Class<?> wrapperClass ) {
TO_PRIMITIVE.put( wrapperClass, primitiveClass );
TO_WRAPPER.put( primitiveClass, wrapperClass );
}
private Object val = null;
private String nvlVal = "";
private boolean ignoreCastingError = true;
public PrimitiveConverter() {}
public PrimitiveConverter( boolean ignoreCastingError ) {
ignoreCastingError( ignoreCastingError );
}
public PrimitiveConverter( Object value ) {
if( value != null ) {
this.val = value;
this.nvlVal = value.toString();
}
}
public PrimitiveConverter( Object value, boolean ignoreCastingError ) {
this( value );
ignoreCastingError( ignoreCastingError );
}
public void ignoreCastingError( boolean ignore ) {
ignoreCastingError = ignore;
}
public Object get() {
return val;
}
public String toString() {
return ( val == null && ! ignoreCastingError ) ? null : nvlVal;
}
public int toInt() {
try {
return Integer.parseInt( nvlVal );
} catch( NumberFormatException e ) {
try {
return Double.valueOf( nvlVal ).intValue();
} catch( NumberFormatException ne ) {
if( ! ignoreCastingError ) throw ne;
return 0;
}
}
}
public long toLong() {
try {
return Long.parseLong( nvlVal );
} catch( NumberFormatException e ) {
try {
return Double.valueOf( nvlVal ).longValue();
} catch( NumberFormatException ne ) {
if( ! ignoreCastingError ) throw ne;
return 0L;
}
}
}
public float toFloat() {
try {
return Float.parseFloat( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return 0F;
}
}
public double toDouble() {
try {
return Double.parseDouble( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return 0.;
}
}
public boolean toBoolean() {
if( "true".equalsIgnoreCase(nvlVal) ) return true;
if( ignoreCastingError ) {
if( "y".equalsIgnoreCase(nvlVal) ) return true;
if( "yes".equalsIgnoreCase(nvlVal) ) return true;
}
return false;
}
public byte toByte() {
try {
return Byte.parseByte( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return 0;
}
}
public short toShort() {
try {
return Short.parseShort( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return (short) 0;
}
}
public char toChar() {
if( isEmpty() ) {
return Character.MIN_VALUE;
} else {
return nvlVal.charAt( 0 );
}
}
public NDate toNDate() {
if( isEmpty() ) return ignoreCastingError ? NDate.MIN_DATE : null;
Class<?> klass = val.getClass();
if( klass == NDate.class ) return (NDate) val;
if( klass == Date.class ) return new NDate( (Date) val );
if( klass == Calendar.class ) return new NDate( (Calendar) val );
try {
return new NDate( nvlVal );
} catch( ParseException e ) {
if( ! ignoreCastingError ) throw e;
return NDate.MIN_DATE;
}
}
public Date toDate() {
if( isEmpty() ) return ignoreCastingError ? NDate.MIN_DATE.toDate() : null;
Class<?> klass = val.getClass();
if( klass == Date.class ) return (Date) val;
if( klass == NDate.class ) return ((NDate)val).toDate();
if( klass == Calendar.class ) return new NDate( (Calendar) val ).toDate();
NDate date = toNDate();
return date == null ? null : date.toDate();
}
public Calendar toCalendar() {
if( isEmpty() ) return ignoreCastingError ? NDate.MIN_DATE.toCalendar() : null;
Class<?> klass = val.getClass();
if( klass == Calendar.class ) return (Calendar) val;
if( klass == NDate.class ) return ((NDate)val).toCalendar();
if( klass == Date.class ) return new NDate( (Date) val ).toCalendar();
NDate date = toNDate();
return date == null ? null : date.toCalendar();
}
public BigInteger toBigInt() {
try {
return new BigInteger( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return BigInteger.ZERO;
}
}
public BigDecimal toBigDecimal() {
try {
return new BigDecimal( nvlVal );
} catch( NumberFormatException e ) {
if( ! ignoreCastingError ) throw e;
return BigDecimal.ZERO;
}
}
public Void toVoid() {
return null;
}
public URI toURI() {
try {
return new URI( nvlVal );
} catch( URISyntaxException e ) {
if( ! ignoreCastingError ) throw new IllegalArgumentException( e );
return null;
}
}
public URL toURL() {
try {
return new URL( nvlVal );
} catch( MalformedURLException e ) {
if( ! ignoreCastingError ) throw new IllegalArgumentException( e );
return null;
}
}
public UUID toUUID() {
try {
return UUID.fromString(nvlVal);
} catch(IllegalArgumentException e) {
if( ! ignoreCastingError ) throw e;
return null;
}
}
public Pattern toPattern() {
try {
return Pattern.compile( nvlVal );
} catch( PatternSyntaxException e ) {
return Pattern.compile( "" );
}
}
private boolean isEmpty() {
return val == null || val.toString().length() == 0;
}
public Map<String, Object> toMap() {
return Reflector.toMapFrom( nvlVal );
}
public <T> T toBean( Class<T> klass ) {
return Reflector.toBeanFrom( toMap(), klass );
}
@SuppressWarnings( "unchecked" )
public <T> Object cast( Class<T> klass ) {
if( val == null && ! ignoreCastingError ) return null;
Class<?> klassWrapped = wrap( klass );
if( wrap(val) == klassWrapped ) return val;
if( ! isPrimitive(klass) ) return val;
if( klassWrapped == String.class ) return toString();
if( klassWrapped == Character.class ) return toChar();
if( klassWrapped == Integer.class ) return toInt();
if( klassWrapped == Long.class ) return toLong();
if( klassWrapped == Double.class ) return toDouble();
if( klassWrapped == Float.class ) return toFloat();
if( klassWrapped == BigDecimal.class ) return toBigDecimal();
if( klassWrapped == BigInteger.class ) return toBigInt();
if( klassWrapped == Boolean.class ) return toBoolean();
if( klassWrapped == Byte.class ) return toByte();
if( klassWrapped == Short.class ) return toShort();
if( klassWrapped == Date.class ) return toDate();
if( klassWrapped == Calendar.class ) return toCalendar();
if( klassWrapped == NDate.class ) return toNDate();
if( klassWrapped == Void.class ) return toVoid();
if( klassWrapped == URI.class ) return toURI();
if( klassWrapped == URL.class ) return toURL();
if( klassWrapped == UUID.class ) return toUUID();
if( klassWrapped == Pattern.class ) return toPattern();
if( val == null ) return null;
try {
if( ClassUtil.isExtendedBy( val, Map.class ) ) return toMap();
return toBean( klass );
} catch( Exception e ) {
return val;
}
}
public boolean isPrimitive( Class<?> klass ) {
return TO_PRIMITIVE.containsKey( klass );
}
public boolean isWrapper( Class<?> klass ) {
return TO_WRAPPER.containsKey( klass );
}
private <T> Class<T> wrap( T value ) {
if( value == null ) return null;
return (Class<T>) wrap( value.getClass() );
}
private <T> Class<T> wrap( Class<T> klass ) {
if( klass == null ) return klass;
Class<T> wrapped = (Class<T>) TO_WRAPPER.get( klass );
return ( wrapped == null ) ? klass : wrapped;
}
}
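// Illustrative usage sketch (added for this document; not part of the original nybatis source).
// It only exercises the constructors and cast() defined above; the input value "3.14" and the
// target wrapper classes are arbitrary examples.
class PrimitiveConverterUsageSketch {
    public static void main( String[] args ) {
        PrimitiveConverter converter = new PrimitiveConverter( "3.14", true ); // true: swallow casting errors
        Integer asInt     = (Integer) converter.cast( Integer.class ); // Integer.parseInt fails, falls back to Double.intValue() -> 3
        Double  asDouble  = (Double)  converter.cast( Double.class );  // -> 3.14
        Boolean asBoolean = (Boolean) converter.cast( Boolean.class ); // "3.14" is not "true"/"y"/"yes" -> false
        System.out.println( asInt + " / " + asDouble + " / " + asBoolean );
    }
}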
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.sql.translator;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.QueryProvider;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.samza.application.descriptors.StreamApplicationDescriptor;
import org.apache.samza.operators.MessageStream;
import org.apache.samza.sql.data.SamzaSqlRelMessage;
import org.apache.samza.sql.interfaces.SamzaRelTableKeyConverter;
import org.apache.samza.system.descriptors.DelegatingSystemDescriptor;
import org.apache.samza.sql.data.RexToJavaCompiler;
import org.apache.samza.sql.data.SamzaSqlExecutionContext;
import org.apache.samza.sql.interfaces.SamzaRelConverter;
/**
* State that is maintained while translating the Calcite relational graph to Samza {@link StreamApplicationDescriptor}.
*/
public class TranslatorContext implements Cloneable {
/**
* The internal variables that are shared among all cloned {@link TranslatorContext}
*/
private final StreamApplicationDescriptor streamAppDesc;
private final RexToJavaCompiler compiler;
private final Map<String, SamzaRelConverter> relSamzaConverters;
private final Map<String, SamzaRelTableKeyConverter> relTableKeyConverters;
private final Map<Integer, MessageStream<SamzaSqlRelMessage>> messageStreams;
private final Map<Integer, RelNode> relNodes;
private final Map<String, DelegatingSystemDescriptor> systemDescriptors;
/**
* The internal variables that are not shared among all cloned {@link TranslatorContext}
*/
private final SamzaSqlExecutionContext executionContext;
private final DataContextImpl dataContext;
private static class DataContextImpl implements DataContext {
@Override
public SchemaPlus getRootSchema() {
return null;
}
@Override
public JavaTypeFactory getTypeFactory() {
return null;
}
@Override
public QueryProvider getQueryProvider() {
return null;
}
@Override
public Object get(String name) {
TimeZone timeZone = TimeZone.getDefault();
long timeMs = System.currentTimeMillis();
long offsetMs = timeZone.getOffset(timeMs);
if (name.equals(Variable.LOCAL_TIMESTAMP.camelName)) {
return timeMs + offsetMs;
} else if (name.equals(Variable.UTC_TIMESTAMP.camelName) || name.equals(Variable.CURRENT_TIMESTAMP.camelName)) {
return timeMs;
} else if (name.equals(Variable.TIME_ZONE.camelName)) {
return timeZone;
} else {
throw new UnsupportedOperationException("Unsupported operation " + name);
}
}
}
private static class SamzaSqlRexBuilder extends RexBuilder {
private SamzaSqlRexBuilder(RelDataTypeFactory typeFactory) {
super(typeFactory);
}
/**
* Since Drill has a different mechanism and rules for implicit casting,
* ensureType() is overridden to avoid conflicting cast functions being added to the expressions.
*/
@Override
public RexNode ensureType(RelDataType type, RexNode node, boolean matchNullability) {
return node;
}
}
private RexToJavaCompiler createExpressionCompiler(RelRoot relRoot) {
RelDataTypeFactory dataTypeFactory = relRoot.project().getCluster().getTypeFactory();
RexBuilder rexBuilder = new SamzaSqlRexBuilder(dataTypeFactory);
return new RexToJavaCompiler(rexBuilder);
}
/**
* Private constructor to make a clone of {@link TranslatorContext} object
*
* @param other the original object to copy from
*/
private TranslatorContext(TranslatorContext other) {
this.streamAppDesc = other.streamAppDesc;
this.compiler = other.compiler;
this.relSamzaConverters = other.relSamzaConverters;
this.relTableKeyConverters = other.relTableKeyConverters;
this.messageStreams = other.messageStreams;
this.relNodes = other.relNodes;
this.executionContext = other.executionContext.clone();
this.dataContext = new DataContextImpl();
this.systemDescriptors = other.systemDescriptors;
}
/**
* Create the instance of TranslatorContext
* @param streamAppDesc Samza's streamAppDesc that is populated during the translation.
* @param relRoot Root of the relational graph from calcite.
* @param executionContext the execution context
*/
public TranslatorContext(StreamApplicationDescriptor streamAppDesc, RelRoot relRoot, SamzaSqlExecutionContext executionContext) {
this.streamAppDesc = streamAppDesc;
this.compiler = createExpressionCompiler(relRoot);
this.executionContext = executionContext;
this.dataContext = new DataContextImpl();
this.relSamzaConverters = executionContext.getSamzaSqlApplicationConfig().getSamzaRelConverters();
this.relTableKeyConverters = executionContext.getSamzaSqlApplicationConfig().getSamzaRelTableKeyConverters();
this.messageStreams = new HashMap<>();
this.relNodes = new HashMap<>();
this.systemDescriptors = new HashMap<>();
}
/**
* Gets stream graph.
*
* @return the stream graph
*/
public StreamApplicationDescriptor getStreamAppDescriptor() {
return streamAppDesc;
}
/**
* Gets execution context.
*
* @return the execution context
*/
SamzaSqlExecutionContext getExecutionContext() {
return executionContext;
}
DataContext getDataContext() {
return dataContext;
}
/**
* Gets expression compiler.
*
* @return the expression compiler
*/
RexToJavaCompiler getExpressionCompiler() {
return compiler;
}
/**
* Register message stream.
*
* @param id the id
* @param stream the stream
*/
void registerMessageStream(int id, MessageStream stream) {
messageStreams.put(id, stream);
}
/**
* Gets message stream.
*
* @param id the id
* @return the message stream
*/
MessageStream<SamzaSqlRelMessage> getMessageStream(int id) {
return messageStreams.get(id);
}
void registerRelNode(int id, RelNode relNode) {
relNodes.put(id, relNode);
}
RelNode getRelNode(int id) {
return relNodes.get(id);
}
SamzaRelConverter getMsgConverter(String source) {
return this.relSamzaConverters.get(source);
}
SamzaRelTableKeyConverter getTableKeyConverter(String source) {
return this.relTableKeyConverters.get(source);
}
/**
* This method helps to create a per task instance of translator context
*
* @return the cloned instance of {@link TranslatorContext}
*/
@Override
public TranslatorContext clone() {
return new TranslatorContext(this);
}
}
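/**
 * Illustrative helper (added for this document; not part of the original Samza source). It only uses
 * the package-private stream registry declared on {@link TranslatorContext} above; the already-built
 * context and the Calcite node ids are assumed to come from the surrounding query translator.
 */
class TranslatorContextUsageSketch {
  static MessageStream<SamzaSqlRelMessage> relayStream(TranslatorContext context, int fromNodeId, int toNodeId) {
    // look up the stream that was registered for one relational node ...
    MessageStream<SamzaSqlRelMessage> stream = context.getMessageStream(fromNodeId);
    // ... and register it under another node id, as a translation step typically does
    context.registerMessageStream(toNodeId, stream);
    return stream;
  }
}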
|
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.bgp.cfg.impl;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onosproject.bgp.controller.BgpCfg;
import org.onosproject.bgp.controller.BgpPeerCfg;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.net.config.ConfigFactory;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.config.basics.SubjectFactories;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.bgp.controller.BgpController;
import org.slf4j.Logger;
import org.osgi.service.component.ComponentContext;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static org.slf4j.LoggerFactory.getLogger;
/**
* BGP config provider to validate and populate the configuration.
*/
@Component(immediate = true)
public class BgpCfgProvider extends AbstractProvider {
private static final Logger log = getLogger(BgpCfgProvider.class);
static final String PROVIDER_ID = "org.onosproject.provider.bgp.cfg";
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected BgpController bgpController;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected CoreService coreService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected NetworkConfigRegistry configRegistry;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected NetworkConfigService configService;
private final ConfigFactory configFactory =
new ConfigFactory(SubjectFactories.APP_SUBJECT_FACTORY, BgpAppConfig.class, "bgpapp") {
@Override
public BgpAppConfig createConfig() {
return new BgpAppConfig();
}
};
private final NetworkConfigListener configListener = new InternalConfigListener();
private ApplicationId appId;
/**
* Creates a BGP config provider.
*/
public BgpCfgProvider() {
super(new ProviderId("bgp", PROVIDER_ID));
}
@Activate
public void activate(ComponentContext context) {
appId = coreService.registerApplication(PROVIDER_ID);
configService.addListener(configListener);
configRegistry.registerConfigFactory(configFactory);
readConfiguration();
log.info("BGP cfg provider started");
}
@Deactivate
public void deactivate(ComponentContext context) {
configRegistry.unregisterConfigFactory(configFactory);
configService.removeListener(configListener);
}
void setBgpController(BgpController bgpController) {
this.bgpController = bgpController;
}
/**
* Reads the configuration and sets it on the BGP-LS southbound protocol.
*/
private void readConfiguration() {
BgpCfg bgpConfig = null;
List<BgpAppConfig.BgpPeerConfig> nodes;
bgpConfig = bgpController.getConfig();
BgpAppConfig config = configRegistry.getConfig(appId, BgpAppConfig.class);
if (config == null) {
log.warn("No configuration found");
return;
}
/* Set the configuration */
bgpConfig.setRouterId(config.routerId());
bgpConfig.setAsNumber(config.localAs());
bgpConfig.setLsCapability(config.lsCapability());
bgpConfig.setHoldTime(config.holdTime());
bgpConfig.setMaxSession(config.maxSession());
bgpConfig.setLargeASCapability(config.largeAsCapability());
if (config.flowSpecCapability() == null) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.NONE);
} else {
if (config.flowSpecCapability().equals("IPV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.IPV4);
} else if (config.flowSpecCapability().equals("VPNV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.VPNV4);
} else if (config.flowSpecCapability().equals("IPV4_VPNV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.IPV4_VPNV4);
} else {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.NONE);
}
}
bgpConfig.setFlowSpecRpdCapability(config.rpdCapability());
nodes = config.bgpPeer();
for (int i = 0; i < nodes.size(); i++) {
String connectMode = nodes.get(i).connectMode();
bgpConfig.addPeer(nodes.get(i).hostname(), nodes.get(i).asNumber(), nodes.get(i).holdTime());
if (connectMode.equals(BgpAppConfig.PEER_CONNECT_ACTIVE)) {
bgpConfig.connectPeer(nodes.get(i).hostname());
}
}
}
/**
* Reads the configuration and updates it on the BGP-LS southbound protocol.
*/
private void updateConfiguration() {
BgpCfg bgpConfig = null;
List<BgpAppConfig.BgpPeerConfig> nodes;
TreeMap<String, BgpPeerCfg> bgpPeerTree;
bgpConfig = bgpController.getConfig();
BgpPeerCfg peer = null;
BgpAppConfig config = configRegistry.getConfig(appId, BgpAppConfig.class);
if (config == null) {
log.warn("No configuration found");
return;
}
/* Update the self configuration */
if (bgpController.connectedPeerCount() != 0) {
//TODO: If connections already exist, disconnect
bgpController.closeConnectedPeers();
}
bgpConfig.setRouterId(config.routerId());
bgpConfig.setAsNumber(config.localAs());
bgpConfig.setLsCapability(config.lsCapability());
bgpConfig.setHoldTime(config.holdTime());
bgpConfig.setMaxSession(config.maxSession());
bgpConfig.setLargeASCapability(config.largeAsCapability());
if (config.flowSpecCapability() == null) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.NONE);
} else {
if (config.flowSpecCapability().equals("IPV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.IPV4);
} else if (config.flowSpecCapability().equals("VPNV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.VPNV4);
} else if (config.flowSpecCapability().equals("IPV4_VPNV4")) {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.IPV4_VPNV4);
} else {
bgpConfig.setFlowSpecCapability(BgpCfg.FlowSpec.NONE);
}
}
bgpConfig.setFlowSpecRpdCapability(config.rpdCapability());
/* update the peer configuration */
bgpPeerTree = bgpConfig.getPeerTree();
if (bgpPeerTree.isEmpty()) {
log.info("There are no BGP peers to iterate");
} else {
Set set = bgpPeerTree.entrySet();
Iterator i = set.iterator();
List<BgpPeerCfg> absPeerList = new ArrayList<BgpPeerCfg>();
boolean exists = false;
while (i.hasNext()) {
Map.Entry me = (Map.Entry) i.next();
peer = (BgpPeerCfg) me.getValue();
nodes = config.bgpPeer();
for (int j = 0; j < nodes.size(); j++) {
String peerIp = nodes.get(j).hostname();
if (peerIp.equals(peer.getPeerRouterId())) {
if (bgpConfig.isPeerConnectable(peer.getPeerRouterId())) {
peer.setAsNumber(nodes.get(j).asNumber());
peer.setHoldtime(nodes.get(j).holdTime());
log.debug("Peer neighbor IP successfully modified :" + peer.getPeerRouterId());
} else {
log.debug("Peer neighbor IP cannot be modified :" + peer.getPeerRouterId());
}
nodes.remove(j);
exists = true;
break;
}
}
if (!exists) {
absPeerList.add(peer);
}
// reset for the next peer so absent peers following a match are still collected
exists = false;
if (peer.connectPeer() != null) {
peer.connectPeer().disconnectPeer();
peer.setConnectPeer(null);
}
}
/* Remove the absent nodes. */
for (int j = 0; j < absPeerList.size(); j++) {
bgpConfig.removePeer(absPeerList.get(j).getPeerRouterId());
}
}
nodes = config.bgpPeer();
for (int i = 0; i < nodes.size(); i++) {
String connectMode = nodes.get(i).connectMode();
bgpConfig.addPeer(nodes.get(i).hostname(), nodes.get(i).asNumber(), nodes.get(i).holdTime());
if (connectMode.equals(BgpAppConfig.PEER_CONNECT_ACTIVE)) {
bgpConfig.connectPeer(nodes.get(i).hostname());
}
}
}
/**
* BGP config listener to populate the configuration.
*/
private class InternalConfigListener implements NetworkConfigListener {
@Override
public void event(NetworkConfigEvent event) {
if (!event.configClass().equals(BgpAppConfig.class)) {
return;
}
switch (event.type()) {
case CONFIG_ADDED:
readConfiguration();
break;
case CONFIG_UPDATED:
updateConfiguration();
break;
case CONFIG_REMOVED:
default:
break;
}
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.indexing.mongodb;
import static com.google.common.base.Preconditions.checkState;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.apache.rya.api.domain.RyaIRI;
import org.apache.rya.api.domain.RyaStatement;
import org.apache.rya.api.resolver.RyaToRdfConversions;
import org.apache.rya.indexing.StatementConstraints;
import org.apache.rya.mongodb.MongoDBRdfConfiguration;
import org.apache.rya.mongodb.MongoDBRyaDAO;
import org.apache.rya.mongodb.MongoSecondaryIndex;
import org.apache.rya.mongodb.StatefulMongoDBRdfConfiguration;
import org.apache.rya.mongodb.batch.MongoDbBatchWriter;
import org.apache.rya.mongodb.batch.MongoDbBatchWriterConfig;
import org.apache.rya.mongodb.batch.MongoDbBatchWriterException;
import org.apache.rya.mongodb.batch.MongoDbBatchWriterUtils;
import org.apache.rya.mongodb.batch.collection.MongoCollectionType;
import org.apache.rya.mongodb.document.operators.query.QueryBuilder;
import org.bson.Document;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Literal;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import com.mongodb.DBCollection;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
/**
* Secondary indexer using MongoDB.
* @param <T> - The {@link IndexingMongoDBStorageStrategy} this indexer uses.
*/
public abstract class AbstractMongoIndexer<T extends IndexingMongoDBStorageStrategy> implements MongoSecondaryIndex {
private static final Logger LOG = Logger.getLogger(AbstractMongoIndexer.class);
private boolean flushEachUpdate = true;
protected StatefulMongoDBRdfConfiguration conf;
protected MongoDBRyaDAO dao;
protected MongoClient mongoClient;
protected String dbName;
protected MongoDatabase db;
protected MongoCollection<Document> collection;
protected Set<IRI> predicates;
protected T storageStrategy;
private MongoDbBatchWriter<Document> mongoDbBatchWriter;
protected void initCore() {
dbName = conf.getMongoDBName();
this.mongoClient = conf.getMongoClient();
db = this.mongoClient.getDatabase(dbName);
final String collectionName = conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya") + getCollectionName();
collection = db.getCollection(collectionName);
flushEachUpdate = ((MongoDBRdfConfiguration)conf).flushEachUpdate();
final MongoDbBatchWriterConfig mongoDbBatchWriterConfig = MongoDbBatchWriterUtils.getMongoDbBatchWriterConfig(conf);
mongoDbBatchWriter = new MongoDbBatchWriter<>(new MongoCollectionType(collection), mongoDbBatchWriterConfig);
try {
mongoDbBatchWriter.start();
} catch (final MongoDbBatchWriterException e) {
LOG.error("Error start MongoDB batch writer", e);
}
}
@Override
public void setConf(final Configuration conf) {
checkState(conf instanceof StatefulMongoDBRdfConfiguration,
"The provided Configuration must be a StatefulMongoDBRdfConfiguration, but it was " + conf.getClass().getName());
this.conf = (StatefulMongoDBRdfConfiguration) conf;
}
@Override
public void close() throws IOException {
flush();
try {
mongoDbBatchWriter.shutdown();
} catch (final MongoDbBatchWriterException e) {
throw new IOException("Error shutting down MongoDB batch writer", e);
}
}
@Override
public void flush() throws IOException {
try {
mongoDbBatchWriter.flush();
} catch (final MongoDbBatchWriterException e) {
throw new IOException("Error flushing batch writer", e);
}
}
@Override
public Configuration getConf() {
return conf;
}
@Override
public String getTableName() {
return dbName;
}
@Override
public Set<IRI> getIndexablePredicates() {
return predicates;
}
@Override
public void deleteStatement(final RyaStatement stmt) throws IOException {
final Document obj = storageStrategy.getQuery(stmt);
collection.deleteOne(obj);
}
@Override
public void storeStatements(final Collection<RyaStatement> ryaStatements)
throws IOException {
for (final RyaStatement ryaStatement : ryaStatements){
storeStatement(ryaStatement, false);
}
if (flushEachUpdate) {
flush();
}
}
@Override
public void storeStatement(final RyaStatement ryaStatement) throws IOException {
storeStatement(ryaStatement, flushEachUpdate);
}
private void storeStatement(final RyaStatement ryaStatement, final boolean flush) throws IOException {
final Document obj = prepareStatementForStorage(ryaStatement);
try {
mongoDbBatchWriter.addObjectToQueue(obj);
if (flush) {
flush();
}
} catch (final MongoDbBatchWriterException e) {
throw new IOException("Error storing statement", e);
}
}
private Document prepareStatementForStorage(final RyaStatement ryaStatement) {
try {
final Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
final boolean isValidPredicate = predicates.isEmpty() || predicates.contains(statement.getPredicate());
if (isValidPredicate && (statement.getObject() instanceof Literal)) {
final Document obj = storageStrategy.serialize(ryaStatement);
return obj;
}
} catch (final IllegalArgumentException e) {
LOG.error("Unable to parse the statement: " + ryaStatement.toString(), e);
}
return null;
}
@Override
public void dropGraph(final RyaIRI... graphs) {
throw new UnsupportedOperationException();
}
protected CloseableIteration<Statement, QueryEvaluationException> withConstraints(final StatementConstraints constraints, final Document preConstraints) {
final Document doc = QueryBuilder.start().and(preConstraints).and(storageStrategy.getQuery(constraints)).get();
return closableIterationFromCursor(doc);
}
private CloseableIteration<Statement, QueryEvaluationException> closableIterationFromCursor(final Document doc) {
final MongoCursor<Document> cursor = collection.find(doc).iterator();
return new CloseableIteration<Statement, QueryEvaluationException>() {
@Override
public boolean hasNext() {
return cursor.hasNext();
}
@Override
public Statement next() throws QueryEvaluationException {
final Document doc = cursor.next();
return RyaToRdfConversions.convertStatement(storageStrategy.deserializeDocument(doc));
}
@Override
public void remove() {
throw new UnsupportedOperationException("Remove not implemented");
}
@Override
public void close() throws QueryEvaluationException {
cursor.close();
}
};
}
/**
* @return The name of the {@link DBCollection} to use with the storage strategy.
*/
public abstract String getCollectionName();
}
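/**
 * Illustrative helper (added for this document; not part of the original Rya source). It pushes a batch
 * of statements through any concrete indexer built on {@link AbstractMongoIndexer}, using only the
 * public methods declared above; the indexer instance is assumed to be configured elsewhere.
 */
class MongoIndexerUsageSketch {
    static void indexAll(final AbstractMongoIndexer<?> indexer, final Collection<RyaStatement> statements) throws IOException {
        indexer.storeStatements(statements); // queued through the batch writer; flushed immediately if flushEachUpdate is set
        indexer.flush();                     // force any remaining queued documents to be written to MongoDB
    }
}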
|
|
package com.dkarv.jdcallgraph.util.log;
import com.dkarv.jdcallgraph.helper.Console;
import com.dkarv.jdcallgraph.util.config.ConfigUtils;
import org.hamcrest.text.MatchesPattern;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.regex.Pattern;
import static org.junit.Assert.*;
public class LoggerTest {
@Rule
public TemporaryFolder tmp = new TemporaryFolder();
private Logger init(int logLevel, boolean stdOut) {
try {
ConfigUtils.replace( true, "logLevel: " + logLevel, "logConsole: " + stdOut);
} catch (IOException e) {
fail("Error initializing config");
}
Logger.init();
return new Logger(LoggerTest.class);
}
@Test
public void testInit() {
init(0, false);
assertTrue(Logger.TARGETS.isEmpty());
init(1, false);
assertEquals(1, Logger.TARGETS.size());
assertTrue(Logger.TARGETS.get(0) instanceof FileTarget);
init(1, true);
assertEquals(2, Logger.TARGETS.size());
assertTrue(Logger.TARGETS.get(0) instanceof FileTarget);
assertTrue(Logger.TARGETS.get(1) instanceof ConsoleTarget);
Logger logger = init(1, false);
assertEquals("[LoggerTest]", logger.prefix);
}
@Test
public void testTrace() {
Logger logger = Mockito.spy(init(6, false));
logger.trace("test");
Mockito.verify(logger).trace("test");
Mockito.verify(logger).log(6, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(5, false));
logger.trace("test");
Mockito.verify(logger).trace("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testDebug() {
Logger logger = Mockito.spy(init(5, false));
logger.debug("test");
Mockito.verify(logger).debug("test");
Mockito.verify(logger).log(5, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(4, false));
logger.debug("test");
Mockito.verify(logger).debug("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testInfo() {
Logger logger = Mockito.spy(init(4, false));
logger.info("test");
Mockito.verify(logger).info("test");
Mockito.verify(logger).log(4, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(3, false));
logger.info("test");
Mockito.verify(logger).info("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testWarn() {
Logger logger = Mockito.spy(init(3, false));
logger.warn("test");
Mockito.verify(logger).warn("test");
Mockito.verify(logger).log(3, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(2, false));
logger.warn("test");
Mockito.verify(logger).warn("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testError() {
Logger logger = Mockito.spy(init(2, false));
logger.error("test");
Mockito.verify(logger).error("test");
Mockito.verify(logger).logE(2, "test");
Mockito.verify(logger).log(2, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(1, false));
logger.error("test");
Mockito.verify(logger).error("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testFatal() {
Logger logger = Mockito.spy(init(1, false));
logger.fatal("test");
Mockito.verify(logger).fatal("test");
Mockito.verify(logger).logE(1, "test");
Mockito.verify(logger).log(1, "test");
Mockito.verifyNoMoreInteractions(logger);
logger = Mockito.spy(init(0, false));
logger.fatal("test");
Mockito.verify(logger).fatal("test");
Mockito.verifyNoMoreInteractions(logger);
}
@Test
public void testException() throws IOException {
Logger logger = init(6, false);
Logger.TARGETS.clear();
LogTarget target = Mockito.mock(LogTarget.class);
Logger.TARGETS.add(target);
Exception e = new RuntimeException();
logger.error("test", e);
Mockito.verify(target).printTrace(e, 2);
Mockito.verify(target, Mockito.times(2)).print(Mockito.anyString(), Mockito.eq(2));
Mockito.verify(target, Mockito.times(2)).flush();
Mockito.verifyNoMoreInteractions(target);
}
@Test
public void testReplacement() throws IOException {
Logger logger = init(6, false);
Logger.TARGETS.clear();
LogTarget target = Mockito.mock(LogTarget.class);
Logger.TARGETS.add(target);
String arg = "123";
logger.debug("test {}", arg);
Mockito.verify(target).print(Mockito.matches("\\[.*] \\[DEBUG] \\[LoggerTest] test 123\n"),
Mockito.eq(5));
arg = null;
logger.debug("test {}", arg);
Mockito.verify(target).print(Mockito.matches("\\[.*] \\[DEBUG] \\[LoggerTest] test null\n"),
Mockito.eq(5));
}
@Test
public void testWrongArgumentCount() {
Logger logger = init(6, false);
try {
logger.debug("test {}", "123", "456");
fail("Should notice too less {}");
} catch (IllegalArgumentException e) {
}
try {
logger.debug("test {} {}");
fail("Should notice missing arguments");
} catch (IllegalArgumentException e) {
}
}
@Test
public void testTargetError() throws IOException {
Logger logger = init(6, false);
Logger.TARGETS.clear();
LogTarget target = Mockito.mock(LogTarget.class);
Mockito.doThrow(new IOException("error")).when(target).print(Mockito.anyString(), Mockito.anyInt());
Logger.TARGETS.add(target);
Console console = new Console();
console.startCapture();
logger.debug("test");
logger.error("test", new RuntimeException());
assertEquals("", console.getOut());
String pattern = "Error in logger: error\njava.io.IOException: error\n.*";
pattern = pattern + pattern + pattern;
assertThat(console.getErr(), MatchesPattern.matchesPattern(Pattern.compile(pattern, Pattern.DOTALL)));
console.reset();
}
}
|
|
/**
*/
package isostdisots_29002_10ed_1techxmlschemavalueSimplified.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import isostdisots_29002_10ed_1techxmlschemavalueSimplified.Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage;
import isostdisots_29002_10ed_1techxmlschemavalueSimplified.RationalValue;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Rational Value</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link isostdisots_29002_10ed_1techxmlschemavalueSimplified.impl.RationalValueImpl#getWholePart <em>Whole Part</em>}</li>
* <li>{@link isostdisots_29002_10ed_1techxmlschemavalueSimplified.impl.RationalValueImpl#getNumerator <em>Numerator</em>}</li>
* <li>{@link isostdisots_29002_10ed_1techxmlschemavalueSimplified.impl.RationalValueImpl#getDenominator <em>Denominator</em>}</li>
* </ul>
*
* @generated
*/
public class RationalValueImpl extends MinimalEObjectImpl.Container implements RationalValue {
/**
* The default value of the '{@link #getWholePart() <em>Whole Part</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getWholePart()
* @generated
* @ordered
*/
protected static final int WHOLE_PART_EDEFAULT = 0;
/**
* The cached value of the '{@link #getWholePart() <em>Whole Part</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getWholePart()
* @generated
* @ordered
*/
protected int wholePart = WHOLE_PART_EDEFAULT;
/**
* The default value of the '{@link #getNumerator() <em>Numerator</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getNumerator()
* @generated
* @ordered
*/
protected static final int NUMERATOR_EDEFAULT = 0;
/**
* The cached value of the '{@link #getNumerator() <em>Numerator</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getNumerator()
* @generated
* @ordered
*/
protected int numerator = NUMERATOR_EDEFAULT;
/**
* The default value of the '{@link #getDenominator() <em>Denominator</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getDenominator()
* @generated
* @ordered
*/
protected static final int DENOMINATOR_EDEFAULT = 0;
/**
* The cached value of the '{@link #getDenominator() <em>Denominator</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getDenominator()
* @generated
* @ordered
*/
protected int denominator = DENOMINATOR_EDEFAULT;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected RationalValueImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.Literals.RATIONAL_VALUE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getWholePart() {
return wholePart;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setWholePart(int newWholePart) {
int oldWholePart = wholePart;
wholePart = newWholePart;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__WHOLE_PART, oldWholePart, wholePart));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getNumerator() {
return numerator;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setNumerator(int newNumerator) {
int oldNumerator = numerator;
numerator = newNumerator;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__NUMERATOR, oldNumerator, numerator));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getDenominator() {
return denominator;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setDenominator(int newDenominator) {
int oldDenominator = denominator;
denominator = newDenominator;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__DENOMINATOR, oldDenominator, denominator));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__WHOLE_PART:
return getWholePart();
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__NUMERATOR:
return getNumerator();
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__DENOMINATOR:
return getDenominator();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__WHOLE_PART:
setWholePart((Integer)newValue);
return;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__NUMERATOR:
setNumerator((Integer)newValue);
return;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__DENOMINATOR:
setDenominator((Integer)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__WHOLE_PART:
setWholePart(WHOLE_PART_EDEFAULT);
return;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__NUMERATOR:
setNumerator(NUMERATOR_EDEFAULT);
return;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__DENOMINATOR:
setDenominator(DENOMINATOR_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__WHOLE_PART:
return wholePart != WHOLE_PART_EDEFAULT;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__NUMERATOR:
return numerator != NUMERATOR_EDEFAULT;
case Isostdisots_29002_10ed_1techxmlschemavalueSimplifiedPackage.RATIONAL_VALUE__DENOMINATOR:
return denominator != DENOMINATOR_EDEFAULT;
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (wholePart: ");
result.append(wholePart);
result.append(", numerator: ");
result.append(numerator);
result.append(", denominator: ");
result.append(denominator);
result.append(')');
return result.toString();
}
} //RationalValueImpl
|
|
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.metadata.security;
import java.util.HashSet;
import java.util.Set;
import com.orientechnologies.orient.core.annotation.OAfterDeserialization;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.exception.OSecurityAccessException;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.security.OSecurityManager;
import com.orientechnologies.orient.core.type.ODocumentWrapper;
/**
* Contains the user settings about security and permissions. Each user has one or more roles associated. Roles contain the
* permission rules that define what the user can and cannot access.
*
* @author Luca Garulli
*
* @see ORole
*/
public class OUser extends ODocumentWrapper {
public static final String ADMIN = "admin";
public enum STATUSES {
SUSPENDED, ACTIVE
}
// AVOID THE INVOCATION OF SETTER
protected Set<ORole> roles = new HashSet<ORole>();
/**
* Constructor used in unmarshalling.
*/
public OUser() {
}
public OUser(final ODatabaseRecord iDatabase, final String iName) {
super(iDatabase, "OUser");
document.field("name", iName);
setAccountStatus(STATUSES.ACTIVE);
}
public OUser(ODatabaseRecord iDatabase, String iUserName, final String iUserPassword) {
super(iDatabase, "OUser");
document.field("name", iUserName);
setPassword(iUserPassword);
setAccountStatus(STATUSES.ACTIVE);
}
/**
* Create the user by reading the source document.
*/
public OUser(final ODocument iSource) {
fromStream(iSource);
}
@Override
@OAfterDeserialization
public void fromStream(final ODocument iSource) {
if (document != null)
return;
document = iSource;
roles = new HashSet<ORole>();
final Set<ODocument> loadedRoles = iSource.field("roles");
if (loadedRoles != null)
for (ODocument d : loadedRoles) {
roles.add(document.getDatabase().getMetadata().getSecurity().getRole((String) d.field("name")));
}
}
/**
* Checks if the user has the permission to access to the requested resource for the requested operation.
*
* @param iResource
* Requested resource
* @param iOperation
* Requested operation
* @return The role that granted the permission, if any; otherwise an OSecurityAccessException is raised
* @exception OSecurityAccessException
*/
public ORole allow(final String iResource, final int iOperation) {
if (roles == null || roles.isEmpty())
throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
+ "' has no role defined");
final ORole role = checkIfAllowed(iResource, iOperation);
if (role == null)
throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
+ "' has no the permission to execute the operation '" + ORole.permissionToString(iOperation)
+ "' against the resource: " + iResource);
return role;
}
/**
* Checks if the user has the permission to access to the requested resource for the requested operation.
*
* @param iResource
* Requested resource
* @param iOperation
* Requested operation
* @return The role that has granted the permission if any, otherwise null
*/
public ORole checkIfAllowed(final String iResource, final int iOperation) {
for (ORole r : roles)
if (r.allow(iResource, iOperation))
return r;
return null;
}
/**
* Checks if a rule was defined for the user.
*
* @param iResource
* Requested resource
* @return True if a rule is defined, otherwise false
*/
public boolean isRuleDefined(final String iResource) {
for (ORole r : roles)
if (r.hasRule(iResource))
return true;
return false;
}
public boolean checkPassword(final String iPassword) {
return OSecurityManager.instance().check(iPassword, (String) document.field("password"));
}
public String getName() {
return document.field("name");
}
public String getPassword() {
return document.field("password");
}
public OUser setPassword(final String iPassword) {
document.field("password", iPassword);
return this;
}
public static final String encryptPassword(final String iPassword) {
return OSecurityManager.instance().digest2String(iPassword, true);
}
public STATUSES getAccountStatus() {
return STATUSES.valueOf((String) document.field("status"));
}
public void setAccountStatus(STATUSES accountStatus) {
document.field("status", accountStatus);
}
public Set<ORole> getRoles() {
return roles;
}
public OUser addRole(final String iRole) {
if (iRole != null)
addRole(document.getDatabase().getMetadata().getSecurity().getRole(iRole));
return this;
}
public OUser addRole(final ORole iRole) {
if (iRole != null)
roles.add(iRole);
final HashSet<ODocument> persistentRoles = new HashSet<ODocument>();
for (ORole r : roles) {
persistentRoles.add(r.toStream());
}
document.field("roles", persistentRoles);
return this;
}
@Override
@SuppressWarnings("unchecked")
public OUser save() {
document.save(OUser.class.getSimpleName());
return this;
}
@Override
public String toString() {
return getName();
}
}
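/**
 * Illustrative helper (added for this document; not part of the original OrientDB source). It contrasts
 * the non-throwing and the throwing permission checks, using only methods declared on OUser above; the
 * resource string and the operation code are placeholders supplied by the caller.
 */
class OUserUsageSketch {
  static boolean canAccess(final OUser user, final String resource, final int operation) {
    // checkIfAllowed() returns the granting role or null and never throws
    if (user.checkIfAllowed(resource, operation) != null)
      return true;
    // allow() would instead raise OSecurityAccessException for the same denial
    return false;
  }
}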
|
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.input;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.TimeUtils;
import com.badlogic.gdx.utils.Timer;
import com.badlogic.gdx.utils.Timer.Task;
/** {@link InputProcessor} implementation that detects gestures (tap, long press, fling, pan, zoom, pinch) and hands them to a
* {@link GestureListener}.
* @author mzechner */
public class GestureDetector extends InputAdapter {
final GestureListener listener;
private float tapSquareSize;
private long tapCountInterval;
private float longPressSeconds;
private long maxFlingDelay;
private boolean inTapSquare;
private int tapCount;
private long lastTapTime;
private float lastTapX, lastTapY;
private int lastTapButton, lastTapPointer;
boolean longPressFired;
private boolean pinching;
private boolean panning;
private final VelocityTracker tracker = new VelocityTracker();
private float tapSquareCenterX, tapSquareCenterY;
private long gestureStartTime;
Vector2 pointer1 = new Vector2();
private final Vector2 pointer2 = new Vector2();
private final Vector2 initialPointer1 = new Vector2();
private final Vector2 initialPointer2 = new Vector2();
private final Task longPressTask = new Task() {
@Override
public void run () {
longPressFired = listener.longPress(pointer1.x, pointer1.y);
}
};
/** Creates a new GestureDetector with default values: halfTapSquareSize=20, tapCountInterval=0.4f, longPressDuration=1.1f,
* maxFlingDelay=0.15f. */
public GestureDetector (GestureListener listener) {
this(20, 0.4f, 1.1f, 0.15f, listener);
}
/** @param halfTapSquareSize half width in pixels of the square around an initial touch event, see
* {@link GestureListener#tap(float, float, int, int)}.
* @param tapCountInterval time in seconds that must pass for two touch down/up sequences to be detected as consecutive taps.
* @param longPressDuration time in seconds that must pass for the detector to fire a
* {@link GestureListener#longPress(float, float)} event.
* @param maxFlingDelay time in seconds the finger must have been dragged for a fling event to be fired, see
* {@link GestureListener#fling(float, float, int)}
* @param listener May be null if the listener will be set later. */
public GestureDetector (float halfTapSquareSize, float tapCountInterval, float longPressDuration, float maxFlingDelay,
GestureListener listener) {
this.tapSquareSize = halfTapSquareSize;
this.tapCountInterval = (long)(tapCountInterval * 1000000000L);
this.longPressSeconds = longPressDuration;
this.maxFlingDelay = (long)(maxFlingDelay * 1000000000L);
this.listener = listener;
}
@Override
public boolean touchDown (int x, int y, int pointer, int button) {
return touchDown((float)x, (float)y, pointer, button);
}
public boolean touchDown (float x, float y, int pointer, int button) {
if (pointer > 1) return false;
if (pointer == 0) {
pointer1.set(x, y);
gestureStartTime = Gdx.input.getCurrentEventTime();
tracker.start(x, y, gestureStartTime);
if (Gdx.input.isTouched(1)) {
// Start pinch.
inTapSquare = false;
pinching = true;
initialPointer1.set(pointer1);
initialPointer2.set(pointer2);
longPressTask.cancel();
} else {
// Normal touch down.
inTapSquare = true;
pinching = false;
longPressFired = false;
tapSquareCenterX = x;
tapSquareCenterY = y;
if (!longPressTask.isScheduled()) Timer.schedule(longPressTask, longPressSeconds);
}
} else {
// Start pinch.
pointer2.set(x, y);
inTapSquare = false;
pinching = true;
initialPointer1.set(pointer1);
initialPointer2.set(pointer2);
longPressTask.cancel();
}
return listener.touchDown(x, y, pointer, button);
}
@Override
public boolean touchDragged (int x, int y, int pointer) {
return touchDragged((float)x, (float)y, pointer);
}
public boolean touchDragged (float x, float y, int pointer) {
if (pointer > 1) return false;
if (longPressFired) return false;
if (pointer == 0)
pointer1.set(x, y);
else
pointer2.set(x, y);
// handle pinch zoom
if (pinching) {
if (listener != null) {
boolean result = listener.pinch(initialPointer1, initialPointer2, pointer1, pointer2);
return listener.zoom(initialPointer1.dst(initialPointer2), pointer1.dst(pointer2)) || result;
}
return false;
}
// update tracker
tracker.update(x, y, Gdx.input.getCurrentEventTime());
// check if we are still tapping.
if (inTapSquare && !isWithinTapSquare(x, y, tapSquareCenterX, tapSquareCenterY)) {
longPressTask.cancel();
inTapSquare = false;
}
// if we have left the tap square, we are panning
if (!inTapSquare) {
panning = true;
return listener.pan(x, y, tracker.deltaX, tracker.deltaY);
}
return false;
}
@Override
public boolean touchUp (int x, int y, int pointer, int button) {
return touchUp((float)x, (float)y, pointer, button);
}
public boolean touchUp (float x, float y, int pointer, int button) {
if (pointer > 1) return false;
// check if we are still tapping.
if (inTapSquare && !isWithinTapSquare(x, y, tapSquareCenterX, tapSquareCenterY)) inTapSquare = false;
longPressTask.cancel();
panning = false;
if (longPressFired) return false;
if (inTapSquare) {
// handle taps
if (lastTapButton != button || lastTapPointer != pointer || TimeUtils.nanoTime() - lastTapTime > tapCountInterval
|| !isWithinTapSquare(x, y, lastTapX, lastTapY)) tapCount = 0;
tapCount++;
lastTapTime = TimeUtils.nanoTime();
lastTapX = x;
lastTapY = y;
lastTapButton = button;
lastTapPointer = pointer;
gestureStartTime = 0;
return listener.tap(x, y, tapCount, button);
} else if (pinching) {
// handle pinch end
pinching = false;
panning = true;
// we are in pan mode again, reset velocity tracker
if (pointer == 0) {
// first pointer has lifted off, set up panning to use the second pointer...
tracker.start(pointer2.x, pointer2.y, Gdx.input.getCurrentEventTime());
} else {
// second pointer has lifted off, set up panning to use the first pointer...
tracker.start(pointer1.x, pointer1.y, Gdx.input.getCurrentEventTime());
}
} else {
gestureStartTime = 0;
// handle fling
long time = Gdx.input.getCurrentEventTime();
if (time - tracker.lastTime < maxFlingDelay) {
tracker.update(x, y, time);
return listener.fling(tracker.getVelocityX(), tracker.getVelocityY(), button);
}
}
return false;
}
/** @return whether the user touched the screen long enough to trigger a long press event. */
public boolean isLongPressed () {
return isLongPressed(longPressSeconds);
}
/** @param duration
* @return whether the user touched the screen for as much or more than the given duration. */
public boolean isLongPressed (float duration) {
if (gestureStartTime == 0) return false;
return TimeUtils.nanoTime() - gestureStartTime > (long)(duration * 1000000000L);
}
public boolean isPanning () {
return panning;
}
public void reset () {
gestureStartTime = 0;
panning = false;
inTapSquare = false;
}
private boolean isWithinTapSquare (float x, float y, float centerX, float centerY) {
return Math.abs(x - centerX) < tapSquareSize && Math.abs(y - centerY) < tapSquareSize;
}
/** The tap square will no longer be used for the current touch. */
public void invalidateTapSquare () {
inTapSquare = false;
}
public void setTapSquareSize (int tapSquareSize) {
this.tapSquareSize = tapSquareSize;
}
/** @param tapCountInterval the maximum time in seconds that may pass between two touch down/up sequences for them to be detected as consecutive taps. */
public void setTapCountInterval (float tapCountInterval) {
this.tapCountInterval = (long)(tapCountInterval * 1000000000l);
}
public void setLongPressSeconds (float longPressSeconds) {
this.longPressSeconds = longPressSeconds;
}
public void setMaxFlingDelay (long maxFlingDelay) {
this.maxFlingDelay = maxFlingDelay;
}
/** Register an instance of this class with a {@link GestureDetector} to receive gestures such as taps, long presses, flings,
* panning or pinch zooming. Each method returns a boolean indicating whether the event was handled: return true to consume the
* event, or false to hand it on to the next listener.
* @author mzechner */
public static interface GestureListener {
/** Called when a finger went down on the screen or a mouse button was pressed. */
public boolean touchDown (float x, float y, int pointer, int button);
/** Called when a tap occurred. A tap happens if a touch went down on the screen and was lifted again without moving outside
* of the tap square. The tap square is a rectangular area around the initial touch position, as specified at construction
* time of the {@link GestureDetector}.
* @param count the number of taps. */
public boolean tap (float x, float y, int count, int button);
public boolean longPress (float x, float y);
/** Called when the user dragged a finger over the screen and lifted it. Reports the last known velocity of the finger in
* pixels per second.
* @param velocityX velocity on x in pixels per second
* @param velocityY velocity on y in pixels per second */
public boolean fling (float velocityX, float velocityY, int button);
/** Called when the user drags a finger over the screen.
* @param x
* @param y
* @param deltaX the difference in pixels to the last drag event on x.
* @param deltaY the difference in pixels to the last drag event on y. */
public boolean pan (float x, float y, float deltaX, float deltaY);
/** Called when the user performs a pinch zoom gesture. The original distance is the distance in pixels when the gesture
* started.
* @param initialDistance distance between fingers when the gesture started.
* @param distance current distance between fingers. */
public boolean zoom (float initialDistance, float distance);
/** Called when a user performs a pinch zoom gesture. Reports the initial positions of the two involved fingers and their
* current positions.
* @param initialPointer1
* @param initialPointer2
* @param pointer1
* @param pointer2 */
public boolean pinch (Vector2 initialPointer1, Vector2 initialPointer2, Vector2 pointer1, Vector2 pointer2);
}
/** Derive from this if you only want to implement a subset of {@link GestureListener}.
* @author mzechner */
public static class GestureAdapter implements GestureListener {
@Override
public boolean touchDown (float x, float y, int pointer, int button) {
return false;
}
@Override
public boolean tap (float x, float y, int count, int button) {
return false;
}
@Override
public boolean longPress (float x, float y) {
return false;
}
@Override
public boolean fling (float velocityX, float velocityY, int button) {
return false;
}
@Override
public boolean pan (float x, float y, float deltaX, float deltaY) {
return false;
}
@Override
public boolean zoom (float initialDistance, float distance) {
return false;
}
@Override
public boolean pinch (Vector2 initialPointer1, Vector2 initialPointer2, Vector2 pointer1, Vector2 pointer2) {
return false;
}
}
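/** Tracks the most recent {@code sampleSize} movement deltas in ring buffers and derives an average velocity in pixels per
 * second from them. */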
static class VelocityTracker {
int sampleSize = 10;
float lastX, lastY;
float deltaX, deltaY;
long lastTime;
int numSamples;
float[] meanX = new float[sampleSize];
float[] meanY = new float[sampleSize];
long[] meanTime = new long[sampleSize];
public void start (float x, float y, long timeStamp) {
lastX = x;
lastY = y;
deltaX = 0;
deltaY = 0;
numSamples = 0;
for (int i = 0; i < sampleSize; i++) {
meanX[i] = 0;
meanY[i] = 0;
meanTime[i] = 0;
}
lastTime = timeStamp;
}
public void update (float x, float y, long timeStamp) {
long currTime = timeStamp;
deltaX = x - lastX;
deltaY = y - lastY;
lastX = x;
lastY = y;
long deltaTime = currTime - lastTime;
lastTime = currTime;
int index = numSamples % sampleSize;
meanX[index] = deltaX;
meanY[index] = deltaY;
meanTime[index] = deltaTime;
numSamples++;
}
public float getVelocityX () {
float meanX = getAverage(this.meanX, numSamples);
float meanTime = getAverage(this.meanTime, numSamples) / 1000000000.0f;
if (meanTime == 0) return 0;
return meanX / meanTime;
}
public float getVelocityY () {
float meanY = getAverage(this.meanY, numSamples);
float meanTime = getAverage(this.meanTime, numSamples) / 1000000000.0f;
if (meanTime == 0) return 0;
return meanY / meanTime;
}
private float getAverage (float[] values, int numSamples) {
numSamples = Math.min(sampleSize, numSamples);
float sum = 0;
for (int i = 0; i < numSamples; i++) {
sum += values[i];
}
// guard against division by zero, matching the long[] overload and getSum
if (numSamples == 0) return 0;
return sum / numSamples;
}
private long getAverage (long[] values, int numSamples) {
numSamples = Math.min(sampleSize, numSamples);
long sum = 0;
for (int i = 0; i < numSamples; i++) {
sum += values[i];
}
if (numSamples == 0) return 0;
return sum / numSamples;
}
private float getSum (float[] values, int numSamples) {
numSamples = Math.min(sampleSize, numSamples);
float sum = 0;
for (int i = 0; i < numSamples; i++) {
sum += values[i];
}
if (numSamples == 0) return 0;
return sum;
}
}
}
|
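/*
 * Illustrative usage sketch (not part of the original sources): registering the GestureDetector
 * above with libGDX input handling. It assumes the standard libGDX constructor
 * GestureDetector(GestureListener) and the com.badlogic.gdx package layout; adjust the imports to
 * match the version actually in use.
 */
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.input.GestureDetector;
import com.badlogic.gdx.input.GestureDetector.GestureAdapter;

public class GestureDetectorUsageSketch {
	public static void register () {
		// Override only the callbacks of interest; GestureAdapter stubs out the rest.
		GestureDetector detector = new GestureDetector(new GestureAdapter() {
			@Override
			public boolean tap (float x, float y, int count, int button) {
				Gdx.app.log("gestures", "tap at " + x + "," + y + " count=" + count);
				return true; // consume the event
			}

			@Override
			public boolean fling (float velocityX, float velocityY, int button) {
				return false; // hand the event on to the next listener
			}
		});
		// GestureDetector is itself an InputProcessor, so it can be installed directly.
		Gdx.input.setInputProcessor(detector);
	}
}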
|
package com.endava.issuetracker.web;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import com.endava.issuetracker.domain.Category;
import com.endava.issuetracker.domain.Issue;
import com.endava.issuetracker.domain.Priority;
import com.endava.issuetracker.domain.Project;
import com.endava.issuetracker.domain.Resolution;
import com.endava.issuetracker.domain.Severity;
import com.endava.issuetracker.domain.State;
import com.endava.issuetracker.domain.User;
import com.endava.issuetracker.domain.Version;
import com.endava.issuetracker.service.IssueService;
import com.endava.issuetracker.service.ProjectService;
import com.endava.issuetracker.service.UserService;
import com.endava.issuetracker.service.VersionService;
import com.google.gson.Gson;
@Controller
public class IssueController {
private static final Logger logger = LoggerFactory.getLogger(IssueController.class);
@Autowired
private ProjectService projectService;
@Autowired
private IssueService issueService;
@Autowired
private UserService userService;
@Autowired
private VersionService versionService;
@RequestMapping(value = "/user/issues", method = RequestMethod.GET)
public String displayIssues(Model model) {
Iterable<Issue> issuesList = issueService.findAllIssues();
model.addAttribute("issuesList", issuesList);
return "user/issues";
}
@RequestMapping(value = "/issues/viewAssigned", method = RequestMethod.GET)
public String displayAssignedIssues(Model model) {
User user = userService.getLoggedInUser();
Iterable<Issue> issuesList = issueService.findAssignedIssues(user.getId().toString());
model.addAttribute("issuesList", issuesList);
return "user/issues";
}
@RequestMapping(value = "/issues/viewReported", method = RequestMethod.GET)
public String displayReportedIssues(Model model) {
User user = userService.getLoggedInUser();
Iterable<Issue> issuesList = issueService.findReportedIssues(user.getId().toString());
model.addAttribute("issuesList", issuesList);
return "user/issues";
}
/* @ModelAttribute("categoryList")
public List<Category> populateCategory()
{
return Arrays.asList(Category.values());
} */
@RequestMapping(value = "/user/createIssue", method = RequestMethod.GET)
public String addIssue(Model model) {
Issue newIssue = new Issue();
model.addAttribute("issue", newIssue);
model.addAttribute("categoryList", Category.values());
model.addAttribute("severityList", Severity.values());
model.addAttribute("priorityList", Priority.values());
Iterable<Project> projectsList = projectService.findProjects();
model.addAttribute("projectsList", projectsList);
return "user/createIssue";
}
@RequestMapping(value = "/user/createIssue", method = RequestMethod.POST)
public String addIssue(@ModelAttribute Issue issue, @RequestParam Map<String, String> params) {
if(params != null && params.containsKey("save")) {
Project selectedProject = null;
if(params.get("project.id") != null) {
selectedProject = projectService.findProject(Long.valueOf(params.get("project.id")));
issue.setProject(selectedProject);
issue.setAssigned(selectedProject.getOwner());
}
String versionName = params.get("openOnVersion.name");
if(versionName != null && !versionName.equals("--Please Select a project")) {
Version v = versionService.findVersion(versionName,selectedProject.getId().toString());
issue.setOpenOnVersion(v);
}
User reporter = userService.getLoggedInUser();
issue.setCreatedBy(reporter);
issue.setModifiedBy(reporter);
issue.setState(State.OPEN);
long timeNow = Calendar.getInstance().getTimeInMillis();
Timestamp ts = new java.sql.Timestamp(timeNow);
issue.setCreated(ts);
issue.setModified(ts);
issueService.saveIssue(issue);
}
return "redirect:/user/issues";
}
@RequestMapping(value ="/issues/{issueId}/editIssue", method = RequestMethod.GET)
public String editIssue(@PathVariable("issueId") long issueId, Model model) {
Issue editedIssue = issueService.findIssue(issueId);
model.addAttribute("issue", editedIssue);
Iterable<Project> projectsList = projectService.findProjects();
Iterable<Version> versionsList = versionService.findAllVersions();
String currentVersion= editedIssue.getOpenOnVersion().getName();
model.addAttribute("currentVersion", currentVersion);
model.addAttribute("projectsList", projectsList);
model.addAttribute("versionsList", versionsList);
model.addAttribute("stateList", State.values());
model.addAttribute("priorityList", Priority.values());
model.addAttribute("severityList", Severity.values());
model.addAttribute("resolutionList", Resolution.values());
return "user/editIssue";
}
@RequestMapping(value ="/issues/{issueId}/editIssue", method = RequestMethod.POST)
public String editIssue(@ModelAttribute Issue issue, @RequestParam Map<String, String> params) {
if(params != null && params.containsKey("save")) {
Issue oldIssue = issueService.findIssue(issue.getId());
issue.setAssigned(oldIssue.getAssigned());
issue.setCreated(oldIssue.getCreated());
issue.setCreatedBy(oldIssue.getCreatedBy());
//issue.setState(oldIssue.getState());
if(params.get("project.id") != null) {
Project selectedProject = projectService.findProject(Long.valueOf(params.get("project.id")));
issue.setProject(selectedProject);
}
long timeNow = Calendar.getInstance().getTimeInMillis();
Timestamp ts = new java.sql.Timestamp(timeNow);
issue.setModified(ts);
String versionName = params.get("openOnVersion.name");
if(versionName != "" && versionName != null && !versionName.equals("--Please Select a project")) {
Version v = versionService.findVersion(versionName, params.get("project.id"));
issue.setOpenOnVersion(v);
}
else {
issue.setOpenOnVersion(oldIssue.getOpenOnVersion());
}
/* String resolution = params.get("resolution");
if(resolution != "" && resolution != null && !resolution.equals("--Please Select")) {
issue.setResolution(Resolution.valueOf(resolution));
}
else {
issue.setResolution(oldIssue.getResolution());
}*/
String currentState = issue.getState().getName();
String fixedVersionName = params.get("fixedOnVersion.name");
if(fixedVersionName != null && !fixedVersionName.isEmpty() && !currentState.equals("open") && !currentState.equals("in progress")) {
Version v = versionService.findVersion(fixedVersionName, params.get("project.id"));
issue.setFixedOnVersion(v);
}
else {
issue.setFixedOnVersion(null);
issue.setResolution(null);
}
User currentUser = userService.getLoggedInUser();
issue.setModifiedBy(currentUser);
issueService.saveIssue(issue);
}
return "redirect:/user/issues";
}
@RequestMapping(value ="/versions", method = RequestMethod.GET)
public @ResponseBody
String versionsForProject(@RequestParam(value="projectName", required= true) String project) {
long projectId = Long.valueOf(project);
Project projectObject = projectService.findProject(projectId);
Set<Version> versions = projectObject.getVersions();
Set<String> versionNames = new HashSet<String>();
for(Version v:versions) {
versionNames.add(v.getName());
}
String json = new Gson().toJson(versionNames);
return json;
}
@RequestMapping(value ="/issues/{issueId}", method = RequestMethod.GET)
public ModelAndView showIssue(@PathVariable("issueId") long issueId) {
Issue issue = this.issueService.findIssue(issueId);
ModelAndView mav = new ModelAndView("user/issueDetails");
mav.addObject(issue);
return mav;
}
@RequestMapping(value ="/issues/{issueId2}/deleteIssue", method = RequestMethod.GET)
public String deleteIssue(@PathVariable("issueId2") long issueId, Model model) {
issueService.deleteIssue(issueId);
return "forward:/user/issues";
}
}
|
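/*
 * Illustrative sketch (not part of the original sources): the JSON payload shape produced by
 * IssueController.versionsForProject, which serializes the project's version names with Gson.
 * The version names below are made-up placeholders.
 */
import java.util.HashSet;
import java.util.Set;
import com.google.gson.Gson;

public class VersionsPayloadSketch {
    public static void main(String[] args) {
        Set<String> versionNames = new HashSet<String>();
        versionNames.add("1.0");
        versionNames.add("1.1-SNAPSHOT");
        // Prints something like ["1.0","1.1-SNAPSHOT"]; element order follows the set's iteration order.
        System.out.println(new Gson().toJson(versionNames));
    }
}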
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.runtime.io;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.runtime.checkpoint.channel.InputChannelInfo;
import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.io.PullingAsyncDataInput;
import org.apache.flink.runtime.io.network.api.CancelCheckpointMarker;
import org.apache.flink.runtime.io.network.api.CheckpointBarrier;
import org.apache.flink.runtime.io.network.api.EndOfPartitionEvent;
import org.apache.flink.runtime.io.network.api.EventAnnouncement;
import org.apache.flink.runtime.io.network.partition.consumer.BufferOrEvent;
import org.apache.flink.runtime.io.network.partition.consumer.EndOfChannelStateEvent;
import org.apache.flink.runtime.io.network.partition.consumer.InputChannel;
import org.apache.flink.runtime.io.network.partition.consumer.InputGate;
import org.apache.flink.streaming.api.operators.MailboxExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.RejectedExecutionException;
import static org.apache.flink.runtime.concurrent.FutureUtils.assertNoException;
import static org.apache.flink.util.Preconditions.checkState;
/**
* The {@link CheckpointedInputGate} uses {@link CheckpointBarrierHandler} to handle incoming
* {@link CheckpointBarrier} from the {@link InputGate}.
*/
@Internal
public class CheckpointedInputGate implements PullingAsyncDataInput<BufferOrEvent>, Closeable {
private static final Logger LOG = LoggerFactory.getLogger(CheckpointedInputGate.class);
private final CheckpointBarrierHandler barrierHandler;
private final UpstreamRecoveryTracker upstreamRecoveryTracker;
/** The gate that the buffer draws its input from. */
private final InputGate inputGate;
private final MailboxExecutor mailboxExecutor;
/** Indicates the end of the input. */
private boolean isFinished;
/**
* Creates a new checkpoint stream aligner.
*
* <p>The aligner will allow only alignments that buffer up to the given number of bytes.
* When that number is exceeded, it will stop the alignment and notify the task that the
* checkpoint has been cancelled.
*
* @param inputGate The input gate to draw the buffers and events from.
* @param barrierHandler Handler that controls which channels are blocked.
*/
public CheckpointedInputGate(
InputGate inputGate,
CheckpointBarrierHandler barrierHandler,
MailboxExecutor mailboxExecutor) {
this(
inputGate,
barrierHandler,
mailboxExecutor,
UpstreamRecoveryTracker.NO_OP
);
}
public CheckpointedInputGate(
InputGate inputGate,
CheckpointBarrierHandler barrierHandler,
MailboxExecutor mailboxExecutor,
UpstreamRecoveryTracker upstreamRecoveryTracker) {
this.inputGate = inputGate;
this.barrierHandler = barrierHandler;
this.mailboxExecutor = mailboxExecutor;
this.upstreamRecoveryTracker = upstreamRecoveryTracker;
waitForPriorityEvents(inputGate, mailboxExecutor);
}
/**
* Eagerly pulls and processes all priority events. Must be called from task thread.
*
* <p>Basic assumption is that no priority event needs to be handled by the {@link StreamTaskNetworkInput}.
*/
private void processPriorityEvents() throws IOException, InterruptedException {
// check whether the priority event has not been processed yet (it could have been pulled before this mail was executed)
boolean hasPriorityEvent = inputGate.getPriorityEventAvailableFuture().isDone();
while (hasPriorityEvent) {
// process as many priority events as possible
final Optional<BufferOrEvent> bufferOrEventOpt = pollNext();
checkState(bufferOrEventOpt.isPresent());
final BufferOrEvent bufferOrEvent = bufferOrEventOpt.get();
checkState(bufferOrEvent.hasPriority(), "Should only poll priority events");
hasPriorityEvent = bufferOrEvent.morePriorityEvents();
}
// re-enqueue mail to process future priority events
waitForPriorityEvents(inputGate, mailboxExecutor);
}
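/**
 * Registers a callback on the gate's priority-event future that enqueues a mailbox action
 * running {@link #processPriorityEvents()} once a priority event becomes available.
 */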
private void waitForPriorityEvents(InputGate inputGate, MailboxExecutor mailboxExecutor) {
final CompletableFuture<?> priorityEventAvailableFuture = inputGate.getPriorityEventAvailableFuture();
assertNoException(priorityEventAvailableFuture.thenRun(() -> {
try {
mailboxExecutor.execute(this::processPriorityEvents, "process priority event @ gate %s", inputGate);
} catch (RejectedExecutionException ex) {
LOG.debug("Ignored RejectedExecutionException in CheckpointedInputGate.waitForPriorityEvents");
}
}));
}
@Override
public CompletableFuture<?> getAvailableFuture() {
return inputGate.getAvailableFuture();
}
@Override
public Optional<BufferOrEvent> pollNext() throws IOException, InterruptedException {
Optional<BufferOrEvent> next = inputGate.pollNext();
if (!next.isPresent()) {
return handleEmptyBuffer();
}
BufferOrEvent bufferOrEvent = next.get();
if (bufferOrEvent.isEvent()) {
return handleEvent(bufferOrEvent);
}
else if (bufferOrEvent.isBuffer()) {
/**
* https://issues.apache.org/jira/browse/FLINK-19537
* This is not entirely true, as it's ignoring the buffer/bytes accumulated in the
* record deserializers. If buffer is processed here, it doesn't mean it was fully
* processed (so we can over estimate the amount of processed bytes). On the other hand
* some records/bytes might be processed without polling anything from this
* {@link CheckpointedInputGate} (underestimating the amount of processed bytes). All in all
* this should have been calculated on the {@link StreamTaskNetworkInput} level, where we
* have access to the record deserializers. However, the current estimate is on average accurate
* and it might be just good enough (at least for the time being).
*/
barrierHandler.addProcessedBytes(bufferOrEvent.getBuffer().getSize());
}
return next;
}
private Optional<BufferOrEvent> handleEvent(BufferOrEvent bufferOrEvent) throws IOException, InterruptedException {
Class<? extends AbstractEvent> eventClass = bufferOrEvent.getEvent().getClass();
if (eventClass == CheckpointBarrier.class) {
CheckpointBarrier checkpointBarrier = (CheckpointBarrier) bufferOrEvent.getEvent();
barrierHandler.processBarrier(checkpointBarrier, bufferOrEvent.getChannelInfo());
}
else if (eventClass == CancelCheckpointMarker.class) {
barrierHandler.processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent());
}
else if (eventClass == EndOfPartitionEvent.class) {
barrierHandler.processEndOfPartition();
}
else if (eventClass == EventAnnouncement.class) {
EventAnnouncement eventAnnouncement = (EventAnnouncement) bufferOrEvent.getEvent();
AbstractEvent announcedEvent = eventAnnouncement.getAnnouncedEvent();
checkState(
announcedEvent instanceof CheckpointBarrier,
"Only CheckpointBarrier announcement are currently supported, but found [%s]",
announcedEvent);
CheckpointBarrier announcedBarrier = (CheckpointBarrier) announcedEvent;
barrierHandler.processBarrierAnnouncement(announcedBarrier, eventAnnouncement.getSequenceNumber(), bufferOrEvent.getChannelInfo());
}
else if (bufferOrEvent.getEvent().getClass() == EndOfChannelStateEvent.class) {
upstreamRecoveryTracker.handleEndOfRecovery(bufferOrEvent.getChannelInfo());
if (!upstreamRecoveryTracker.allChannelsRecovered()) {
return pollNext();
}
}
return Optional.of(bufferOrEvent);
}
public CompletableFuture<Void> getAllBarriersReceivedFuture(long checkpointId) {
return barrierHandler.getAllBarriersReceivedFuture(checkpointId);
}
private Optional<BufferOrEvent> handleEmptyBuffer() {
if (inputGate.isFinished()) {
isFinished = true;
}
return Optional.empty();
}
@Override
public boolean isFinished() {
return isFinished;
}
/**
* Cleans up all internally held resources.
*
* @throws IOException Thrown if the cleanup of I/O resources failed.
*/
public void close() throws IOException {
barrierHandler.close();
}
// ------------------------------------------------------------------------
// Properties
// ------------------------------------------------------------------------
/**
* Gets the ID defining the current pending, or just completed, checkpoint.
*
* @return The ID of the pending or completed checkpoint.
*/
@VisibleForTesting
long getLatestCheckpointId() {
return barrierHandler.getLatestCheckpointId();
}
/**
* Gets the time that the latest alignment took, in nanoseconds.
* If there is currently an alignment in progress, it will return the time spent in the
* current alignment so far.
*
* @return The duration in nanoseconds
*/
@VisibleForTesting
long getAlignmentDurationNanos() {
return barrierHandler.getAlignmentDurationNanos();
}
/**
* @return the time that elapsed, in nanoseconds, between the creation of the latest checkpoint
* and the time when its first {@link CheckpointBarrier} was received by this {@link InputGate}.
*/
@VisibleForTesting
long getCheckpointStartDelayNanos() {
return barrierHandler.getCheckpointStartDelayNanos();
}
/**
* @return number of underlying input channels.
*/
public int getNumberOfInputChannels() {
return inputGate.getNumberOfInputChannels();
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
@Override
public String toString() {
return barrierHandler.toString();
}
public InputChannel getChannel(int channelIndex) {
return inputGate.getChannel(channelIndex);
}
public List<InputChannelInfo> getChannelInfos() {
return inputGate.getChannelInfos();
}
@VisibleForTesting
CheckpointBarrierHandler getCheckpointBarrierHandler() {
return barrierHandler;
}
}
|
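/*
 * Illustrative usage sketch (not part of the original sources): how a caller might drain a
 * CheckpointedInputGate. Only methods visible in the class above are used (pollNext, isFinished);
 * obtaining the gate instance and the processBuffer/processEvent helpers are hypothetical
 * placeholders for whatever the surrounding task code does.
 */
import java.io.IOException;
import java.util.Optional;

import org.apache.flink.runtime.event.AbstractEvent;
import org.apache.flink.runtime.io.network.buffer.Buffer;
import org.apache.flink.runtime.io.network.partition.consumer.BufferOrEvent;
import org.apache.flink.streaming.runtime.io.CheckpointedInputGate;

public class CheckpointedInputGateUsageSketch {
    static void drain(CheckpointedInputGate gate) throws IOException, InterruptedException {
        while (!gate.isFinished()) {
            Optional<BufferOrEvent> next = gate.pollNext();
            if (!next.isPresent()) {
                // Nothing available right now; a real caller would wait on gate.getAvailableFuture()
                // before polling again. This sketch simply stops.
                return;
            }
            BufferOrEvent bufferOrEvent = next.get();
            if (bufferOrEvent.isBuffer()) {
                processBuffer(bufferOrEvent.getBuffer()); // hypothetical record handling
            } else {
                // Checkpoint barriers were already forwarded to the barrier handler inside pollNext().
                processEvent(bufferOrEvent.getEvent());
            }
        }
    }

    private static void processBuffer(Buffer buffer) { /* placeholder */ }
    private static void processEvent(AbstractEvent event) { /* placeholder */ }
}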
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.JDBCType;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLSyntaxErrorException;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.Locale;
import java.util.StringJoiner;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
public abstract class PreparedStatementTestCase extends JdbcIntegrationTestCase {
public void testSupportedTypes() throws SQLException {
String stringVal = randomAlphaOfLength(randomIntBetween(0, 1000));
int intVal = randomInt();
long longVal = randomLong();
double doubleVal = randomDouble();
float floatVal = randomFloat();
boolean booleanVal = randomBoolean();
byte byteVal = randomByte();
short shortVal = randomShort();
BigDecimal bigDecimalVal = BigDecimal.valueOf(randomDouble());
long millis = randomNonNegativeLong();
Calendar calendarVal = Calendar.getInstance(randomTimeZone(), Locale.ROOT);
Timestamp timestampVal = new Timestamp(millis);
Timestamp timestampValWithCal = new Timestamp(JdbcTestUtils.convertFromCalendarToUTC(timestampVal.getTime(), calendarVal));
Date dateVal = JdbcTestUtils.asDate(millis, JdbcTestUtils.UTC);
Date dateValWithCal = JdbcTestUtils.asDate(
JdbcTestUtils.convertFromCalendarToUTC(dateVal.getTime(), calendarVal),
JdbcTestUtils.UTC
);
Time timeVal = JdbcTestUtils.asTime(millis, JdbcTestUtils.UTC);
Time timeValWithCal = JdbcTestUtils.asTime(
JdbcTestUtils.convertFromCalendarToUTC(timeVal.getTime(), calendarVal),
JdbcTestUtils.UTC
);
java.util.Date utilDateVal = new java.util.Date(millis);
LocalDateTime localDateTimeVal = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), JdbcTestUtils.UTC);
try (Connection connection = esJdbc()) {
StringJoiner sql = new StringJoiner(",", "SELECT ", "");
for (int i = 0; i < 19; i++) {
sql.add("?");
}
try (PreparedStatement statement = connection.prepareStatement(sql.toString())) {
statement.setString(1, stringVal);
statement.setInt(2, intVal);
statement.setLong(3, longVal);
statement.setFloat(4, floatVal);
statement.setDouble(5, doubleVal);
statement.setNull(6, JDBCType.DOUBLE.getVendorTypeNumber());
statement.setBoolean(7, booleanVal);
statement.setByte(8, byteVal);
statement.setShort(9, shortVal);
statement.setBigDecimal(10, bigDecimalVal);
statement.setTimestamp(11, timestampVal);
statement.setTimestamp(12, timestampVal, calendarVal);
statement.setDate(13, dateVal);
statement.setDate(14, dateVal, calendarVal);
statement.setTime(15, timeVal);
statement.setTime(16, timeVal, calendarVal);
statement.setObject(17, calendarVal);
statement.setObject(18, utilDateVal);
statement.setObject(19, localDateTimeVal);
try (ResultSet results = statement.executeQuery()) {
ResultSetMetaData resultSetMetaData = results.getMetaData();
ParameterMetaData parameterMetaData = statement.getParameterMetaData();
assertEquals(resultSetMetaData.getColumnCount(), parameterMetaData.getParameterCount());
for (int i = 1; i < resultSetMetaData.getColumnCount(); i++) {
// Makes sure that column types survived the round trip
assertEquals(parameterMetaData.getParameterType(i), resultSetMetaData.getColumnType(i));
}
assertTrue(results.next());
assertEquals(stringVal, results.getString(1));
assertEquals(intVal, results.getInt(2));
assertEquals(longVal, results.getLong(3));
assertEquals(floatVal, results.getFloat(4), 0.00001f);
assertEquals(doubleVal, results.getDouble(5), 0.00001f);
assertNull(results.getObject(6));
assertEquals(booleanVal, results.getBoolean(7));
assertEquals(byteVal, results.getByte(8));
assertEquals(shortVal, results.getShort(9));
assertEquals(bigDecimalVal, results.getBigDecimal(10));
assertEquals(timestampVal, results.getTimestamp(11));
assertEquals(timestampValWithCal, results.getTimestamp(12));
assertEquals(dateVal, results.getDate(13));
assertEquals(dateValWithCal, results.getDate(14));
assertEquals(timeVal, results.getTime(15));
assertEquals(timeValWithCal, results.getTime(16));
assertEquals(new Timestamp(calendarVal.getTimeInMillis()), results.getObject(17));
assertEquals(timestampVal, results.getObject(18));
assertEquals(timestampVal, results.getObject(19));
assertFalse(results.next());
}
}
}
}
public void testDatetime() throws IOException, SQLException {
long randomMillis = randomNonNegativeLong();
setupIndexForDateTimeTests(randomMillis);
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT id, birth_date FROM emps WHERE birth_date = ?")) {
Object dateTimeParam = randomFrom(new Timestamp(randomMillis), new Date(randomMillis));
statement.setObject(1, dateTimeParam);
try (ResultSet results = statement.executeQuery()) {
assertTrue(results.next());
assertEquals(1002, results.getInt(1));
assertEquals(new Timestamp(randomMillis), results.getTimestamp(2));
assertFalse(results.next());
}
}
}
}
public void testDate() throws IOException, SQLException {
long randomMillis = randomNonNegativeLong();
setupIndexForDateTimeTests(randomMillis);
try (Connection connection = esJdbc()) {
try (
PreparedStatement statement = connection.prepareStatement(
"SELECT id, birth_date FROM emps WHERE birth_date::date = ? " + "ORDER BY id"
)
) {
statement.setDate(1, new Date(JdbcTestUtils.asDate(randomMillis, JdbcTestUtils.UTC).getTime()));
try (ResultSet results = statement.executeQuery()) {
for (int i = 1; i <= 3; i++) {
assertTrue(results.next());
assertEquals(1000 + i, results.getInt(1));
assertEquals(new Timestamp(testMillis(randomMillis, i)), results.getTimestamp(2));
}
assertFalse(results.next());
}
}
}
}
public void testTime() throws IOException, SQLException {
long randomMillis = randomNonNegativeLong();
setupIndexForDateTimeTests(randomMillis);
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT id, birth_date FROM emps WHERE birth_date::time = ?")) {
Time time = JdbcTestUtils.asTime(randomMillis, JdbcTestUtils.UTC);
statement.setObject(1, time);
try (ResultSet results = statement.executeQuery()) {
assertTrue(results.next());
assertEquals(1002, results.getInt(1));
assertEquals(new Timestamp(randomMillis), results.getTimestamp(2));
assertFalse(results.next());
}
}
}
}
public void testOutOfRangeBigDecimal() throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?")) {
BigDecimal tooLarge = BigDecimal.valueOf(Double.MAX_VALUE).add(BigDecimal.ONE);
SQLException ex = expectThrows(SQLException.class, () -> statement.setBigDecimal(1, tooLarge));
assertThat(ex.getMessage(), equalTo("BigDecimal value [" + tooLarge + "] out of supported double's range."));
}
}
}
public void testUnsupportedParameterUse() throws IOException, SQLException {
index("library", builder -> {
builder.field("name", "Don Quixote");
builder.field("page_count", 1072);
});
try (Connection connection = esJdbc()) {
// This is a current limitation of the JDBC parser: it cannot detect improper use of '?'
try (PreparedStatement statement = connection.prepareStatement("SELECT name FROM ? WHERE page_count=?")) {
statement.setString(1, "library");
statement.setInt(2, 1072);
SQLSyntaxErrorException exception = expectThrows(SQLSyntaxErrorException.class, statement::executeQuery);
assertThat(exception.getMessage(), startsWith("line 1:18: mismatched input '?' expecting "));
}
}
}
public void testTooManyParameters() throws IOException, SQLException {
index("library", builder -> {
builder.field("name", "Don Quixote");
builder.field("page_count", 1072);
});
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT name FROM library WHERE page_count=?")) {
statement.setInt(1, 1072);
int tooBig = randomIntBetween(2, 10);
SQLException tooBigEx = expectThrows(SQLException.class, () -> statement.setInt(tooBig, 1072));
assertThat(tooBigEx.getMessage(), startsWith("Invalid parameter index ["));
int tooSmall = randomIntBetween(-10, 0);
SQLException tooSmallEx = expectThrows(SQLException.class, () -> statement.setInt(tooSmall, 1072));
assertThat(tooSmallEx.getMessage(), startsWith("Invalid parameter index ["));
}
}
}
public void testStringEscaping() throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?, ?, ?, ?")) {
statement.setString(1, "foo --");
statement.setString(2, "/* foo */");
statement.setString(3, "\"foo");
statement.setString(4, "'foo'");
try (ResultSet results = statement.executeQuery()) {
ResultSetMetaData resultSetMetaData = results.getMetaData();
assertEquals(4, resultSetMetaData.getColumnCount());
for (int i = 1; i < resultSetMetaData.getColumnCount(); i++) {
assertEquals(JDBCType.VARCHAR.getVendorTypeNumber().intValue(), resultSetMetaData.getColumnType(i));
}
assertTrue(results.next());
assertEquals("foo --", results.getString(1));
assertEquals("/* foo */", results.getString(2));
assertEquals("\"foo", results.getString(3));
assertEquals("'foo'", results.getString(4));
assertFalse(results.next());
}
}
}
}
public void testCommentsHandling() throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?, /* ?, */ ? -- ?")) {
assertEquals(2, statement.getParameterMetaData().getParameterCount());
statement.setString(1, "foo");
statement.setString(2, "bar");
try (ResultSet results = statement.executeQuery()) {
ResultSetMetaData resultSetMetaData = results.getMetaData();
assertEquals(2, resultSetMetaData.getColumnCount());
assertTrue(results.next());
assertEquals("foo", results.getString(1));
assertEquals("bar", results.getString(2));
assertFalse(results.next());
}
}
}
}
public void testSingleParameterMultipleTypes() throws SQLException {
String stringVal = randomAlphaOfLength(randomIntBetween(0, 1000));
int intVal = randomInt();
long longVal = randomLong();
double doubleVal = randomDouble();
float floatVal = randomFloat();
boolean booleanVal = randomBoolean();
byte byteVal = randomByte();
short shortVal = randomShort();
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?")) {
statement.setString(1, stringVal);
assertEquals(new Tuple<>(JDBCType.VARCHAR.getVendorTypeNumber(), stringVal), execute(statement));
statement.setInt(1, intVal);
assertEquals(new Tuple<>(JDBCType.INTEGER.getVendorTypeNumber(), intVal), execute(statement));
statement.setLong(1, longVal);
assertEquals(new Tuple<>(JDBCType.BIGINT.getVendorTypeNumber(), longVal), execute(statement));
statement.setFloat(1, floatVal);
assertEquals(new Tuple<>(JDBCType.REAL.getVendorTypeNumber(), floatVal), execute(statement));
statement.setDouble(1, doubleVal);
assertEquals(new Tuple<>(JDBCType.DOUBLE.getVendorTypeNumber(), doubleVal), execute(statement));
statement.setNull(1, JDBCType.DOUBLE.getVendorTypeNumber());
assertEquals(new Tuple<>(JDBCType.DOUBLE.getVendorTypeNumber(), null), execute(statement));
statement.setBoolean(1, booleanVal);
assertEquals(new Tuple<>(JDBCType.BOOLEAN.getVendorTypeNumber(), booleanVal), execute(statement));
statement.setByte(1, byteVal);
assertEquals(new Tuple<>(JDBCType.TINYINT.getVendorTypeNumber(), byteVal), execute(statement));
statement.setShort(1, shortVal);
assertEquals(new Tuple<>(JDBCType.SMALLINT.getVendorTypeNumber(), shortVal), execute(statement));
}
}
}
private Tuple<Integer, Object> execute(PreparedStatement statement) throws SQLException {
try (ResultSet results = statement.executeQuery()) {
ResultSetMetaData resultSetMetaData = results.getMetaData();
assertTrue(results.next());
Tuple<Integer, Object> result = new Tuple<>(resultSetMetaData.getColumnType(1), results.getObject(1));
assertFalse(results.next());
return result;
}
}
private static long testMillis(long randomMillis, int i) {
return randomMillis - 2 + i;
}
private static void setupIndexForDateTimeTests(long randomMillis) throws IOException {
String mapping = "\"properties\":{\"id\":{\"type\":\"integer\"},\"birth_date\":{\"type\":\"date\"}}";
createIndex("emps", Settings.EMPTY, mapping);
for (int i = 1; i <= 3; i++) {
int id = 1000 + i;
long testMillis = testMillis(randomMillis, i);
index("emps", "" + i, builder -> {
builder.field("id", id);
builder.field("birth_date", testMillis);
});
}
}
}
|
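/*
 * Illustrative sketch (not part of the original sources): plain JDBC parameter binding of the kind
 * the tests above exercise. The connection URL is a hypothetical placeholder; the tests themselves
 * obtain connections through esJdbc().
 */
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class PreparedStatementUsageSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical connection URL; replace with the real cluster endpoint.
        try (Connection connection = DriverManager.getConnection("jdbc:es://localhost:9200")) {
            try (PreparedStatement statement =
                     connection.prepareStatement("SELECT name FROM library WHERE page_count = ?")) {
                statement.setInt(1, 1072); // bind the single positional parameter
                try (ResultSet results = statement.executeQuery()) {
                    while (results.next()) {
                        System.out.println(results.getString(1));
                    }
                }
            }
        }
    }
}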
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.collect.Iterables;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkNull;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.MaterializedViewDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.ColumnIdentifier.Raw;
import org.apache.cassandra.cql3.functions.Function;
import org.apache.cassandra.cql3.restrictions.StatementRestrictions;
import org.apache.cassandra.cql3.selection.Selection;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.marshal.BooleanType;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.exceptions.RequestExecutionException;
import org.apache.cassandra.exceptions.RequestValidationException;
import org.apache.cassandra.exceptions.UnauthorizedException;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.paxos.Commit;
import org.apache.cassandra.thrift.ThriftValidation;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.triggers.TriggerExecutor;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.UUIDGen;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkFalse;
import static org.apache.cassandra.cql3.statements.RequestValidations.checkNotNull;
/*
* Abstract parent class of individual modifications, i.e. INSERT, UPDATE and DELETE.
*/
public abstract class ModificationStatement implements CQLStatement
{
protected static final Logger logger = LoggerFactory.getLogger(ModificationStatement.class);
private static final ColumnIdentifier CAS_RESULT_COLUMN = new ColumnIdentifier("[applied]", false);
protected final StatementType type;
private final int boundTerms;
public final CFMetaData cfm;
private final Attributes attrs;
private final StatementRestrictions restrictions;
private final Operations operations;
private final PartitionColumns updatedColumns;
private final Conditions conditions;
private final PartitionColumns conditionColumns;
private final PartitionColumns requiresRead;
public ModificationStatement(StatementType type,
int boundTerms,
CFMetaData cfm,
Operations operations,
StatementRestrictions restrictions,
Conditions conditions,
Attributes attrs)
{
this.type = type;
this.boundTerms = boundTerms;
this.cfm = cfm;
this.restrictions = restrictions;
this.operations = operations;
this.conditions = conditions;
this.attrs = attrs;
if (!conditions.isEmpty())
{
checkFalse(cfm.isCounter(), "Conditional updates are not supported on counter tables");
checkFalse(attrs.isTimestampSet(), "Cannot provide custom timestamp for conditional updates");
}
PartitionColumns.Builder conditionColumnsBuilder = PartitionColumns.builder();
Iterable<ColumnDefinition> columns = conditions.getColumns();
if (columns != null)
conditionColumnsBuilder.addAll(columns);
PartitionColumns.Builder updatedColumnsBuilder = PartitionColumns.builder();
PartitionColumns.Builder requiresReadBuilder = PartitionColumns.builder();
for (Operation operation : operations)
{
updatedColumnsBuilder.add(operation.column);
// If the operation requires a read-before-write and we're doing a conditional read, we want to read
// the affected column as part of the read-for-conditions paxos phase (see #7499).
if (operation.requiresRead())
{
conditionColumnsBuilder.add(operation.column);
requiresReadBuilder.add(operation.column);
}
}
PartitionColumns modifiedColumns = updatedColumnsBuilder.build();
// Compact tables have no row marker. So if we don't actually update any particular column,
// this means that we're only updating the PK, which we allow only if those were the only columns
// declared in the definition. In that case however, we do want to write the compactValueColumn
// (since again we can't use a "row marker"), so add it automatically.
if (cfm.isCompactTable() && modifiedColumns.isEmpty() && updatesRegularRows())
modifiedColumns = cfm.partitionColumns();
this.updatedColumns = modifiedColumns;
this.conditionColumns = conditionColumnsBuilder.build();
this.requiresRead = requiresReadBuilder.build();
}
public Iterable<Function> getFunctions()
{
return Iterables.concat(attrs.getFunctions(),
restrictions.getFunctions(),
operations.getFunctions(),
conditions.getFunctions());
}
public abstract void addUpdateForKey(PartitionUpdate update, Clustering clustering, UpdateParameters params);
public abstract void addUpdateForKey(PartitionUpdate update, Slice slice, UpdateParameters params);
public int getBoundTerms()
{
return boundTerms;
}
public String keyspace()
{
return cfm.ksName;
}
public String columnFamily()
{
return cfm.cfName;
}
public boolean isCounter()
{
return cfm.isCounter();
}
public boolean isMaterializedView()
{
return cfm.isMaterializedView();
}
public boolean hasMaterializedViews()
{
return !cfm.getMaterializedViews().isEmpty();
}
public long getTimestamp(long now, QueryOptions options) throws InvalidRequestException
{
return attrs.getTimestamp(now, options);
}
public boolean isTimestampSet()
{
return attrs.isTimestampSet();
}
public int getTimeToLive(QueryOptions options) throws InvalidRequestException
{
return attrs.getTimeToLive(options);
}
public void checkAccess(ClientState state) throws InvalidRequestException, UnauthorizedException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.MODIFY);
// CAS updates can be used to simulate a SELECT query, so should require Permission.SELECT as well.
if (hasConditions())
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.SELECT);
// MV updates need to get the current state from the table, and might update the materialized views
// Require Permission.SELECT on the base table, and Permission.MODIFY on the views
if (hasMaterializedViews())
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.SELECT);
for (MaterializedViewDefinition view : cfm.getMaterializedViews())
state.hasColumnFamilyAccess(keyspace(), view.viewName, Permission.MODIFY);
}
for (Function function : getFunctions())
state.ensureHasPermission(Permission.EXECUTE, function);
}
public void validate(ClientState state) throws InvalidRequestException
{
checkFalse(hasConditions() && attrs.isTimestampSet(), "Cannot provide custom timestamp for conditional updates");
checkFalse(isCounter() && attrs.isTimestampSet(), "Cannot provide custom timestamp for counter updates");
checkFalse(isCounter() && attrs.isTimeToLiveSet(), "Cannot provide custom TTL for counter updates");
checkFalse(isMaterializedView(), "Cannot directly modify a materialized view");
}
public PartitionColumns updatedColumns()
{
return updatedColumns;
}
public PartitionColumns conditionColumns()
{
return conditionColumns;
}
public boolean updatesRegularRows()
{
// We're updating regular rows if all the clustering columns are provided.
// Note that the only case where we're allowed not to provide clustering
// columns is if we set some static columns, and in that case no clustering
// columns should be given. So in practice, it's enough to check whether the
// table has no clustering columns or whether at least one of them is restricted.
return cfm.clusteringColumns().isEmpty() || restrictions.hasClusteringColumnsRestriction();
}
public boolean updatesStaticRow()
{
return operations.appliesToStaticColumns();
}
public List<Operation> getRegularOperations()
{
return operations.regularOperations();
}
public List<Operation> getStaticOperations()
{
return operations.staticOperations();
}
public Iterable<Operation> allOperations()
{
return operations;
}
public Iterable<ColumnDefinition> getColumnsWithConditions()
{
return conditions.getColumns();
}
public boolean hasIfNotExistCondition()
{
return conditions.isIfNotExists();
}
public boolean hasIfExistCondition()
{
return conditions.isIfExists();
}
public List<ByteBuffer> buildPartitionKeyNames(QueryOptions options)
throws InvalidRequestException
{
return restrictions.getPartitionKeys(options);
}
public NavigableSet<Clustering> createClustering(QueryOptions options)
throws InvalidRequestException
{
if (appliesOnlyToStaticColumns() && !restrictions.hasClusteringColumnsRestriction())
return FBUtilities.singleton(CBuilder.STATIC_BUILDER.build(), cfm.comparator);
return restrictions.getClusteringColumns(options);
}
/**
* Checks that the modification only apply to static columns.
* @return <code>true</code> if the modification only apply to static columns, <code>false</code> otherwise.
*/
private boolean appliesOnlyToStaticColumns()
{
return appliesOnlyToStaticColumns(operations, conditions);
}
/**
* Checks that the specified operations and conditions only apply to static columns.
* @return <code>true</code> if the specified operations and conditions only apply to static columns,
* <code>false</code> otherwise.
*/
public static boolean appliesOnlyToStaticColumns(Operations operation, Conditions conditions)
{
return !operation.appliesToRegularColumns() && !conditions.appliesToRegularColumns()
&& (operation.appliesToStaticColumns() || conditions.appliesToStaticColumns());
}
public boolean requiresRead()
{
// List SET operations incur a read.
for (Operation op : allOperations())
if (op.requiresRead())
return true;
return false;
}
private Map<DecoratedKey, Partition> readRequiredLists(Collection<ByteBuffer> partitionKeys,
ClusteringIndexFilter filter,
DataLimits limits,
boolean local,
ConsistencyLevel cl)
{
if (!requiresRead())
return null;
try
{
cl.validateForRead(keyspace());
}
catch (InvalidRequestException e)
{
throw new InvalidRequestException(String.format("Write operation require a read but consistency %s is not supported on reads", cl));
}
List<SinglePartitionReadCommand<?>> commands = new ArrayList<>(partitionKeys.size());
int nowInSec = FBUtilities.nowInSeconds();
for (ByteBuffer key : partitionKeys)
commands.add(SinglePartitionReadCommand.create(cfm,
nowInSec,
ColumnFilter.selection(this.requiresRead),
RowFilter.NONE,
limits,
cfm.decorateKey(key),
filter));
SinglePartitionReadCommand.Group group = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);
if (local)
{
try (ReadOrderGroup orderGroup = group.startOrderGroup(); PartitionIterator iter = group.executeInternal(orderGroup))
{
return asMaterializedMap(iter);
}
}
try (PartitionIterator iter = group.execute(cl, null))
{
return asMaterializedMap(iter);
}
}
private Map<DecoratedKey, Partition> asMaterializedMap(PartitionIterator iterator)
{
Map<DecoratedKey, Partition> map = new HashMap<>();
while (iterator.hasNext())
{
try (RowIterator partition = iterator.next())
{
map.put(partition.partitionKey(), FilteredPartition.create(partition));
}
}
return map;
}
public boolean hasConditions()
{
return !conditions.isEmpty();
}
public ResultMessage execute(QueryState queryState, QueryOptions options)
throws RequestExecutionException, RequestValidationException
{
if (options.getConsistency() == null)
throw new InvalidRequestException("Invalid empty consistency level");
if (hasConditions() && options.getProtocolVersion() == 1)
throw new InvalidRequestException("Conditional updates are not supported by the protocol version in use. You need to upgrade to a driver using the native protocol v2.");
return hasConditions()
? executeWithCondition(queryState, options)
: executeWithoutCondition(queryState, options);
}
private ResultMessage executeWithoutCondition(QueryState queryState, QueryOptions options)
throws RequestExecutionException, RequestValidationException
{
ConsistencyLevel cl = options.getConsistency();
if (isCounter())
cl.validateCounterForWrite(cfm);
else
cl.validateForWrite(cfm.ksName);
Collection<? extends IMutation> mutations = getMutations(options, false, options.getTimestamp(queryState));
if (!mutations.isEmpty())
StorageProxy.mutateWithTriggers(mutations, cl, false);
return null;
}
public ResultMessage executeWithCondition(QueryState queryState, QueryOptions options)
throws RequestExecutionException, RequestValidationException
{
CQL3CasRequest request = makeCasRequest(queryState, options);
try (RowIterator result = StorageProxy.cas(keyspace(),
columnFamily(),
request.key,
request,
options.getSerialConsistency(),
options.getConsistency(),
queryState.getClientState()))
{
return new ResultMessage.Rows(buildCasResultSet(result, options));
}
}
private CQL3CasRequest makeCasRequest(QueryState queryState, QueryOptions options)
{
List<ByteBuffer> keys = buildPartitionKeyNames(options);
// We don't support IN for CAS operation so far
checkFalse(keys.size() > 1,
"IN on the partition key is not supported with conditional %s",
type.isUpdate()? "updates" : "deletions");
DecoratedKey key = cfm.decorateKey(keys.get(0));
long now = options.getTimestamp(queryState);
SortedSet<Clustering> clusterings = createClustering(options);
checkFalse(clusterings.size() > 1,
"IN on the clustering key columns is not supported with conditional %s",
type.isUpdate()? "updates" : "deletions");
Clustering clustering = Iterables.getOnlyElement(clusterings);
CQL3CasRequest request = new CQL3CasRequest(cfm, key, false, conditionColumns(), updatesRegularRows(), updatesStaticRow());
addConditions(clustering, request, options);
request.addRowUpdate(clustering, this, options, now);
return request;
}
public void addConditions(Clustering clustering, CQL3CasRequest request, QueryOptions options) throws InvalidRequestException
{
conditions.addConditionsTo(request, clustering, options);
}
private ResultSet buildCasResultSet(RowIterator partition, QueryOptions options) throws InvalidRequestException
{
return buildCasResultSet(keyspace(), columnFamily(), partition, getColumnsWithConditions(), false, options);
}
public static ResultSet buildCasResultSet(String ksName, String tableName, RowIterator partition, Iterable<ColumnDefinition> columnsWithConditions, boolean isBatch, QueryOptions options)
throws InvalidRequestException
{
boolean success = partition == null;
ColumnSpecification spec = new ColumnSpecification(ksName, tableName, CAS_RESULT_COLUMN, BooleanType.instance);
ResultSet.ResultMetadata metadata = new ResultSet.ResultMetadata(Collections.singletonList(spec));
List<List<ByteBuffer>> rows = Collections.singletonList(Collections.singletonList(BooleanType.instance.decompose(success)));
ResultSet rs = new ResultSet(metadata, rows);
return success ? rs : merge(rs, buildCasFailureResultSet(partition, columnsWithConditions, isBatch, options));
}
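/*
 * Illustrative note (not part of the original source): for a conditional write such as
 *
 *   UPDATE users SET email = 'new@example.com' WHERE id = 1 IF email = 'old@example.com';
 *
 * the result set built above contains a single row with the boolean [applied] column; when the
 * condition fails, that row is merged with the current values of the columns named in the IF clause.
 */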
private static ResultSet merge(ResultSet left, ResultSet right)
{
if (left.size() == 0)
return right;
else if (right.size() == 0)
return left;
assert left.size() == 1;
int size = left.metadata.names.size() + right.metadata.names.size();
List<ColumnSpecification> specs = new ArrayList<ColumnSpecification>(size);
specs.addAll(left.metadata.names);
specs.addAll(right.metadata.names);
List<List<ByteBuffer>> rows = new ArrayList<>(right.size());
for (int i = 0; i < right.size(); i++)
{
List<ByteBuffer> row = new ArrayList<ByteBuffer>(size);
row.addAll(left.rows.get(0));
row.addAll(right.rows.get(i));
rows.add(row);
}
return new ResultSet(new ResultSet.ResultMetadata(specs), rows);
}
private static ResultSet buildCasFailureResultSet(RowIterator partition, Iterable<ColumnDefinition> columnsWithConditions, boolean isBatch, QueryOptions options)
throws InvalidRequestException
{
CFMetaData cfm = partition.metadata();
Selection selection;
if (columnsWithConditions == null)
{
selection = Selection.wildcard(cfm);
}
else
{
// We can have multiple conditions on the same columns (for collections) so use a set
// to avoid duplicates, but preserve the order so that it follows the order of the IF conditions in the query
Set<ColumnDefinition> defs = new LinkedHashSet<>();
// Add the partition key for batches to disambiguate when the conditions span multiple rows (we don't add them outside
// of batches for compatibility's sake).
if (isBatch)
{
defs.addAll(cfm.partitionKeyColumns());
defs.addAll(cfm.clusteringColumns());
}
for (ColumnDefinition def : columnsWithConditions)
defs.add(def);
selection = Selection.forColumns(cfm, new ArrayList<>(defs));
}
Selection.ResultSetBuilder builder = selection.resultSetBuilder(false);
SelectStatement.forSelection(cfm, selection).processPartition(partition, options, builder, FBUtilities.nowInSeconds());
return builder.build(options.getProtocolVersion());
}
public ResultMessage executeInternal(QueryState queryState, QueryOptions options) throws RequestValidationException, RequestExecutionException
{
return hasConditions()
? executeInternalWithCondition(queryState, options)
: executeInternalWithoutCondition(queryState, options);
}
public ResultMessage executeInternalWithoutCondition(QueryState queryState, QueryOptions options) throws RequestValidationException, RequestExecutionException
{
for (IMutation mutation : getMutations(options, true, queryState.getTimestamp()))
{
assert mutation instanceof Mutation || mutation instanceof CounterMutation;
if (mutation instanceof Mutation)
((Mutation) mutation).apply();
else if (mutation instanceof CounterMutation)
((CounterMutation) mutation).apply();
}
return null;
}
public ResultMessage executeInternalWithCondition(QueryState state, QueryOptions options) throws RequestValidationException, RequestExecutionException
{
CQL3CasRequest request = makeCasRequest(state, options);
try (RowIterator result = casInternal(request, state))
{
return new ResultMessage.Rows(buildCasResultSet(result, options));
}
}
static RowIterator casInternal(CQL3CasRequest request, QueryState state)
{
UUID ballot = UUIDGen.getTimeUUIDFromMicros(state.getTimestamp());
SinglePartitionReadCommand<?> readCommand = request.readCommand(FBUtilities.nowInSeconds());
FilteredPartition current;
try (ReadOrderGroup orderGroup = readCommand.startOrderGroup(); PartitionIterator iter = readCommand.executeInternal(orderGroup))
{
current = FilteredPartition.create(PartitionIterators.getOnlyElement(iter, readCommand));
}
if (!request.appliesTo(current))
return current.rowIterator();
PartitionUpdate updates = request.makeUpdates(current);
updates = TriggerExecutor.instance.execute(updates);
Commit proposal = Commit.newProposal(ballot, updates);
proposal.makeMutation().apply();
return null;
}
/**
* Convert statement into a list of mutations to apply on the server
*
* @param options value for prepared statement markers
* @param local if true, any requests (for collections) performed by getMutation should be done locally only.
* @param now the current timestamp in microseconds to use if no timestamp was provided by the user.
*
* @return list of the mutations
*/
private Collection<? extends IMutation> getMutations(QueryOptions options, boolean local, long now)
{
UpdatesCollector collector = new UpdatesCollector(updatedColumns, 1);
addUpdates(collector, options, local, now);
return collector.toMutations();
}
final void addUpdates(UpdatesCollector collector,
QueryOptions options,
boolean local,
long now)
{
List<ByteBuffer> keys = buildPartitionKeyNames(options);
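// A slice filter is used when the statement targets a range of clustering columns (for instance a
// range DELETE); otherwise the update is applied to an explicit set of clusterings.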
if (type.allowClusteringColumnSlices()
&& restrictions.hasClusteringColumnsRestriction()
&& restrictions.isColumnRange())
{
Slices slices = createSlice(options);
// If all the ranges were invalid we do not need to do anything.
if (slices.isEmpty())
return;
UpdateParameters params = makeUpdateParameters(keys,
new ClusteringIndexSliceFilter(slices, false),
options,
DataLimits.NONE,
local,
now);
for (ByteBuffer key : keys)
{
ThriftValidation.validateKey(cfm, key);
DecoratedKey dk = cfm.decorateKey(key);
PartitionUpdate upd = collector.getPartitionUpdate(cfm, dk, options.getConsistency());
for (Slice slice : slices)
addUpdateForKey(upd, slice, params);
}
}
else
{
NavigableSet<Clustering> clusterings = createClustering(options);
UpdateParameters params = makeUpdateParameters(keys, clusterings, options, local, now);
for (ByteBuffer key : keys)
{
ThriftValidation.validateKey(cfm, key);
DecoratedKey dk = cfm.decorateKey(key);
PartitionUpdate upd = collector.getPartitionUpdate(cfm, dk, options.getConsistency());
if (clusterings.isEmpty())
{
addUpdateForKey(upd, Clustering.EMPTY, params);
}
else
{
for (Clustering clustering : clusterings)
addUpdateForKey(upd, clustering, params);
}
}
}
}
private Slices createSlice(QueryOptions options)
{
SortedSet<Slice.Bound> startBounds = restrictions.getClusteringColumnsBounds(Bound.START, options);
SortedSet<Slice.Bound> endBounds = restrictions.getClusteringColumnsBounds(Bound.END, options);
return toSlices(startBounds, endBounds);
}
private UpdateParameters makeUpdateParameters(Collection<ByteBuffer> keys,
NavigableSet<Clustering> clusterings,
QueryOptions options,
boolean local,
long now)
{
if (clusterings.contains(Clustering.STATIC_CLUSTERING))
return makeUpdateParameters(keys,
new ClusteringIndexSliceFilter(Slices.ALL, false),
options,
DataLimits.cqlLimits(1),
local,
now);
return makeUpdateParameters(keys,
new ClusteringIndexNamesFilter(clusterings, false),
options,
DataLimits.NONE,
local,
now);
}
private UpdateParameters makeUpdateParameters(Collection<ByteBuffer> keys,
ClusteringIndexFilter filter,
QueryOptions options,
DataLimits limits,
boolean local,
long now)
{
// Some list operations require reading the current list before writing
Map<DecoratedKey, Partition> lists = readRequiredLists(keys, filter, limits, local, options.getConsistency());
return new UpdateParameters(cfm, updatedColumns(), options, getTimestamp(now, options), getTimeToLive(options), lists, true);
}
private Slices toSlices(SortedSet<Slice.Bound> startBounds, SortedSet<Slice.Bound> endBounds)
{
assert startBounds.size() == endBounds.size();
Slices.Builder builder = new Slices.Builder(cfm.comparator);
Iterator<Slice.Bound> starts = startBounds.iterator();
Iterator<Slice.Bound> ends = endBounds.iterator();
while (starts.hasNext())
{
Slice slice = Slice.make(starts.next(), ends.next());
if (!slice.isEmpty(cfm.comparator))
{
builder.add(slice);
}
}
return builder.build();
}
public static abstract class Parsed extends CFStatement
{
private final Attributes.Raw attrs;
private final List<Pair<ColumnIdentifier.Raw, ColumnCondition.Raw>> conditions;
private final boolean ifNotExists;
private final boolean ifExists;
protected Parsed(CFName name, Attributes.Raw attrs, List<Pair<ColumnIdentifier.Raw, ColumnCondition.Raw>> conditions, boolean ifNotExists, boolean ifExists)
{
super(name);
this.attrs = attrs;
this.conditions = conditions == null ? Collections.<Pair<ColumnIdentifier.Raw, ColumnCondition.Raw>>emptyList() : conditions;
this.ifNotExists = ifNotExists;
this.ifExists = ifExists;
}
public ParsedStatement.Prepared prepare()
{
VariableSpecifications boundNames = getBoundVariables();
ModificationStatement statement = prepare(boundNames);
CFMetaData cfm = ThriftValidation.validateColumnFamily(keyspace(), columnFamily());
return new ParsedStatement.Prepared(statement, boundNames, boundNames.getPartitionKeyBindIndexes(cfm));
}
public ModificationStatement prepare(VariableSpecifications boundNames)
{
CFMetaData metadata = ThriftValidation.validateColumnFamily(keyspace(), columnFamily());
Attributes preparedAttributes = attrs.prepare(keyspace(), columnFamily());
preparedAttributes.collectMarkerSpecification(boundNames);
Conditions preparedConditions = prepareConditions(metadata, boundNames);
return prepareInternal(metadata,
boundNames,
preparedConditions,
preparedAttributes);
}
/**
* Returns the column conditions.
*
* @param metadata the column family meta data
* @param boundNames the bound names
* @return the column conditions.
*/
private Conditions prepareConditions(CFMetaData metadata, VariableSpecifications boundNames)
{
// Having both 'IF EXISTS'/'IF NOT EXISTS' and other conditions doesn't make sense.
// So far this is enforced by the parser, but let's assert it for sanity in case the parser ever changes.
if (ifExists)
{
assert conditions.isEmpty();
assert !ifNotExists;
return Conditions.IF_EXISTS_CONDITION;
}
if (ifNotExists)
{
assert conditions.isEmpty();
assert !ifExists;
return Conditions.IF_NOT_EXISTS_CONDITION;
}
if (conditions.isEmpty())
return Conditions.EMPTY_CONDITION;
return prepareColumnConditions(metadata, boundNames);
}
/**
* Returns the column conditions.
*
* @param metadata the column family meta data
* @param boundNames the bound names
* @return the column conditions.
*/
private ColumnConditions prepareColumnConditions(CFMetaData metadata, VariableSpecifications boundNames)
{
checkNull(attrs.timestamp, "Cannot provide custom timestamp for conditional updates");
ColumnConditions.Builder builder = ColumnConditions.newBuilder();
for (Pair<ColumnIdentifier.Raw, ColumnCondition.Raw> entry : conditions)
{
ColumnIdentifier id = entry.left.prepare(metadata);
ColumnDefinition def = metadata.getColumnDefinition(id);
checkNotNull(def, "Unknown identifier %s in IF conditions", id);
ColumnCondition condition = entry.right.prepare(keyspace(), def);
condition.collectMarkerSpecification(boundNames);
checkFalse(def.isPrimaryKeyColumn(), "PRIMARY KEY column '%s' cannot have IF conditions", id);
builder.add(condition);
}
return builder.build();
}
protected abstract ModificationStatement prepareInternal(CFMetaData cfm,
VariableSpecifications boundNames,
Conditions conditions,
Attributes attrs);
/**
* Creates the restrictions.
*
* @param type the statement type
* @param cfm the column family meta data
* @param boundNames the bound names
* @param operations the column operations
* @param relations the where relations
* @param conditions the conditions
* @return the restrictions
*/
protected static StatementRestrictions newRestrictions(StatementType type,
CFMetaData cfm,
VariableSpecifications boundNames,
Operations operations,
List<Relation> relations,
Conditions conditions)
{
boolean applyOnlyToStaticColumns = appliesOnlyToStaticColumns(operations, conditions);
return new StatementRestrictions(type, cfm, relations, boundNames, applyOnlyToStaticColumns, false, false);
}
/**
* Retrieves the <code>ColumnDefinition</code> corresponding to the specified raw <code>ColumnIdentifier</code>.
*
* @param cfm the column family meta data
* @param rawId the raw <code>ColumnIdentifier</code>
* @return the <code>ColumnDefinition</code> corresponding to the specified raw <code>ColumnIdentifier</code>
*/
protected static ColumnDefinition getColumnDefinition(CFMetaData cfm, Raw rawId)
{
ColumnIdentifier id = rawId.prepare(cfm);
return checkNotNull(cfm.getColumnDefinition(id), "Unknown identifier %s", id);
}
}
}
|
|
/*
* Copyright 2009 Martin Roth (mhroth@gmail.com)
*
* This file is part of JAsioHost.
*
* JAsioHost is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JAsioHost is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JAsioHost. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.synthbot.jasiohost;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * The <code>AsioChannel</code> class represents an input or output channel available from the
 * ASIO driver. It provides information such as the current state and sample type. It also
 * encapsulates the native audio buffers and makes them available while the channel is active.
 * Convenience methods are provided to facilitate consuming and producing audio.
*/
public class AsioChannel {
private final int index;
private final boolean isInput;
private volatile boolean isActive;
private final int channelGroup;
private final AsioSampleType sampleType;
private final String name;
private final ByteBuffer[] nativeBuffers;
private volatile int bufferIndex;
private static final float MAX_INT16 = 0x00007FFF;
private static final float MAX_INT18 = 0x0001FFFF;
private static final float MAX_INT20 = 0x0007FFFF;
private static final float MAX_INT24 = 0x007FFFFF;
private static final float MAX_INT32 = 0x7FFFFFFF; // Integer.MAX_VALUE
private AsioChannel(int index, boolean isInput, boolean isActive, int channelGroup, AsioSampleType sampleType, String name) {
this.index = index;
this.isInput = isInput;
this.isActive = isActive;
this.channelGroup = channelGroup;
this.sampleType = sampleType;
this.name = name;
nativeBuffers = new ByteBuffer[2];
}
public int getChannelIndex() {
return index;
}
public boolean isInput() {
return isInput;
}
public boolean isActive() {
return isActive;
}
public int getChannelGroup() {
return channelGroup;
}
public AsioSampleType getSampleType() {
return sampleType;
}
public String getChannelName() {
return name;
}
/**
 * Returns the current buffer to read from or write to, with the position reset to zero. The
 * endianness of the buffer has already been set to match the sample type. Note that the buffers of
 * input channels (<code>isInput()</code> returns <code>true</code>) are read-only.
*/
public ByteBuffer getByteBuffer() {
return nativeBuffers[bufferIndex];
}
protected void setBufferIndex(int bufferIndex) {
this.bufferIndex = bufferIndex;
nativeBuffers[bufferIndex].rewind(); // reset position to start of buffer
}
protected void setByteBuffers(ByteBuffer buffer0, ByteBuffer buffer1) {
if (buffer0 == null || buffer1 == null) {
// the ByteBuffer references are cleared
isActive = false;
nativeBuffers[0] = null;
nativeBuffers[1] = null;
} else {
nativeBuffers[0] = isInput ? buffer0.asReadOnlyBuffer() : buffer0;
nativeBuffers[1] = isInput ? buffer1.asReadOnlyBuffer() : buffer1;
if (sampleType.name().contains("MSB")) {
nativeBuffers[0].order(ByteOrder.BIG_ENDIAN); // set the endian-ness of the buffers
nativeBuffers[1].order(ByteOrder.BIG_ENDIAN); // according to the sample type
} else {
nativeBuffers[0].order(ByteOrder.LITTLE_ENDIAN);
nativeBuffers[1].order(ByteOrder.LITTLE_ENDIAN);
}
isActive = true;
}
}
/**
 * A convenience method to write a <code>float</code> array of samples to the output. The array
 * values are expected to be bounded to the range [-1,1]. The conversion to the channel's sample
 * type is handled internally. The <code>output</code> array should be the same size as the buffer.
 * If it is larger, a <code>BufferOverflowException</code> will be thrown. If it is smaller, the
 * buffer will be incompletely filled.
*
* If the ASIO host does not use <code>float</code>s to represent samples, then the <code>AsioChannel</code>'s
* <code>ByteBuffer</code> should be directly manipulated. Use <code>getByteBuffer</code> to access the buffer.
* @param output A <code>float</code> array to write to the output.
*/
public void write(float[] output) {
if (isInput) {
throw new IllegalStateException("Only output channels can be written to.");
}
if (!isActive) {
throw new IllegalStateException("This channel is not active: " + toString());
}
ByteBuffer outputBuffer = getByteBuffer();
switch (sampleType) {
case ASIOSTFloat64MSB:
case ASIOSTFloat64LSB: {
for (float sampleValue : output) {
outputBuffer.putDouble(sampleValue);
}
break;
}
case ASIOSTFloat32MSB:
case ASIOSTFloat32LSB: {
for (float sampleValue : output) {
outputBuffer.putFloat(sampleValue);
}
break;
}
case ASIOSTInt32MSB:
case ASIOSTInt32LSB: {
for (float sampleValue : output) {
outputBuffer.putInt((int) (sampleValue * MAX_INT32));
}
break;
}
case ASIOSTInt32MSB16:
case ASIOSTInt32LSB16: {
for (float sampleValue : output) {
outputBuffer.putInt((int) (sampleValue * MAX_INT16));
}
break;
}
case ASIOSTInt32MSB18:
case ASIOSTInt32LSB18: {
for (float sampleValue : output) {
outputBuffer.putInt((int) (sampleValue * MAX_INT18));
}
break;
}
case ASIOSTInt32MSB20:
case ASIOSTInt32LSB20: {
for (float sampleValue : output) {
outputBuffer.putInt((int) (sampleValue * MAX_INT20));
}
break;
}
case ASIOSTInt32MSB24:
case ASIOSTInt32LSB24: {
for (float sampleValue : output) {
outputBuffer.putInt((int) (sampleValue * MAX_INT24));
}
break;
}
case ASIOSTInt16MSB:
case ASIOSTInt16LSB: {
for (float sampleValue : output) {
outputBuffer.putShort((short) (sampleValue * MAX_INT16));
}
break;
}
case ASIOSTInt24MSB: {
for (float sampleValue : output) {
int sampleValueInt = (int) (sampleValue * MAX_INT24);
outputBuffer.put((byte) ((sampleValueInt >> 16) & 0xFF));
outputBuffer.put((byte) ((sampleValueInt >> 8) & 0xFF));
outputBuffer.put((byte) (sampleValueInt & 0xFF));
}
break;
}
case ASIOSTInt24LSB: {
for (float sampleValue : output) {
int sampleValueInt = (int) (sampleValue * MAX_INT24);
outputBuffer.put((byte) (sampleValueInt & 0xFF));
outputBuffer.put((byte) ((sampleValueInt >> 8) & 0xFF));
outputBuffer.put((byte) ((sampleValueInt >> 16) & 0xFF));
}
break;
}
case ASIOSTDSDInt8MSB1:
case ASIOSTDSDInt8LSB1:
case ASIOSTDSDInt8NER8: {
throw new IllegalStateException(
"The sample types ASIOSTDSDInt8MSB1, ASIOSTDSDInt8LSB1, and ASIOSTDSDInt8NER8 are not supported.");
}
}
}
/**
 * A convenience method to read samples from the input buffer into a <code>float</code> array.
 * The returned samples are bounded to [-1,1]. The <code>input</code> array should be the same size
 * as the buffer. If it is larger, a <code>BufferUnderflowException</code> will be thrown. If it is
 * smaller, the buffer will be incompletely read.
* @param input A <code>float</code> array to read into.
*/
public void read(float[] input) {
if (!isInput) {
throw new IllegalStateException("Only input channels can be read from.");
}
if (!isActive) {
throw new IllegalStateException("This channel is not active: " + toString());
}
ByteBuffer inputBuffer = getByteBuffer();
switch (sampleType) {
case ASIOSTFloat64MSB:
case ASIOSTFloat64LSB: {
for (int i = 0; i < input.length; i++) {
input[i] = (float) inputBuffer.getDouble();
}
break;
}
case ASIOSTFloat32MSB:
case ASIOSTFloat32LSB: {
for (int i = 0; i < input.length; i++) {
input[i] = inputBuffer.getFloat();
}
break;
}
case ASIOSTInt32MSB:
case ASIOSTInt32LSB: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getInt()) / MAX_INT32;
}
break;
}
case ASIOSTInt32MSB16:
case ASIOSTInt32LSB16: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getInt()) / MAX_INT16;
}
break;
}
case ASIOSTInt32MSB18:
case ASIOSTInt32LSB18: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getInt()) / MAX_INT18;
}
break;
}
case ASIOSTInt32MSB20:
case ASIOSTInt32LSB20: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getInt()) / MAX_INT20;
}
break;
}
case ASIOSTInt32MSB24:
case ASIOSTInt32LSB24: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getInt()) / MAX_INT24;
}
break;
}
case ASIOSTInt16MSB:
case ASIOSTInt16LSB: {
for (int i = 0; i < input.length; i++) {
input[i] = (inputBuffer.getShort()) / MAX_INT16;
}
break;
}
case ASIOSTInt24MSB: {
for (int i = 0; i < input.length; i++) {
// The 0xFFFF mask on the most-significant byte keeps eight extra sign bits which, after the
// two 8-bit shifts, end up in bits 24-31 and sign-extend the 24-bit sample to a 32-bit int.
int sampleValueInt = (inputBuffer.get()) & 0xFFFF; sampleValueInt <<= 8;
sampleValueInt |= (inputBuffer.get()) & 0xFF; sampleValueInt <<= 8;
sampleValueInt |= (inputBuffer.get()) & 0xFF;
input[i] = (sampleValueInt) / MAX_INT24;
}
break;
}
case ASIOSTInt24LSB: {
for (int i = 0; i < input.length; i++) {
int sampleValueInt = (inputBuffer.get()) & 0xFF;
sampleValueInt |= ((inputBuffer.get()) & 0xFF) << 8;
// As above, the 0xFFFF mask on the most-significant byte sign-extends the 24-bit sample.
sampleValueInt |= ((inputBuffer.get()) & 0xFFFF) << 16;
input[i] = (sampleValueInt) / MAX_INT24;
}
break;
}
case ASIOSTDSDInt8MSB1:
case ASIOSTDSDInt8LSB1:
case ASIOSTDSDInt8NER8: {
throw new IllegalStateException(
"The sample types ASIOSTDSDInt8MSB1, ASIOSTDSDInt8LSB1, and ASIOSTDSDInt8NER8 are not supported.");
}
}
}
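/*
 * Illustrative sketch (not part of the original source): how the read/write convenience methods
 * might be used once a pair of active channels has been obtained from the driver. The variable
 * names and the buffer size are placeholders.
 *
 * float[] samples = new float[bufferSize];
 * if (inputChannel.isActive() && outputChannel.isActive()) {
 *   inputChannel.read(samples);   // native samples -> floats in [-1,1]
 *   outputChannel.write(samples); // floats -> the output channel's native sample type
 * }
 */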
/*
* equals() is overridden such that it may be used in a Set
*/
@Override
public boolean equals(Object o) {
if (!(o instanceof AsioChannel)) {
return false;
} else {
AsioChannel channelInfo = (AsioChannel) o;
return (channelInfo.getChannelIndex() == index && channelInfo.isInput() == isInput);
}
}
/*
* hashCode() overridden in order to accompany equals()
*/
@Override
public int hashCode() {
return isInput ? index : ~index + 1; // output channels hash to the two's complement (negation) of the index
}
/**
* Returns a string description of the channel in the format,
* "Output Channel 0: Analog Out 1/2 Delta-AP [1], ASIOSTInt32LSB, group 0, inactive"
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(isInput ? "Input" : "Output");
sb.append(" Channel "); sb.append(Integer.toString(index));
sb.append(": "); sb.append(name);
sb.append(", "); sb.append(sampleType.toString());
sb.append(", group "); sb.append(Integer.toString(channelGroup));
sb.append(", "); sb.append(isActive ? "active" : "inactive");
return sb.toString();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.near;
import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheDeployable;
import org.apache.ignite.internal.processors.cache.GridCacheEntryInfo;
import org.apache.ignite.internal.processors.cache.GridCacheMessage;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersionable;
import org.apache.ignite.internal.util.GridLeanSet;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.NotNull;
/**
* Get response.
*/
public class GridNearGetResponse extends GridCacheMessage implements GridCacheDeployable,
GridCacheVersionable {
/** */
private static final long serialVersionUID = 0L;
/** Future ID. */
private IgniteUuid futId;
/** Sub ID. */
private IgniteUuid miniId;
/** Version. */
private GridCacheVersion ver;
/** Result. */
@GridToStringInclude
@GridDirectCollection(GridCacheEntryInfo.class)
private Collection<GridCacheEntryInfo> entries;
/** Partitions to retry due to ownership shift. */
@GridToStringInclude
@GridDirectCollection(int.class)
private Collection<Integer> invalidParts = new GridLeanSet<>();
/** Topology version if invalid partitions is not empty. */
private AffinityTopologyVersion topVer;
/** Error. */
@GridDirectTransient
private IgniteCheckedException err;
/** Serialized error. */
private byte[] errBytes;
/**
* Empty constructor required for {@link Externalizable}.
*/
public GridNearGetResponse() {
// No-op.
}
/**
* @param cacheId Cache ID.
* @param futId Future ID.
* @param miniId Sub ID.
* @param ver Version.
* @param addDepInfo Deployment info.
*/
public GridNearGetResponse(
int cacheId,
IgniteUuid futId,
IgniteUuid miniId,
GridCacheVersion ver,
boolean addDepInfo
) {
assert futId != null;
this.cacheId = cacheId;
this.futId = futId;
this.miniId = miniId;
this.ver = ver;
this.addDepInfo = addDepInfo;
}
/**
* @return Future ID.
*/
public IgniteUuid futureId() {
return futId;
}
/**
* @return Sub ID.
*/
public IgniteUuid miniId() {
return miniId;
}
/** {@inheritDoc} */
@Override public GridCacheVersion version() {
return ver;
}
/**
* @return Entries.
*/
public Collection<GridCacheEntryInfo> entries() {
return entries != null ? entries : Collections.<GridCacheEntryInfo>emptyList();
}
/**
* @param entries Entries.
*/
public void entries(Collection<GridCacheEntryInfo> entries) {
this.entries = entries;
}
/**
* @return Invalid partitions.
*/
public Collection<Integer> invalidPartitions() {
return invalidParts;
}
/**
* @param invalidParts Partitions to retry due to ownership shift.
* @param topVer Topology version.
*/
public void invalidPartitions(Collection<Integer> invalidParts, @NotNull AffinityTopologyVersion topVer) {
this.invalidParts = invalidParts;
this.topVer = topVer;
}
/**
* @return Topology version if this response has invalid partitions.
*/
@Override public AffinityTopologyVersion topologyVersion() {
return topVer != null ? topVer : super.topologyVersion();
}
/** {@inheritDoc} */
@Override public IgniteCheckedException error() {
return err;
}
/**
* @param err Error.
*/
public void error(IgniteCheckedException err) {
this.err = err;
}
/** {@inheritDoc} */
@Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
super.prepareMarshal(ctx);
GridCacheContext cctx = ctx.cacheContext(cacheId);
if (entries != null) {
for (GridCacheEntryInfo info : entries)
info.marshal(cctx);
}
if (err != null && errBytes == null)
errBytes = U.marshal(ctx, err);
}
/** {@inheritDoc} */
@Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
super.finishUnmarshal(ctx, ldr);
GridCacheContext cctx = ctx.cacheContext(cacheId());
if (entries != null) {
for (GridCacheEntryInfo info : entries)
info.unmarshal(cctx, ldr);
}
if (errBytes != null && err == null)
err = U.unmarshal(ctx, errBytes, U.resolveClassLoader(ldr, ctx.gridConfig()));
}
/** {@inheritDoc} */
@Override public boolean addDeploymentInfo() {
return addDepInfo;
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
writer.setBuffer(buf);
if (!super.writeTo(buf, writer))
return false;
if (!writer.isHeaderWritten()) {
if (!writer.writeHeader(directType(), fieldsCount()))
return false;
writer.onHeaderWritten();
}
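// The cases below intentionally fall through: writing resumes at the first field that was not
// fully written on a previous attempt (tracked by writer.state()).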
switch (writer.state()) {
case 3:
if (!writer.writeCollection("entries", entries, MessageCollectionItemType.MSG))
return false;
writer.incrementState();
case 4:
if (!writer.writeByteArray("errBytes", errBytes))
return false;
writer.incrementState();
case 5:
if (!writer.writeIgniteUuid("futId", futId))
return false;
writer.incrementState();
case 6:
if (!writer.writeCollection("invalidParts", invalidParts, MessageCollectionItemType.INT))
return false;
writer.incrementState();
case 7:
if (!writer.writeIgniteUuid("miniId", miniId))
return false;
writer.incrementState();
case 8:
if (!writer.writeMessage("topVer", topVer))
return false;
writer.incrementState();
case 9:
if (!writer.writeMessage("ver", ver))
return false;
writer.incrementState();
}
return true;
}
/** {@inheritDoc} */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
reader.setBuffer(buf);
if (!reader.beforeMessageRead())
return false;
if (!super.readFrom(buf, reader))
return false;
switch (reader.state()) {
case 3:
entries = reader.readCollection("entries", MessageCollectionItemType.MSG);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 4:
errBytes = reader.readByteArray("errBytes");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 5:
futId = reader.readIgniteUuid("futId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 6:
invalidParts = reader.readCollection("invalidParts", MessageCollectionItemType.INT);
if (!reader.isLastRead())
return false;
reader.incrementState();
case 7:
miniId = reader.readIgniteUuid("miniId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 8:
topVer = reader.readMessage("topVer");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 9:
ver = reader.readMessage("ver");
if (!reader.isLastRead())
return false;
reader.incrementState();
}
return reader.afterMessageRead(GridNearGetResponse.class);
}
/** {@inheritDoc} */
@Override public short directType() {
return 50;
}
/** {@inheritDoc} */
@Override public byte fieldsCount() {
return 10;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridNearGetResponse.class, this);
}
}
|
|
/*******************************************************************************
* Copyright 2013-16 Indra Sistemas S.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.indra.sofia2.ssap.kp.config;
import java.util.UUID;
import org.fusesource.mqtt.client.QoS;
import com.indra.sofia2.ssap.kp.config.ConnectionConfig;
import com.indra.sofia2.ssap.kp.exceptions.ConnectionConfigException;
import com.indra.sofia2.ssap.kp.implementations.mqtt.KpMQTTClient;
import com.indra.sofia2.ssap.kp.implementations.mqtt.MqttConstants;
public class MQTTConnectionConfig extends ConnectionConfig {
private static final long serialVersionUID = 1L;
private static final String SEPARADOR_CLIENTID = "_";
private static final int PREFIX_MAX_LENGHT = 15;
/**
* Sets the quality of service to use for the Will message. The constructor
* initializes it to QoS.AT_LEAST_ONCE.
*/
private QoS qualityOfService = QoS.AT_MOST_ONCE;
/**
* Sets an IP address the KP will use to connect to the SIB if DNS resolution fails.
*/
private String dnsFailHostSIB;
/**
* Sets the user name used to authenticate against the server.
*/
private String user;
/**
* Sets the password used to authenticate against the server.
*/
private String password;
/**
* ClientId
*/
private String clientId;
/**
* Set to false if you want the MQTT server to persist topic subscriptions
* and ack positions across client sessions. Defaults to false here, so sessions are persisted.
*/
private boolean cleanSession = false;
/**
* Configures the Keep Alive timer in seconds. Defines the maximum time
* interval between messages received from a client. It enables the server
* to detect that the network connection to a client has dropped, without
* having to wait for the long TCP/IP timeout.
*/
private int keepAliveInSeconds = 0;
/**
* Timeout to receive a SSAP response in milliseconds.
*/
private int ssapResponseTimeout = 5000;
/**
* The maximum number of reconnect attempts before an error is reported back
* to the client on the first attempt by the client to connect to a server.
* Set to -1 to use unlimited attempts. Defaults to -1.
*/
private int maxConnectAttempts;
/**
* The maximum number of reconnect attempts before an error is reported back
* to the client after a server connection had previously been established.
* Set to -1 to use unlimited attempts. Defaults to -1.
*/
private int maxReconnectAttempts;
/**
* How long to wait in ms before the first reconnect attempt. Defaults to
* 10.
*/
private long reconnectDelay;
/**
* The maximum amount of time in ms to wait between reconnect attempts.
* Defaults to 30,000.
*/
private long maxReconnectDelay;
/**
* The Exponential backoff to be used between reconnect attempts. Set to 1
* to disable exponential backoff. Defaults to 2.
*/
private double reconnectBackOffMultiplier;
/**
* The size of the internal socket receive buffer. Defaults to 65536 (64k)
*/
private int receiveBufferSize;
/**
* The size of the internal socket send buffer. Defaults to 65536 (64k)
*/
private int sendBufferSize;
/**
* The traffic class or type-of-service octet in the IP header for packets
* sent from the transport. Defaults to 8 which means the traffic should be
* optimized for throughput.
*/
private int trafficClass;
/**
* The maximum bytes per second that this client will receive data at. This
* setting throttles reads so that the rate is not exceeded. Defaults to 0
* which disables throttling.
*/
private int maxReadRate;
/**
* Sets the maximum bytes per second that this client will send data at.
* This setting throttles writes so that the rate is not exceeded. Defaults
* to 0 which disables throttling.
*/
private int maxWriteRate;
/**
* Runs a DNS and an internet connectivity test before any connection
* attempt.
*/
private boolean checkInternetConnection;
/**
* ClientId prefix
*/
private String clientIdPrefix;
public MQTTConnectionConfig() {
this.maxConnectAttempts = 0;
this.maxReconnectAttempts = 0;
this.reconnectDelay = 10;
this.maxReconnectDelay = 30000;
this.reconnectBackOffMultiplier = 2;
this.receiveBufferSize = 65536;
this.sendBufferSize = 65536;
this.trafficClass = 8;
this.maxReadRate = 0;
this.maxWriteRate = 0;
this.checkInternetConnection = false;
this.clientId = UUID.randomUUID().toString().replaceAll("-", "").substring(0, MqttConstants.CLIENT_ID_LENGTH);
this.qualityOfService = QoS.AT_LEAST_ONCE;
}
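/*
 * Illustrative sketch (not part of the original source): a typical way this configuration might be
 * populated before handing it to a KpMQTTClient. The concrete values are placeholders; only setters
 * defined in this class are used.
 *
 * MQTTConnectionConfig config = new MQTTConnectionConfig();
 * config.setQualityOfService(QoS.AT_LEAST_ONCE);
 * config.setClientIdPrefix("myKp");      // at most 15 characters
 * config.setKeepAliveInSeconds(30);
 * config.setSsapResponseTimeout(5000);
 * config.validate();                     // throws ConnectionConfigException if settings are invalid
 */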
public int getMaxReadRate() {
return maxReadRate;
}
public int getMaxWriteRate() {
return maxWriteRate;
}
public int getSendBufferSize() {
return sendBufferSize;
}
public int getReceiveBufferSize() {
return receiveBufferSize;
}
public int getTrafficClass() {
return trafficClass;
}
public void validate() throws ConnectionConfigException {
super.validate();
if (qualityOfService == null) {
throw new ConnectionConfigException("The QoS level is required");
}
}
public QoS getQualityOfService() {
return qualityOfService;
}
public void setQualityOfService(QoS qualityOfService) {
this.qualityOfService = qualityOfService;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public boolean isCleanSession() {
return cleanSession;
}
public void setCleanSession(boolean cleanSession) {
this.cleanSession = cleanSession;
}
public int getKeepAliveInSeconds() {
return keepAliveInSeconds;
}
public void setKeepAliveInSeconds(int keepAliveInSeconds) {
this.keepAliveInSeconds = keepAliveInSeconds;
}
public int getSsapResponseTimeout() {
return ssapResponseTimeout;
}
public void setSsapResponseTimeout(int ssapResponseTimeout) {
this.ssapResponseTimeout = ssapResponseTimeout;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public String getClientIdPrefix() {
return clientIdPrefix;
}
public void setClientIdPrefix(String prefix){
if (prefix.length() > PREFIX_MAX_LENGHT)
throw new ConnectionConfigException("ClientId prefix must not be longer than " + PREFIX_MAX_LENGHT + " characters");
this.clientIdPrefix = prefix;
String clientId = UUID.randomUUID().toString().substring(0, MqttConstants.CLIENT_ID_LENGTH - prefix.length() - 1);
StringBuffer str = new StringBuffer();
this.clientId = str.append(prefix).append(SEPARADOR_CLIENTID).append(clientId).toString();
}
public void resetClientId() {
this.clientId = UUID.randomUUID().toString().replaceAll("-", "").substring(0, MqttConstants.CLIENT_ID_LENGTH);
}
public int getConnectAttemptsMax() {
return maxConnectAttempts;
}
public void setConnectAttemptsMax(int connectAttemptsMax) {
this.maxConnectAttempts = connectAttemptsMax;
}
public int getReconnectAttemptsMax() {
return maxReconnectAttempts;
}
public void setReconnectAttemptsMax(int reconnectAttemptsMax) {
this.maxReconnectAttempts = reconnectAttemptsMax;
}
public long getReconnectDelay() {
return reconnectDelay;
}
public void setReconnectDelay(long reconnectDelay) {
this.reconnectDelay = reconnectDelay;
}
public long getReconnectDelayMax() {
return maxReconnectDelay;
}
public void setReconnectDelayMax(long reconnectDelayMax) {
this.maxReconnectDelay = reconnectDelayMax;
}
public double getReconnectBackOffMultiplier() {
return reconnectBackOffMultiplier;
}
public void setReconnectBackOffMultiplier(double reconnectBackOffMultiplier) {
this.reconnectBackOffMultiplier = reconnectBackOffMultiplier;
}
public String getDnsFailHostSIB() {
return dnsFailHostSIB;
}
public void setDnsFailHostSIB(String dnsFailHostSIB) {
this.dnsFailHostSIB = dnsFailHostSIB;
}
public boolean isCheckInternetConnection() {
return checkInternetConnection;
}
public void setCheckInternetConnection(boolean checkInternetConnection) {
this.checkInternetConnection = checkInternetConnection;
}
}
|
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
/**
* Class representing the state of a single download.
*/
public final class DownloadInfo {
private final String mUrl;
private final String mUserAgent;
private final String mMimeType;
private final String mCookie;
private final String mFileName;
private final String mDescription;
private final String mFilePath;
private final String mReferer;
private final long mContentLength;
private final boolean mHasDownloadId;
private final int mDownloadId;
private final String mContentDisposition;
private final boolean mIsGETRequest;
private final boolean mIsSuccessful;
private DownloadInfo(Builder builder) {
mUrl = builder.mUrl;
mUserAgent = builder.mUserAgent;
mMimeType = builder.mMimeType;
mCookie = builder.mCookie;
mFileName = builder.mFileName;
mDescription = builder.mDescription;
mFilePath = builder.mFilePath;
mReferer = builder.mReferer;
mContentLength = builder.mContentLength;
mHasDownloadId = builder.mHasDownloadId;
mDownloadId = builder.mDownloadId;
mIsSuccessful = builder.mIsSuccessful;
mIsGETRequest = builder.mIsGETRequest;
mContentDisposition = builder.mContentDisposition;
}
public String getUrl() {
return mUrl;
}
public String getUserAgent() {
return mUserAgent;
}
public String getMimeType() {
return mMimeType;
}
public String getCookie() {
return mCookie;
}
public String getFileName() {
return mFileName;
}
public String getDescription() {
return mDescription;
}
public String getFilePath() {
return mFilePath;
}
public String getReferer() {
return mReferer;
}
public long getContentLength() {
return mContentLength;
}
public boolean isGETRequest() {
return mIsGETRequest;
}
public boolean hasDownloadId() {
return mHasDownloadId;
}
public int getDownloadId() {
return mDownloadId;
}
public boolean isSuccessful() {
return mIsSuccessful;
}
public String getContentDisposition() {
return mContentDisposition;
}
public static class Builder {
private String mUrl;
private String mUserAgent;
private String mMimeType;
private String mCookie;
private String mFileName;
private String mDescription;
private String mFilePath;
private String mReferer;
private long mContentLength;
private boolean mIsGETRequest;
private boolean mHasDownloadId;
private int mDownloadId;
private boolean mIsSuccessful;
private String mContentDisposition;
public Builder setUrl(String url) {
mUrl = url;
return this;
}
public Builder setUserAgent(String userAgent) {
mUserAgent = userAgent;
return this;
}
public Builder setMimeType(String mimeType) {
mMimeType = mimeType;
return this;
}
public Builder setCookie(String cookie) {
mCookie = cookie;
return this;
}
public Builder setFileName(String fileName) {
mFileName = fileName;
return this;
}
public Builder setDescription(String description) {
mDescription = description;
return this;
}
public Builder setFilePath(String filePath) {
mFilePath = filePath;
return this;
}
public Builder setReferer(String referer) {
mReferer = referer;
return this;
}
public Builder setContentLength(long contentLength) {
mContentLength = contentLength;
return this;
}
public Builder setIsGETRequest(boolean isGETRequest) {
mIsGETRequest = isGETRequest;
return this;
}
public Builder setHasDownloadId(boolean hasDownloadId) {
mHasDownloadId = hasDownloadId;
return this;
}
public Builder setDownloadId(int downloadId) {
mDownloadId = downloadId;
return this;
}
public Builder setIsSuccessful(boolean isSuccessful) {
mIsSuccessful = isSuccessful;
return this;
}
public Builder setContentDisposition(String contentDisposition) {
mContentDisposition = contentDisposition;
return this;
}
public DownloadInfo build() {
return new DownloadInfo(this);
}
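// Illustrative sketch (not part of the original source): typical Builder usage. The concrete
// values are placeholders; only setters defined in this class are used.
//
// DownloadInfo info = new DownloadInfo.Builder()
//     .setUrl("http://example.com/file.zip")
//     .setMimeType("application/zip")
//     .setFileName("file.zip")
//     .setContentLength(1024L)
//     .setIsGETRequest(true)
//     .build();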
/**
* Create a builder from the DownloadInfo object.
* @param downloadInfo DownloadInfo object from which builder fields are populated.
* @return A builder initialized with fields from downloadInfo object.
*/
public static Builder fromDownloadInfo(final DownloadInfo downloadInfo) {
Builder builder = new Builder();
builder
.setUrl(downloadInfo.getUrl())
.setUserAgent(downloadInfo.getUserAgent())
.setMimeType(downloadInfo.getMimeType())
.setCookie(downloadInfo.getCookie())
.setFileName(downloadInfo.getFileName())
.setDescription(downloadInfo.getDescription())
.setFilePath(downloadInfo.getFilePath())
.setReferer(downloadInfo.getReferer())
.setContentLength(downloadInfo.getContentLength())
.setHasDownloadId(downloadInfo.hasDownloadId())
.setDownloadId(downloadInfo.getDownloadId())
.setContentDisposition(downloadInfo.getContentDisposition())
.setIsGETRequest(downloadInfo.isGETRequest())
.setIsSuccessful(downloadInfo.isSuccessful());
return builder;
}
}
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.java18StreamApi;
import com.intellij.codeInsight.intention.impl.config.ActionUsagePanel;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.ui.ClassNameReferenceEditor;
import com.intellij.ui.CollectionComboBoxModel;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.SimpleListCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.List;
import java.util.*;
/**
* @author Dmitry Batkovich
*/
public class AddMethodsDialog extends DialogWrapper {
public static final @NlsSafe String OR_ELSE_DEFAULT_VALUE = ".orElseGet(() -> defaultValue)";
private static final @NlsSafe String STREAM_PREFIX = "stream.";
private final static Logger LOG = Logger.getInstance(AddMethodsDialog.class);
@NotNull private final Project myProject;
private JPanel myPanel;
private ComboBox myTemplatesCombo;
private ClassNameReferenceEditor myClassNameEditor;
private ComboBox<Collection<PsiMethod>> myMethodNameCombo;
private ActionUsagePanel myBeforeActionPanel;
private ActionUsagePanel myAfterActionPanel;
private JPanel myExamplePanel;
@SuppressWarnings("unchecked")
protected AddMethodsDialog(@NotNull final Project project, @NotNull final Component parent, boolean canBeParent) {
super(parent, canBeParent);
myProject = project;
myTemplatesCombo.setEnabled(false);
myTemplatesCombo.setRenderer(new ColoredListCellRenderer<PseudoLambdaReplaceTemplate>() {
@Override
protected void customizeCellRenderer(@NotNull JList list,
PseudoLambdaReplaceTemplate template,
int index,
boolean selected,
boolean hasFocus) {
if (template == null) {
return;
}
append(STREAM_PREFIX);
final String streamApiMethodName = template.getStreamApiMethodName();
if (StreamApiConstants.STREAM_STREAM_API_METHODS.getValue().contains(streamApiMethodName)) {
append(streamApiMethodName + "()", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
}
else {
LOG.assertTrue(StreamApiConstants.FAKE_FIND_MATCHED.equals(streamApiMethodName));
@NlsSafe String fragment = String.format(StreamApiConstants.FAKE_FIND_MATCHED_PATTERN, "condition");
append(fragment, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
append(JavaBundle.message("add.methods.dialog.or"));
append(OR_ELSE_DEFAULT_VALUE, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
}
}
});
myTemplatesCombo.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
final PseudoLambdaReplaceTemplate template = (PseudoLambdaReplaceTemplate)e.getItem();
final Collection<PsiMethod> methods = (Collection<PsiMethod>)myMethodNameCombo.getSelectedItem();
if (methods == null) {
return;
}
for (PsiMethod method : methods) {
if (template.validate(method) != null) {
showTemplateExample(template, method);
break;
}
}
}
});
myMethodNameCombo.setModel(new DefaultComboBoxModel());
myMethodNameCombo.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
if (!myExamplePanel.isEnabled()) {
myExamplePanel.setEnabled(true);
}
final Collection<PseudoLambdaReplaceTemplate> suitableTemplates = new LinkedHashSet<>();
final Collection<PsiMethod> methods = (Collection<PsiMethod>) e.getItem();
for (PseudoLambdaReplaceTemplate template : PseudoLambdaReplaceTemplate.getAllTemplates()) {
for (PsiMethod method : methods) {
if (template.validate(method) != null) {
if (suitableTemplates.isEmpty()) {
showTemplateExample(template, method);
}
suitableTemplates.add(template);
}
}
}
if (!myTemplatesCombo.isEnabled()) {
myTemplatesCombo.setEnabled(true);
}
LOG.assertTrue(!suitableTemplates.isEmpty());
final List<PseudoLambdaReplaceTemplate> templatesAsList = new ArrayList<>(suitableTemplates);
myTemplatesCombo.setModel(new CollectionComboBoxModel(templatesAsList));
myTemplatesCombo.setSelectedItem(templatesAsList.get(0));
}
});
myMethodNameCombo.setRenderer(SimpleListCellRenderer.create("", value -> value.iterator().next().getName()));
myClassNameEditor.addDocumentListener(new DocumentListener() {
@Override
public void documentChanged(@NotNull DocumentEvent e) {
final String classFqn = e.getDocument().getText();
final PsiClass aClass = JavaPsiFacade.getInstance(project).findClass(classFqn, GlobalSearchScope.allScope(project));
final DefaultComboBoxModel comboBoxModel = (DefaultComboBoxModel)myMethodNameCombo.getModel();
comboBoxModel.removeAllElements();
if (aClass == null) {
enable(false);
}
else {
final List<PseudoLambdaReplaceTemplate> possibleTemplates = PseudoLambdaReplaceTemplate.getAllTemplates();
final MultiMap<String, PsiMethod> nameToMethod = MultiMap.createLinked();
for (PsiMethod m : ContainerUtil.filter(aClass.getMethods(), method -> {
if (method.isConstructor() ||
!method.hasModifierProperty(PsiModifier.STATIC) ||
method.hasModifierProperty(PsiModifier.PRIVATE)) {
return false;
}
boolean templateFound = false;
for (PseudoLambdaReplaceTemplate template : possibleTemplates) {
if (template.validate(method) != null) {
templateFound = true;
}
}
if (!templateFound) {
return false;
}
return true;
})) {
nameToMethod.putValue(m.getName(), m);
}
for (Map.Entry<String, Collection<PsiMethod>> entry : nameToMethod.entrySet()) {
comboBoxModel.addElement(entry.getValue());
}
final boolean isSuitableMethodsFound = comboBoxModel.getSize() != 0;
enable(isSuitableMethodsFound);
}
}
});
setOKActionEnabled(false);
init();
}
private void enable(boolean isEnabled) {
myMethodNameCombo.setEnabled(isEnabled);
myTemplatesCombo.setEnabled(isEnabled);
setOKActionEnabled(isEnabled);
myExamplePanel.setEnabled(isEnabled);
if (!isEnabled) {
myBeforeActionPanel.reset("", JavaFileType.INSTANCE);
myAfterActionPanel.reset("", JavaFileType.INSTANCE);
}
}
private void showTemplateExample(final PseudoLambdaReplaceTemplate template, final PsiMethod method) {
final PsiClass aClass = method.getContainingClass();
LOG.assertTrue(aClass != null);
final String fqn = aClass.getQualifiedName();
LOG.assertTrue(fqn != null);
final String parameters =
StringUtil.join(ContainerUtil.map(method.getParameterList().getParameters(), parameter -> parameter.getName()), ", ");
final String expressionText = fqn + "." + method.getName() + "(" + parameters + ")";
final PsiExpression psiExpression = JavaPsiFacade.getElementFactory(method.getProject())
.createExpressionFromText(expressionText, null);
LOG.assertTrue(psiExpression instanceof PsiMethodCallExpression);
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)psiExpression;
template.convertToStream(methodCallExpression, method, false);
myBeforeActionPanel.reset("void example() {\n <spot>" + methodCallExpression.getText() + "</spot>;\n}", JavaFileType.INSTANCE);
myAfterActionPanel.reset("void example() {\n <spot>" + template.convertToStream(methodCallExpression, method, true).getText() + "</spot>\n}", JavaFileType.INSTANCE);
}
@Override
protected void dispose() {
Disposer.dispose(myBeforeActionPanel);
Disposer.dispose(myAfterActionPanel);
super.dispose();
}
private void createUIComponents() {
myClassNameEditor = new ClassNameReferenceEditor(myProject, null);
}
public StaticPseudoFunctionalStyleMethodOptions.PipelineElement getSelectedElement() {
return new StaticPseudoFunctionalStyleMethodOptions.PipelineElement(myClassNameEditor.getText(),
ContainerUtil.getFirstItem((Collection<PsiMethod>)myMethodNameCombo.getSelectedItem()).getName(),
(PseudoLambdaReplaceTemplate)myTemplatesCombo.getSelectedItem());
}
@Nullable
@Override
protected JComponent createCenterPanel() {
return myPanel;
}
@Nullable
@Override
public JComponent getPreferredFocusedComponent() {
return myClassNameEditor;
}
}
|
|
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage;
/*
* This class is a wrapper for the IRTuner dll interface from SourceForge.
*/
public class SFIRTuner implements Runnable
{
static
{
//System.loadLibrary("Sage");
}
public static final String REMOTE_DIR = "remote_dir";
public static final String IRTUNE_REPEAT_FACTOR = "irtune_repeat_factor";
public static final String IRTUNE_PREFIX_EXTRA_DELAY = "irtune_prefix_extra_delay";
public static final String USB_IRTUNE_REPEAT_FACTOR = "usb_irtune_repeat_factor";
public static final String IRTUNE_GLOBAL_PREROLL = "actisys_irtune_global_preroll";
public static final String USBIRTUNE_GLOBAL_PREROLL = "usbuirt_irtune_global_preroll";
public static final String ASYNC_TUNING = "async_tuning";
public static class Pattern
{
public int bit_length;
public int length;
public char r_flag;
public byte[] bytes;
public Pattern next;
public String toString()
{
return "Pattern[bit_length=" + bit_length + ", length=" + length + ", r_flag=" + r_flag +
", next=" + next + ']';
}
}
public static class Command
{
public String name;
public Pattern pattern;
public Command next;
public String toString()
{
return "Command[name=" + name + ", pattern=" + pattern + ", next=" + next + ']';
}
}
public static class Remote
{
public String name;
public long carrier_freq;
public long bit_time;
public Command command;
public Remote next;
// SageTV added fields
public int channelDigits;
public String confirmCmd;
public int buttonDelay;
public int sequenceDelay;
public String prefixCmd;
public String toString()
{
return "Remote[name=" + name + ", carrier=" + carrier_freq + ", bit_time=" + bit_time +
", command=" + command + ", next=" + next + ']';
}
}
public static native String[] getValidDeviceFiles(String[] tryFiles);
public static native String[] getPrettyDeviceNames(String[] validFiles);
public static String getSFIRTunerPluginDir()
{
String theDir = null;
if(Sage.WINDOWS_OS)
theDir = Sage.readStringValue(Sage.HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common", "IRTunerPluginsDir");
else if(Sage.LINUX_OS)
theDir = Sage.get("irtuner_plugins_dir", "irtunerplugins");
// this guarantees we return a valid path (user.dir or Plug-Ins on Mac OS X)
if(theDir == null) theDir = Sage.getPath("plugins");
// System.out.println("getSFIRTunerPluginDir: dir = " + theDir);
return theDir;
}
public static String getPrettyNameForFile(String inFilename)
{
java.io.File testFile = new java.io.File(inFilename);
if (!testFile.isFile())
{
// Look in the global directory
String globalIRDir = getSFIRTunerPluginDir();
if (globalIRDir != null)
testFile = new java.io.File(globalIRDir, inFilename);
}
inFilename = testFile.getAbsolutePath();
String[] rv = getPrettyDeviceNames(new String[] { inFilename });
return (rv != null && rv.length > 0) ? rv[0] : inFilename;
}
private static final java.util.Map prettyNameMap = java.util.Collections.synchronizedMap(new java.util.HashMap());
public static String getFileForPrettyDeviceName(String prettyName)
{
if (prettyNameMap.get(prettyName) != null)
return prettyNameMap.get(prettyName).toString();
String irPluginDir = getSFIRTunerPluginDir();
java.io.File[] suspectDLLFiles = new java.io.File(irPluginDir).
listFiles(new java.io.FilenameFilter(){
public boolean accept(java.io.File dir,String filename){return filename.toLowerCase().endsWith(Sage.WINDOWS_OS ? ".dll" :
(Sage.LINUX_OS ? ".so" : ".dylib"));}});
String[] suspectDLLs = (suspectDLLFiles == null) ? Pooler.EMPTY_STRING_ARRAY : new String[suspectDLLFiles.length];
for (int i = 0; i < suspectDLLs.length; i++)
suspectDLLs[i] = suspectDLLFiles[i].getAbsolutePath();
String[] irDevFiles = getValidDeviceFiles(suspectDLLs);
String[] allPretty = getPrettyDeviceNames(irDevFiles);
for (int i = 0; i < allPretty.length; i++)
if (allPretty[i].equals(prettyName))
{
prettyNameMap.put(prettyName, irDevFiles[i]);
return irDevFiles[i];
}
prettyNameMap.put(prettyName, prettyName);
return prettyName;
}
/*
* Cmd line params
* -r name : create remote with name, calculates bitrate & carrier
* -c rname cname : record command 'cname' to the remote 'rname'
* -l name : load the remotes from this filename
* -s name : save the remotes to this filename
* -p rname cname repeat : play the command 'cname' from remote 'rname' repeat times
* -w time : wait for time seconds
* -i : run initDevice
* -x comport : open comport #
*/
/*public static void main(String[] args)
{
if (args.length == 0)
{
System.out.println("Usage:");
System.out.println("-r name : create remote with name, calculates bitrate & carrier");
System.out.println("-c rname cname : record command 'cname' to the remote 'rname'");
System.out.println("-l name : load the remotes from this filename");
System.out.println("-s name : save the remotes to this filename");
System.out.println("-p rname cname repeat : play the command 'cname' from remote 'rname' repeat times");
System.out.println("-w time : wait for time seconds");
System.out.println("-i : run initdevice");
System.out.println("-x comport : open comport #");
return;
}
String[] dllFiles = new java.io.File(System.getProperty("user.dir")).list(new java.io.FilenameFilter()
{
public boolean accept(java.io.File dir,String filename){return filename.endsWith(".dll");}
});
System.out.println("dllFiles=" + java.util.Arrays.asList(dllFiles));
String[] validFiles = getValidDeviceFiles(dllFiles);
System.out.println("validFiles=" + java.util.Arrays.asList(validFiles));
SFIRTuner tuney = new SFIRTuner(validFiles[0]);
for (int i = 0; i < args.length; i++)
{
if (args[i].equals("-r"))
{
String rname = args[++i];
System.out.println("Create remote named " + rname);
long carrier=0, bitrate=0;
if (tuney.needCarrierFrequency())
{
while (carrier == 0)
{
System.out.println("Hold a remote button down for a while. Scanning for frequency...");
carrier = tuney.findCarrierFrequency();
System.out.println("Carrier frequency=" + carrier);
if (carrier > 100000)
{
System.out.println("BAD CARRIER, do it again!");
carrier = 0;
}
}
}
if (tuney.needBitrate())
{
System.out.println("Hold a remote button down for a while. Calculating bitrate...");
bitrate = tuney.findBitRate();
System.out.println("Bitrate=" + bitrate);
}
Remote newRem = tuney.createRemote(rname, carrier, bitrate, null);
tuney.addRemote(newRem);
System.out.println("Created & added remote " + newRem);
}
else if (args[i].equals("-c"))
{
String rname = args[++i];
String cname = args[++i];
Remote rem = tuney.findRemote(rname);
if (rem == null)
{
System.out.println("Can't find remote named:" + rname);
continue;
}
System.out.println("Hit the " + cname + " key for remote " + rname);
Command cmd = tuney.recordCommand(cname);
System.out.println("Recorded command:" + cmd);
tuney.addCommand(rem, cmd);
}
else if (args[i].equals("-l"))
{
String fname = args[++i];
System.out.println("Loading remotes from filename:" + fname);
tuney.loadRemotes(fname);
System.out.println("Remotes=" + tuney.baseRemote);
}
else if (args[i].equals("-s"))
{
String fname = args[++i];
System.out.println("Saving remotes to filename:" + fname);
tuney.saveRemotes(fname);
System.out.println("Remotes=" + tuney.baseRemote);
}
else if (args[i].equals("-p"))
{
String rname = args[++i];
String cname = args[++i];
int rep = Integer.parseInt(args[++i]);
System.out.println("Starting to play command " + cname + " for remote " + rname + " " + rep + " times");
tuney.playCommand(tuney.findRemote(rname), cname, rep);
System.out.println("Done playing command");
}
else if (args[i].equals("-w"))
{
try{Thread.sleep(1000*Integer.parseInt(args[++i]));}catch(Exception e){}
}
else if (args[i].equals("-i"))
tuney.initDevice();
else if (args[i].equals("-x"))
{
int comport = Integer.parseInt(args[++i]);
boolean openD = tuney.openDevice(comport);
if (!openD)
{
System.out.println("Failed opening COM port. Trying again!");
tuney.closeDevice();
openD = tuney.openDevice(comport);
if (!openD)
{
System.out.println("Failed opening COM port. Darn!");
return;
}
}
System.out.println("Opened com port " + openD);
}
}
tuney.closeDevice();
System.out.println("Closed COM port");
}*/
private static java.util.Vector loadedTuneys = new java.util.Vector();
/*
* The carrier & bit timing for the hardware gets set in initDevice. The values
* it uses are from the last remote it loaded via a call to loadRemotes
*/
public SFIRTuner(String inFilename)
{
java.io.File testFile = new java.io.File(inFilename);
String globalIRDir = getSFIRTunerPluginDir();
if (!testFile.isFile())
{
// Look in the global directory
if (globalIRDir != null)
testFile = new java.io.File(globalIRDir, inFilename);
}
inFilename = testFile.getAbsolutePath();
if (Sage.WINDOWS_OS && getValidDeviceFiles(new String[] { inFilename} ).length == 0)
{
System.err.println("Invalid device filename for IRTuner: " + inFilename);
}
devFilename = inFilename;
if (globalIRDir != null)
remoteDir = new java.io.File(globalIRDir, "RemoteCodes");
else
remoteDir = new java.io.File(Sage.getPath("plugins")/*System.getProperty("user.dir")*/, "RemoteCodes");
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
remoteDir.mkdirs();
String[] prettyNames = getPrettyDeviceNames(new String[] { devFilename });
if (prettyNames != null && prettyNames.length > 0)
{
remoteDir2 = new java.io.File(remoteDir, prettyNames[0]);
remoteDir2.mkdirs();
}
}
else
remoteDir2 = new java.io.File(remoteDir, devFilename);
asyncTuning = Sage.getBoolean(ASYNC_TUNING, true);
tuneVec = new java.util.Vector();
globalPreroll = 0;
initialize();
}
private void initialize()
{
checkForTuneyConflicts();
init0();
alive = true;
if (asyncTuning)
{
asyncThread = new Thread(this, "AsyncTuner");
asyncThread.setDaemon(true);
asyncThread.setPriority(Thread.MAX_PRIORITY - 3);
asyncThread.start();
if (Sage.WINDOWS_OS)
{
globalPreroll = (devFilename.toLowerCase().indexOf("uu_irsage") == -1) ? Sage.getLong(IRTUNE_GLOBAL_PREROLL, 2000L) :
Sage.getLong(USBIRTUNE_GLOBAL_PREROLL, 150);
}
else
globalPreroll = Sage.getLong(USBIRTUNE_GLOBAL_PREROLL, 0);
}
loadedTuneys.add(this);
}
private void checkForTuneyConflicts()
{
// We can't have more than one Actisys plugin open at once, so shut down any others if this is one
if (devFilename.startsWith("as_ir200l"))
{
for (int i = 0; i < loadedTuneys.size(); i++)
{
SFIRTuner tuney = (SFIRTuner) loadedTuneys.get(i);
if (tuney.devFilename.equals(devFilename))
{
System.out.println("SFIRTuner shutting down tuning plugin due to conflict");
tuney.goodbye();
}
}
}
}
public boolean isConfigurable()
{
return !canMacroTune();
}
public void run()
{
Object[] tuneData = null;
while (alive)
{
String nextTune = null;
String nextRemote = null;
synchronized (tuneVec)
{
if (tuneData != null)
{
tuneVec.remove(tuneData);
tuneData = null;
}
if (tuneVec.isEmpty())
{
tuneVec.notifyAll();
try{tuneVec.wait(0);}catch(InterruptedException e){}
continue;
}
tuneData = (Object[]) tuneVec.lastElement();
nextRemote = (String) tuneData[0];
nextTune = (String) tuneData[1];
// Only send the last channel change command for a given remote since any prior
// ones will be overridden by it.
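// For example, if tuneVec holds { ["remA","12"], ["remB","5"], ["remA","34"] } when the
// worker wakes, it takes ["remA","34"] (the newest entry) first and drops the stale
// ["remA","12"], so only "34" is ever sent for remA; ["remB","5"] is played on the next pass.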
for (int i = tuneVec.size() - 2; i >= 0; i--)
{
Object[] tempTuneData = (Object[]) tuneVec.get(i);
if (tempTuneData[0].equals(nextRemote))
tuneVec.removeElementAt(i);
}
}
if (globalPreroll != 0 && !canMacroTune())
try{ Thread.sleep(globalPreroll); } catch(Exception e){}
playTuneString(nextRemote, nextTune);
}
}
public String[] getRemoteNames()
{
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
java.util.ArrayList rv = new java.util.ArrayList();
String[] irFiles = remoteDir.list(new java.io.FilenameFilter()
{
public boolean accept(java.io.File dir,String filename){return filename.endsWith(".ir") &&
hasRemoteFileData(filename.substring(0, filename.length() - 3));}
});
if (irFiles != null)
{
for (int i = 0; i < irFiles.length; i++)
rv.add(irFiles[i].substring(0, irFiles[i].length() - 3));
}
irFiles = remoteDir2.list(new java.io.FilenameFilter()
{
public boolean accept(java.io.File dir,String filename){return filename.endsWith(".ir") &&
hasRemoteFileData(filename.substring(0, filename.length() - 3));}
});
if (irFiles != null)
{
for (int i = 0; i < irFiles.length; i++)
rv.add(irFiles[i].substring(0, irFiles[i].length() - 3));
}
return (String[]) rv.toArray(Pooler.EMPTY_STRING_ARRAY);
}
else
{
// This means load the whole remote list
synchronized (this)
{
loadRemotes(null);
java.util.ArrayList rv = new java.util.ArrayList();
Remote tempRemote = baseRemote;
while (tempRemote != null)
{
rv.add(tempRemote.name);
tempRemote = tempRemote.next;
}
baseRemote = null; // since they don't load fully this way
String[] rvArray = (String[]) rv.toArray(Pooler.EMPTY_STRING_ARRAY);
java.util.Arrays.sort(rvArray);
return rvArray;
}
}
}
public synchronized void goodbye()
{
boolean needToKill = alive; // Don't close the hardware if it's not active or it may crash!
alive = false;
synchronized (tuneVec)
{
tuneVec.notifyAll();
}
if (needToKill)
{
closeDevice();
goodbye0();
}
loadedTuneys.remove(this);
}
private void addCommand(Remote daRemote, Command addMe)
{
Command cmdList = daRemote.command;
if (cmdList == null)
{
daRemote.command = addMe;
return;
}
while (cmdList.next != null)
cmdList = cmdList.next;
cmdList.next = addMe;
}
/* private void addRemote(Remote addMe)
{
if (baseRemote == null)
baseRemote = addMe;
else
{
Remote currRemote = baseRemote;
while (currRemote.next != null)
currRemote = currRemote.next;
currRemote.next = addMe;
}
}
*/
/* private Remote findRemote(String name)
{
Remote rem = baseRemote;
while (rem != null)
{
if (name.equals(rem.name))
return rem;
rem = rem.next;
}
return null;
}
*/
private native void closeDevice();
private Remote createRemote(String remoteName, long carrier, long bitrate, Command commands)
{
Remote rv = new Remote();
rv.name = remoteName;
rv.carrier_freq = carrier;
rv.bit_time = bitrate;
rv.command = commands;
rv.buttonDelay = Sage.WINDOWS_OS ? 600 : 800;
rv.sequenceDelay = 800;
rv.channelDigits = 3;
return rv;
}
public synchronized void playCommand(String remoteName, String cmdName, int repeats, boolean sleepAfter)
{
if (!ensureRemoteLoaded(remoteName)) return;
long waitNow = baseRemote.buttonDelay + baseRemote.sequenceDelay - (Sage.eventTime() - lastIRTime);
if (waitNow > 0)
{
try
{
Thread.sleep(waitNow);
} catch(Exception e){}
}
if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so") && UIManager.getLocalUI() != null)
{
// Sync the PVR150 xmt & recv
if (UIManager.getLocalUI().getRouter() == null)
{
playCommand(baseRemote, cmdName, repeats);
// if (sleepAfter) // We get I2C failures if we don't wait at least 350 msec after a send
{
try
{
Thread.sleep(baseRemote.buttonDelay);
} catch(Exception e){}
}
}
else
{
synchronized (UIManager.getLocalUI().getRouter().getDefaultInputPlugin())
{
//System.out.println("PVR150 SyncBlock Enter");
playCommand(baseRemote, cmdName, repeats);
// if (sleepAfter) // We get I2C failures if we don't wait at least 350 msec after a send
{
try
{
Thread.sleep(baseRemote.buttonDelay);
} catch(Exception e){}
}
//System.out.println("PVR150 SyncBlock Exit");
}
}
}
else
{
playCommand(baseRemote, cmdName, repeats);
if (sleepAfter)
{
try
{
Thread.sleep(baseRemote.buttonDelay);
} catch(Exception e){}
}
}
}
private int getRepeatFactor()
{
if (devFilename.toLowerCase().indexOf("uu_irsage") != -1)
return Sage.getInt(USB_IRTUNE_REPEAT_FACTOR, 2);
else
return Sage.getInt(IRTUNE_REPEAT_FACTOR, Sage.LINUX_OS ? 1 : 2);
}
public void playTuneString(String remoteName, String cmdString)
{
playTuneString(remoteName, cmdString, false);
}
public void playTuneString(String remoteName, String cmdString, boolean forceSynchronous)
{
if (cmdString == null || cmdString.length() == 0) return;
if (!forceSynchronous && asyncTuning && Thread.currentThread() != asyncThread)
{
synchronized (tuneVec)
{
tuneVec.addElement(new Object[] { remoteName, cmdString });
tuneVec.notifyAll();
}
return;
}
synchronized (this)
{
if (Sage.DBG) System.out.println("Playing IR tune command of " + cmdString);
if (!ensureRemoteLoaded(remoteName)) return;
if (canMacroTune())
{
macroTune(Integer.parseInt(cmdString));
}
else
{
// To deal with reinitializing the IR XMT for the 150 on Linux after the receive fails
if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so"))
{
closeDevice();
openDevice(currPortNum);
// Wait for the init to complete
try{Thread.sleep(Sage.getInt("linux/pvr150_ir_reset_wait", 750));}catch (Exception e){}
}
try
{
int cmdNum = Integer.parseInt(cmdString);
if (baseRemote.channelDigits != 0)
{
if (cmdString.length() != baseRemote.channelDigits)
{
int hiChan = (int)Math.round(Math.pow(10, baseRemote.channelDigits));
while (hiChan/10 > cmdNum)
{
cmdString = "0" + cmdString;
hiChan /= 10;
}
}
}
}catch (Exception e){}
if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
{
playCommand(remoteName, baseRemote.prefixCmd, getRepeatFactor(), true);
long extraPrefixDelay = Sage.getLong(IRTUNE_PREFIX_EXTRA_DELAY, 0);
if (extraPrefixDelay > 0)
{
try{Thread.sleep(extraPrefixDelay);}catch(Exception e){}
}
}
boolean needsConfirm = baseRemote.confirmCmd != null && baseRemote.confirmCmd.length() > 0;
for (int i = 0; i < cmdString.length(); i++)
playCommand(remoteName, "" + cmdString.charAt(i), getRepeatFactor(),
needsConfirm ? true : (i < cmdString.length() - 1));
if (needsConfirm)
playCommand(remoteName, baseRemote.confirmCmd, getRepeatFactor(), false);
lastIRTime = Sage.eventTime();
}
}
}
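/*
 * Tuning sketch (values are illustrative): with canMacroTune() false, channelDigits=3,
 * no prefixCmd configured and confirmCmd="ENTER", playTuneString("myRemote", "7") pads
 * the channel to "007" and then sends the per-digit commands "0", "0", "7" followed by
 * "ENTER", each with getRepeatFactor() repeats.
 */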
public void waitForCompletion()
{
synchronized (tuneVec)
{
while (!tuneVec.isEmpty())
{
try
{
tuneVec.wait(5000);
}
catch (InterruptedException e){}
}
}
}
public synchronized String addNewRemote(String name)
{
name = createValidRemoteName(name);
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
if (new java.io.File(remoteDir2, name + ".ir").isFile()) return null;
}
if (Sage.DBG) System.out.println("Creating remote named " + name);
long carrier=0, bitrate=0;
if (needCarrierFrequency())
{
while (carrier == 0)
{
if (Sage.DBG) System.out.println("Hold a remote button down for a while. Scanning for frequency...");
carrier = findCarrierFrequency();
if (Sage.DBG) System.out.println("Carrier frequency=" + carrier);
if (carrier > 100000)
{
if (Sage.DBG) System.out.println("BAD CARRIER, do it again!");
carrier = 0;
}
}
}
if (needBitrate())
{
if (Sage.DBG) System.out.println("Hold a remote button down for a while. Calculating bitrate...");
bitrate = findBitRate();
if (Sage.DBG) System.out.println("Bitrate=" + bitrate);
}
Remote newRem = createRemote(name, carrier, bitrate, null);
baseRemote = newRem;
saveRemotes(new java.io.File(remoteDir2, baseRemote.name + ".ir").toString());
return name;
}
private static String createValidRemoteName(String tryMe)
{
int len = tryMe.length();
StringBuffer sb = new StringBuffer(len);
for (int i = 0; i < len; i++)
{
char c = tryMe.charAt(i);
if (Character.isLetterOrDigit(c))
sb.append(c);
}
return sb.toString();
}
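// For example, createValidRemoteName("Living Room TV!") yields "LivingRoomTV":
// every character that is not a letter or digit is dropped.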
public synchronized boolean recordNewCommand(String remoteName, String cmdName)
{
if (!ensureRemoteLoaded(remoteName)) return false;
// If it's already there, remove it so we can reprogram it
removeCommand(remoteName, cmdName);
Command cmd = recordCommand(cmdName);
if (cmd != null)
addCommand(baseRemote, cmd);
return (cmd != null);
}
private boolean ensureRemoteLoaded(String remoteName)
{
if (baseRemote == null || !baseRemote.name.equals(remoteName))
{
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
if (!remFile.isFile())
remFile = new java.io.File(remoteDir2, remoteName + ".ir");
loadRemotes(remFile.toString());
java.io.BufferedReader inStream = null;
try
{
inStream = new java.io.BufferedReader(new java.io.FileReader(remFile));
String str = inStream.readLine();
if (str != null)
{
java.util.StringTokenizer toker = new java.util.StringTokenizer(str, " \t");
if (toker.countTokens() > 3)
{
toker.nextToken();
toker.nextToken();
toker.nextToken();
if (toker.hasMoreTokens())
baseRemote.channelDigits = Integer.parseInt(toker.nextToken());
if (toker.hasMoreTokens())
baseRemote.buttonDelay = Integer.parseInt(toker.nextToken());
if (toker.hasMoreTokens())
baseRemote.sequenceDelay = Integer.parseInt(toker.nextToken());
if (toker.hasMoreTokens())
{
baseRemote.confirmCmd = toker.nextToken();
if ("|".equals(baseRemote.confirmCmd))
baseRemote.confirmCmd = null;
}
if (toker.hasMoreTokens())
baseRemote.prefixCmd = toker.nextToken();
}
}
}
catch (Exception e)
{
System.err.println("I/O Error loading remote control data of:" + e);
}
finally
{
if (inStream != null)
try{inStream.close();}catch(Exception e){}
}
if (baseRemote != null)
{
if (baseRemote.buttonDelay <= 0)
baseRemote.buttonDelay = Sage.WINDOWS_OS ? 600 : 800;
if (baseRemote.sequenceDelay <= 0)
baseRemote.sequenceDelay = 800;
initDevice();
}
}
else
{
loadRemotes(remoteName);
if (baseRemote != null)
{
baseRemote.channelDigits = Sage.getInt("lirc/remotes/" + remoteName + "/channel_digits", 3);
baseRemote.buttonDelay = Sage.getInt("lirc/remotes/" + remoteName + "/button_delay", 800);
baseRemote.sequenceDelay = Sage.getInt("lirc/remotes/" + remoteName + "/sequence_delay", 800);
baseRemote.confirmCmd = Sage.get("lirc/remotes/" + remoteName + "/confirm_cmd", "");
baseRemote.prefixCmd = Sage.get("lirc/remotes/" + remoteName + "/prefix_cmd", "");
initDevice();
}
}
}
return baseRemote != null;
}
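/*
 * First-line layout of a saved .ir file as parsed above and written by saveChanges():
 * the three leading tokens come from the native saveRemotes() output (an assumption based
 * on the countTokens() > 3 check), followed by the optional fields
 *   <channelDigits> <buttonDelay> <sequenceDelay> [<confirmCmd> | "|"] [<prefixCmd>]
 * where "|" is a placeholder used when only a prefix command is present.
 */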
// DO NOT MODIFY THE RETURNED DATA STRUCTURE!!
public synchronized Remote getRemoteInfo(String remoteName)
{
ensureRemoteLoaded(remoteName);
return baseRemote;
}
public synchronized void renameCommand(String remoteName, String oldCmdName, String newCmdName)
{
if (!ensureRemoteLoaded(remoteName)) return;
Command currCmd = baseRemote.command;
while (currCmd != null)
{
if (currCmd.name.equals(oldCmdName))
{
currCmd.name = newCmdName;
break;
}
currCmd = currCmd.next;
}
}
public synchronized void removeCommand(String remoteName, String cmdName)
{
if (!ensureRemoteLoaded(remoteName)) return;
Command currCmd = baseRemote.command;
Command lastCmd = null;
while (currCmd != null)
{
if (currCmd.name.equals(cmdName))
{
if (lastCmd == null)
baseRemote.command = currCmd.next;
else
lastCmd.next = currCmd.next;
break;
}
lastCmd = currCmd;
currCmd = currCmd.next;
}
}
private boolean hasRemoteFileData(String remoteName)
{
java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
if (remFile.isFile() && remFile.length() > 0)
return true;
remFile = new java.io.File(remoteDir2, remoteName + ".ir");
return (remFile.isFile() && remFile.length() > 0);
}
public synchronized void saveChanges()
{
if (baseRemote != null)
{
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
java.io.File remFile = new java.io.File(remoteDir, baseRemote.name + ".ir");
if (!remFile.isFile())
remFile = new java.io.File(remoteDir2, baseRemote.name + ".ir");
saveRemotes(remFile.toString());
// Load back the file data, and then rewrite the first line in our format
java.io.BufferedReader inStream = null;
java.io.PrintWriter outStream = null;
try
{
inStream = new java.io.BufferedReader(new java.io.FileReader(remFile));
StringBuffer sb = new StringBuffer();
sb.append(inStream.readLine());
sb.append(' ');
sb.append(baseRemote.channelDigits);
sb.append(' ');
sb.append(baseRemote.buttonDelay);
sb.append(' ');
sb.append(baseRemote.sequenceDelay);
if (baseRemote.confirmCmd != null && baseRemote.confirmCmd.length() > 0)
sb.append(" " + baseRemote.confirmCmd);
else if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
sb.append(" |"); // delimiter to separate prefixCmd
if (baseRemote.prefixCmd != null && baseRemote.prefixCmd.length() > 0)
sb.append(" " + baseRemote.prefixCmd);
sb.append("\r\n");
char[] buf = new char[1024];
int numRead = inStream.read(buf);
while (numRead != -1)
{
sb.append(buf, 0, numRead);
numRead = inStream.read(buf);
}
inStream.close();
inStream = null;
outStream = new java.io.PrintWriter(new java.io.BufferedWriter(new java.io.FileWriter(remFile)));
outStream.print(sb.toString());
}
catch (java.io.IOException e)
{
System.err.println("I/O Error resaving remote control data of:" + e);
}
finally
{
if (inStream != null)
try{inStream.close();}catch(Exception e){}
if (outStream != null)
try{outStream.close();}catch(Exception e){}
}
}
else
{
Sage.putInt("lirc/remotes/" + baseRemote.name + "/channel_digits", baseRemote.channelDigits);
Sage.putInt("lirc/remotes/" + baseRemote.name + "/button_delay", baseRemote.buttonDelay);
Sage.putInt("lirc/remotes/" + baseRemote.name + "/sequence_delay", baseRemote.sequenceDelay);
Sage.put("lirc/remotes/" + baseRemote.name + "/confirm_cmd", baseRemote.confirmCmd);
Sage.put("lirc/remotes/" + baseRemote.name + "/prefix_cmd", baseRemote.prefixCmd);
}
}
}
public synchronized void cancelChanges()
{
baseRemote = null;
}
public synchronized void removeRemote(String remoteName)
{
if (Sage.WINDOWS_OS || Sage.MAC_OS_X)
{
// This just erases the file
if (baseRemote != null && baseRemote.name.equals(remoteName))
baseRemote = null;
java.io.File remFile = new java.io.File(remoteDir, remoteName + ".ir");
if (!remFile.isFile())
remFile = new java.io.File(remoteDir2, remoteName + ".ir");
if (remFile.canWrite()) // read-only files are devices that can't be removed
remFile.delete();
}
}
public synchronized void setChannelDigits(int x)
{
if (baseRemote != null)
baseRemote.channelDigits = x;
}
public synchronized void setButtonDelay(int millis)
{
if (baseRemote != null)
baseRemote.buttonDelay = millis;
}
public synchronized void setSequenceDelay(int millis)
{
if (baseRemote != null)
baseRemote.sequenceDelay = millis;
}
public synchronized void setConfirmKey(String x)
{
if (baseRemote != null)
baseRemote.confirmCmd = x;
}
public synchronized void setPrefixKey(String x)
{
if (baseRemote != null)
baseRemote.prefixCmd = x;
}
public boolean isAlive() { return alive; }
public Remote getDefaultRemoteInfo() { return baseRemote; }
public int getMinChannel()
{
return 1;
}
public int getMaxChannel()
{
if (baseRemote != null)
if (baseRemote.channelDigits == 0)
return 999;
else return (int)Math.round(Math.pow(10, baseRemote.channelDigits)) - 1;
return 999;
}
public native String deviceName();
private native long findBitRate();
private native long findCarrierFrequency();
private native void initDevice(); // init before playback
private native void loadRemotes(String filename);
private native boolean needBitrate();
private native boolean needCarrierFrequency();
public synchronized boolean openPort(int portNum)
{
currPortNum = portNum;
if (!alive)
initialize();
if (!Sage.WINDOWS_OS && devFilename.endsWith("PVR150Tuner.so") && UIManager.getLocalUI() != null)
{
if (UIManager.getLocalUI().getRouter() == null)
{
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening IR port " + portNum + ". Darn!");
return false;
}
}
else
{
// Sync the PVR150 xmt & recv
synchronized (UIManager.getLocalUI().getRouter().getDefaultInputPlugin())
{
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening IR port " + portNum + ". Darn!");
return false;
}
}
}
if (Sage.DBG) System.out.println("SUCCESSFULLY opened IRTuner on port " + portNum);
return true;
}
boolean openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening COM port " + portNum + ". Trying again!");
closeDevice();
openD = openDevice(portNum);
if (!openD)
{
if (Sage.DBG) System.out.println("Failed opening COM port " + portNum + ". Darn!");
return false;
}
}
if (Sage.DBG) System.out.println("SUCCESSFULLY opened IRTuner on port " + portNum);
return true;
}
private native boolean openDevice(int portNum);
private native void playCommand(Remote theRemote, String cmdName, int repeat);
private native Command recordCommand(String commandName);
private native void saveRemotes(String filename);
private native void init0();
private native void goodbye0();
private native boolean canMacroTune();
private native void macroTune(int number);
private String devFilename;
private Remote baseRemote;
private long nativePort;
private long nativeDllHandle;
private java.io.File remoteDir;
private java.io.File remoteDir2;
private java.util.Vector tuneVec;
private boolean asyncTuning;
private Thread asyncThread;
private int currPortNum;
private long lastIRTime;
private long globalPreroll;
private boolean alive;
}
|
|
/**
*
*/
package uk.co.jemos.podam.api;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.co.jemos.podam.common.PodamExclude;
/**
* Default abstract implementation of a {@link ClassInfoStrategy}
* <p>
* This default implementation is based on field introspection.
* </p>
*
* @author daivanov
*
* @since 5.1.0
*
*/
public abstract class AbstractClassInfoStrategy implements ClassInfoStrategy,
ClassAttributeApprover {
// ------------------->> Constants
private final Pattern GETTER_PATTERN = getGetterPattern();
private final Pattern SETTER_PATTERN = getSetterPattern();
// ------------------->> Instance / Static variables
/** The application logger. */
private static final Logger LOG = LoggerFactory.getLogger(AbstractClassInfoStrategy.class);
/**
* Set of annotations, which mark fields to be skipped from populating.
*/
private final Set<Class<? extends Annotation>> excludedAnnotations =
new HashSet<Class<? extends Annotation>>();
/**
* Map of classes to the names of fields which should be skipped when populating.
*/
private Map<Class<?>, Set<String>> excludedFields
= new HashMap<Class<?>, Set<String>>();
/**
* Map of classes to extra methods to execute.
* @since 5.3.0
**/
private final Map<Class<?>, List<Method>> extraMethods = new HashMap<Class<?>, List<Method>>();
// ------------------->> Constructors
// ------------------->> Public methods
/**
* Adds the specified {@link Annotation} to set of excluded annotations,
* if it is not already present.
*
* @param annotation
* the annotation to use as an exclusion mark
* @return itself
*/
public AbstractClassInfoStrategy addExcludedAnnotation(
final Class<? extends Annotation> annotation) {
excludedAnnotations.add(annotation);
return this;
}
/**
* Adds an extra method to execute after object initialization
* @param pojoClass The POJO class on which to execute the method
* @param methodName The name of the method to be scheduled for execution
* @param methodArgs The method's argument types
* @return this object
* @throws SecurityException If a security exception occurred while retrieving the method
* @throws NoSuchMethodException If pojoClass doesn't declare the required method
* @since 5.3.0
*/
public AbstractClassInfoStrategy addExtraMethod(
Class<?> pojoClass, String methodName, Class<?> ... methodArgs)
throws NoSuchMethodException, SecurityException {
Method method = pojoClass.getMethod(methodName, methodArgs);
List<Method> methods = extraMethods.get(pojoClass);
if (methods == null) {
methods = new ArrayList<Method>();
extraMethods.put(pojoClass, methods);
}
methods.add(method);
return this;
}
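/*
 * Usage sketch (MyPojo, its init(String) method, and the classInfoStrategy instance
 * are hypothetical):
 *
 *   classInfoStrategy.addExtraMethod(MyPojo.class, "init", String.class);
 *
 * The resolved Method is stored per class and later surfaced through
 * getExtraMethods(MyPojo.class) and getClassInfo(MyPojo.class).
 */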
/**
* Removes the specified {@link Annotation} from set of excluded annotations.
*
* @param annotation
* the annotation used as an exclusion mark
* @return itself
*/
public AbstractClassInfoStrategy removeExcludedAnnotation(
final Class<? extends Annotation> annotation) {
excludedAnnotations.remove(annotation);
return this;
}
/**
* Adds the specified field to set of excluded fields,
* if it is not already present.
*
* @param pojoClass
* a class for which fields should be skipped
* @param fieldName
* the field name to use as an exclusion mark
* @return itself
*/
public AbstractClassInfoStrategy addExcludedField(
final Class<?> pojoClass, final String fieldName) {
Set<String> fields = excludedFields.get(pojoClass);
if (fields == null) {
fields = new HashSet<String>();
excludedFields.put(pojoClass, fields);
}
fields.add(fieldName);
return this;
}
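/*
 * Usage sketch (class, field name, and the classInfoStrategy instance are hypothetical):
 *
 *   classInfoStrategy.addExcludedField(MyPojo.class, "internalCache");
 *
 * The named field is then reported by getExcludedFields(MyPojo.class) and skipped
 * when getClassInfo(MyPojo.class) builds the attribute list.
 */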
/**
* Removes the field name from set of excluded fields.
*
* @param pojoClass
* a class for which fields should be skipped
* @param fieldName
* the field name used as an exclusion mark
* @return itself
*/
public AbstractClassInfoStrategy removeExcludedField(
final Class<?> pojoClass, final String fieldName) {
Set<String> fields = excludedFields.get(pojoClass);
if (fields != null) {
fields.remove(fieldName);
}
return this;
}
/**
* {@inheritDoc}
*/
@Override
public boolean approve(ClassAttribute attribute) {
/* when an attribute has more than one setter, reject it
* if any of those setters takes more than one parameter */
if (attribute.getRawSetters().size() > 1) {
for (Method setter : attribute.getRawSetters()) {
if (setter.getParameterTypes().length > 1) {
return false;
}
}
}
return (attribute.getAttribute() != null);
}
// ------------------->> Getters / Setters
/**
* {@inheritDoc}
*/
@Override
public Set<Class<? extends Annotation>> getExcludedAnnotations() {
return excludedAnnotations;
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> getExcludedFields(final Class<?> pojoClass) {
return excludedFields.get(pojoClass);
}
/**
* {@inheritDoc}
*/
@Override
public ClassInfo getClassInfo(Class<?> pojoClass) {
Set<String> excludedAttributes = excludedFields.get(pojoClass);
if (null == excludedAttributes) {
excludedAttributes = Collections.emptySet();
}
List<Method> localExtraMethods = extraMethods.get(pojoClass);
if (null == localExtraMethods) {
localExtraMethods = Collections.emptyList();
}
return getClassInfo(pojoClass,
excludedAnnotations, excludedAttributes, this, localExtraMethods);
}
@Override
public ClassAttributeApprover getClassAttributeApprover(Class<?> pojoClass) {
return this;
}
@Override
public Collection<Method> getExtraMethods(Class<?> pojoClass) {
return extraMethods.get(pojoClass);
}
/**
* It returns a {@link ClassInfo} object for the given class
*
* @param clazz
* The class to retrieve info from
* @param excludeFieldAnnotations
* the fields marked with any of these annotations will not be
* included in the class info
* @param excludedFields
* the fields matching the given names will not be included in the class info
* @param attributeApprover
* a {@link ClassAttributeApprover} implementation,
* which defines which attributes to skip and which to process
* @param extraMethods
* extra methods to call after object initialization
* @return a {@link ClassInfo} object for the given class
*/
public ClassInfo getClassInfo(Class<?> clazz,
Set<Class<? extends Annotation>> excludeFieldAnnotations,
Set<String> excludedFields,
ClassAttributeApprover attributeApprover,
Collection<Method> extraMethods) {
if (null == attributeApprover) {
attributeApprover = DefaultClassInfoStrategy.getInstance().getClassAttributeApprover(clazz);
}
Map<String, ClassAttribute> attributeMap = new TreeMap<String, ClassAttribute>();
findPojoAttributes(clazz, attributeMap, excludeFieldAnnotations, excludedFields);
/* Approve all found attributes */
Collection<ClassAttribute> attributes = new ArrayList<ClassAttribute>(attributeMap.values());
Iterator<ClassAttribute> iter = attributes.iterator();
main : while(iter.hasNext()) {
ClassAttribute attribute = iter.next();
Field field = attribute.getAttribute();
if (excludedFields.contains(attribute.getName()) ||
(field != null && containsAnyAnnotation(field, excludeFieldAnnotations))) {
iter.remove();
continue;
}
for (Method classGetter : attribute.getRawGetters()) {
if (containsAnyAnnotation(classGetter, excludeFieldAnnotations)) {
iter.remove();
continue main;
}
}
for (Method classSetter : attribute.getRawSetters()) {
if (containsAnyAnnotation(classSetter, excludeFieldAnnotations)) {
iter.remove();
continue main;
}
}
if (!attributeApprover.approve(attribute)) {
iter.remove();
}
}
return new ClassInfo(clazz, attributes, extraMethods);
}
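/*
 * Usage sketch (MyPojo and the classInfoStrategy instance are hypothetical): a plain call
 *
 *   ClassInfo info = classInfoStrategy.getClassInfo(MyPojo.class);
 *
 * applies the registered field/annotation exclusions, runs every surviving attribute
 * through this approver, and bundles the per-class extra methods into the returned
 * ClassInfo.
 */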
// ------------------->> Private methods
/**
* Checks if the given method has any one of the annotations
*
* @param method
* the method to check for
* @param annotations
* the set of annotations to look for in the field
* @return true if the field is marked with any of the given annotations
*/
private boolean containsAnyAnnotation(Method method,
Set<Class<? extends Annotation>> annotations) {
for (Class<? extends Annotation> annotation : annotations) {
if (method.getAnnotation(annotation) != null) {
return true;
}
}
return false;
}
/**
* Checks if the given field has any one of the annotations
*
* @param field
* the field to check for
* @param annotations
* the set of annotations to look for in the field
* @return true if the field is marked with any of the given annotations
*/
private boolean containsAnyAnnotation(Field field,
Set<Class<? extends Annotation>> annotations) {
for (Class<? extends Annotation> annotation : annotations) {
if (field.getAnnotation(annotation) != null) {
return true;
}
}
return false;
}
/**
* Given a class and a set of class declared fields it returns a map of
* setters, getters and fields defined for this class
*
* @param clazz
* The class to analyze for setters
* @param attributeMap
* The {@link Map} which will be filled with class' attributes
* @param excludeAnnotations
* The {@link Set} containing annotations marking fields to be excluded
* @param excludedFields
* The {@link Set} containing field names to be excluded
*/
protected void findPojoAttributes(Class<?> clazz,
Map<String, ClassAttribute> attributeMap,
Set<Class<? extends Annotation>> excludeAnnotations,
Set<String> excludedFields) {
if (excludeAnnotations == null) {
excludeAnnotations = new HashSet<Class<? extends Annotation>>();
}
excludeAnnotations.add(PodamExclude.class);
Class<?> workClass = clazz;
while (!Object.class.equals(workClass)) {
Method[] declaredMethods = workClass.getDeclaredMethods();
Field[] declaredFields = workClass.getDeclaredFields();
for (Field field : declaredFields) {
int modifiers = field.getModifiers();
if (!Modifier.isStatic(modifiers)) {
String attributeName = field.getName();
ClassAttribute attribute = attributeMap.get(attributeName);
if (attribute != null) {
/* In case we have hidden fields, we probably want the
* latest one, but there could be corner cases */
if (attribute.getAttribute() == null) {
attribute.setAttribute(field);
}
} else {
attribute = new ClassAttribute(field.getName(),
field, Collections.<Method>emptySet(), Collections.<Method>emptySet());
attributeMap.put(field.getName(), attribute);
}
}
}
for (Method method : declaredMethods) {
/*
* Bridge methods are automatically generated by the compiler to
* deal with type erasure and they are not type safe. That is why
* they should be ignored.
*/
if (!method.isBridge() && !Modifier.isNative(method.getModifiers())) {
Pattern pattern;
if (method.getParameterTypes().length == 0
&& !method.getReturnType().equals(void.class)) {
pattern = GETTER_PATTERN;
} else if (method.getParameterTypes().length > 0
&& (method.getReturnType().equals(void.class)
|| method.getReturnType().isAssignableFrom(workClass))) {
pattern = SETTER_PATTERN;
} else {
continue;
}
String methodName = method.getName();
String attributeName = extractFieldNameFromMethod(methodName,
pattern);
if (!attributeName.equals(methodName)) {
if (!attributeName.isEmpty()) {
ClassAttribute attribute = attributeMap.get(attributeName);
if (attribute == null) {
attribute = new ClassAttribute(attributeName, null,
Collections.<Method>emptySet(),
Collections.<Method>emptySet());
attributeMap.put(attributeName, attribute);
}
Set<Method> accessors;
if (pattern == GETTER_PATTERN) {
accessors = attribute.getRawGetters();
} else {
accessors = attribute.getRawSetters();
}
accessors.add(method);
} else {
LOG.debug("Encountered accessor {}. This will be ignored.", method);
}
}
}
}
workClass = workClass.getSuperclass();
}
}
/**
* Given an accessor's name, it extracts the field name according to
* JavaBean standards.
* <p>
* Given an accessor method's name, this method returns the corresponding
* attribute name. For example: given setIntField it would return
* intField; given getIntField it would return intField; given
* isBoolField it would return boolField. The correctness of the
* return value depends on adherence to JavaBean standards.
* </p>
*
* @param methodName
* The accessor method from which the field name is required
* @param pattern
* The pattern to strip from the method name
* @return The field name corresponding to the setter
*/
protected String extractFieldNameFromMethod(String methodName, Pattern pattern) {
String candidateField = pattern.matcher(methodName).replaceFirst("");
if (!candidateField.isEmpty()
&& !candidateField.equals(methodName)
&& (candidateField.length() == 1 || !candidateField.toUpperCase().equals(candidateField))) {
candidateField = Character.toLowerCase(candidateField.charAt(0))
+ candidateField.substring(1);
}
return candidateField;
}
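/*
 * Examples with the default patterns ("^(get|is)" and "^set"):
 *   extractFieldNameFromMethod("getIntField", GETTER_PATTERN) -> "intField"
 *   extractFieldNameFromMethod("isBoolField", GETTER_PATTERN) -> "boolField"
 *   extractFieldNameFromMethod("setURL", SETTER_PATTERN)      -> "URL" (an all-uppercase remainder keeps its case)
 */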
/**
* Defines a regular expression for a getter's name
*
* @return a compiled pattern for the getter's name
*/
protected Pattern getGetterPattern() {
return Pattern.compile("^(get|is)");
}
/**
* Defines a regular expression for a setter's name
*
* @return a compiled pattern for the setter's name
*/
protected Pattern getSetterPattern() {
return Pattern.compile("^set");
}
// ------------------->> equals() / hashcode() / toString()
// ------------------->> Inner classes
}
|
|
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.server;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.server.browserlaunchers.BrowserLauncher;
import org.openqa.selenium.remote.SessionId;
import org.openqa.selenium.remote.server.log.LoggingManager;
import org.openqa.selenium.remote.server.log.PerSessionLogHandler;
import org.openqa.selenium.server.browserlaunchers.BrowserLauncherFactory;
import org.openqa.selenium.server.browserlaunchers.InvalidBrowserExecutableException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Manages browser sessions, their creation, and their closure.
* <p>
* Maintains a cache of unused and available browser sessions in case the server is reusing
* sessions. Also manages the creation and finalization of all browser sessions.
*
* @author jbevan@google.com (Jennifer Bevan)
*/
public class BrowserSessionFactory {
private static final long DEFAULT_CLEANUP_INTERVAL = 300000; // 5 minutes.
private static final long DEFAULT_MAX_IDLE_SESSION_TIME = 600000; // 10 minutes
private static Logger log = Logger.getLogger(BrowserSessionFactory.class.getName());
// cached, unused, already-launched browser sessions.
protected final Set<BrowserSessionInfo> availableSessions =
Collections.synchronizedSet(new HashSet<BrowserSessionInfo>());
// active browser sessions.
protected final Set<BrowserSessionInfo> activeSessions =
Collections.synchronizedSet(new HashSet<BrowserSessionInfo>());
private final BrowserLauncherFactory browserLauncherFactory;
private final Timer cleanupTimer;
private final long maxIdleSessionTime;
private final boolean doCleanup;
public BrowserSessionFactory(BrowserLauncherFactory blf) {
this(blf, DEFAULT_CLEANUP_INTERVAL, DEFAULT_MAX_IDLE_SESSION_TIME, true);
}
/**
* Constructor for testing purposes.
*
* @param blf an injected BrowserLauncherFactory.
* @param cleanupInterval the time between idle available session cleaning sweeps.
* @param maxIdleSessionTime the max time in ms for an available session to be idle.
* @param doCleanup whether or not the idle session cleanup thread should run.
*/
protected BrowserSessionFactory(BrowserLauncherFactory blf,
long cleanupInterval, long maxIdleSessionTime, boolean doCleanup) {
browserLauncherFactory = blf;
this.maxIdleSessionTime = maxIdleSessionTime;
this.doCleanup = doCleanup;
cleanupTimer = new Timer(/* daemon= */true);
if (doCleanup) {
cleanupTimer.schedule(new CleanupTask(), 0, cleanupInterval);
}
}
/**
* Gets a new browser session, using the SeleniumServer static fields to populate parameters.
*
* @param browserString browser name string
* @param startURL starting url
* @param extensionJs per-session user extension Javascript
* @param configuration Remote Control configuration. Cannot be null.
* @param browserConfigurations capabilities of requested browser
* @return the BrowserSessionInfo for the new browser session.
* @throws RemoteCommandException remote command exception if new session fails
*/
public BrowserSessionInfo getNewBrowserSession(String browserString, String startURL,
String extensionJs, Capabilities browserConfigurations,
RemoteControlConfiguration configuration)
throws RemoteCommandException {
return getNewBrowserSession(browserString, startURL, extensionJs,
browserConfigurations,
configuration.reuseBrowserSessions(),
configuration.isEnsureCleanSession(), configuration);
}
/**
* Gets a new browser session
*
* @param browserString browser name string
* @param startURL starting url
* @param extensionJs per-session user extension Javascript
* @param configuration Remote Control configuration. Cannot be null.
* @param useCached if a cached session should be used if one is available
* @param ensureClean if a clean session (e.g. no previous cookies) is required.
* @param browserConfigurations capabilities of requested browser
* @return the BrowserSessionInfo for the new browser session.
* @throws RemoteCommandException remote command exception if new session fails
*/
protected BrowserSessionInfo getNewBrowserSession(String browserString, String startURL,
String extensionJs, Capabilities browserConfigurations,
boolean useCached, boolean ensureClean, RemoteControlConfiguration configuration)
throws RemoteCommandException {
BrowserSessionInfo sessionInfo = null;
browserString = validateBrowserString(browserString, configuration);
if (configuration.getProxyInjectionModeArg()) {
InjectionHelper.setBrowserSideLogEnabled(configuration.isBrowserSideLogEnabled());
InjectionHelper.init();
}
if (useCached) {
log.info("grabbing available session...");
sessionInfo = grabAvailableSession(browserString, startURL);
}
// couldn't find one in the cache, or not reusing sessions.
if (null == sessionInfo) {
log.info("creating new remote session");
sessionInfo = createNewRemoteSession(browserString, startURL, extensionJs,
browserConfigurations, ensureClean, configuration);
}
assert null != sessionInfo;
if (false/* ensureClean */) {
// need to add this to the launcher API.
// sessionInfo.launcher.hideCurrentSessionData();
}
return sessionInfo;
}
/**
* Ends all browser sessions.
* <p>
* Active and available but inactive sessions are ended.
* @param configuration remote control configuration
*/
protected void endAllBrowserSessions(RemoteControlConfiguration configuration) {
boolean done = false;
Set<BrowserSessionInfo> allSessions = new HashSet<>();
while (!done) {
// to avoid concurrent modification exceptions...
synchronized (activeSessions) {
for (BrowserSessionInfo sessionInfo : activeSessions) {
allSessions.add(sessionInfo);
}
}
synchronized (availableSessions) {
for (BrowserSessionInfo sessionInfo : availableSessions) {
allSessions.add(sessionInfo);
}
}
for (BrowserSessionInfo sessionInfo : allSessions) {
endBrowserSession(true, sessionInfo.sessionId, configuration);
}
done = (0 == activeSessions.size() && 0 == availableSessions.size());
allSessions.clear();
if (doCleanup) {
cleanupTimer.cancel();
}
}
}
/**
* Ends a browser session, using SeleniumServer static fields to populate parameters.
*
* @param sessionId the id of the session to be ended
* @param configuration Remote Control configuration. Cannot be null.
*/
public void endBrowserSession(String sessionId, RemoteControlConfiguration configuration) {
endBrowserSession(false, sessionId, configuration, configuration.isEnsureCleanSession());
}
/**
* Ends a browser session, using SeleniumServer static fields to populate parameters.
*
* @param forceClose if the session should not be reused
* @param sessionId the id of the session to be ended
* @param configuration Remote Control configuration. Cannot be null.
*/
public void endBrowserSession(boolean forceClose, String sessionId,
RemoteControlConfiguration configuration) {
endBrowserSession(forceClose, sessionId, configuration, configuration.isEnsureCleanSession());
}
/**
* Ends a browser session.
*
* @param forceClose if the session should not be reused
* @param sessionId the id of the session to be ended
* @param configuration Remote Control configuration. Cannot be null.
* @param ensureClean if clean sessions (e.g. no leftover cookies) are required.
*/
protected void endBrowserSession(boolean forceClose, String sessionId,
RemoteControlConfiguration configuration,
boolean ensureClean) {
BrowserSessionInfo sessionInfo = lookupInfoBySessionId(sessionId, activeSessions);
if (null != sessionInfo) {
activeSessions.remove(sessionInfo);
try {
if (forceClose || !configuration.reuseBrowserSessions()) {
shutdownBrowserAndClearSessionData(sessionInfo);
} else {
if (null != sessionInfo.session) { // optional field
sessionInfo.session.reset(sessionInfo.baseUrl);
}
// mark what time this session was ended
sessionInfo.lastClosedAt = System.currentTimeMillis();
availableSessions.add(sessionInfo);
}
} finally {
LoggingManager.perSessionLogHandler().removeSessionLogs(new SessionId(sessionId));
if (ensureClean) {
// need to add this to the launcher API.
// sessionInfo.launcher.restoreOriginalSessionData();
}
}
} else {
// look for it in the available sessions.
sessionInfo = lookupInfoBySessionId(sessionId, availableSessions);
if (null != sessionInfo && (forceClose || !configuration.reuseBrowserSessions())) {
try {
availableSessions.remove(sessionInfo);
shutdownBrowserAndClearSessionData(sessionInfo);
} finally {
LoggingManager.perSessionLogHandler().removeSessionLogs(new SessionId(sessionId));
if (ensureClean) {
// sessionInfo.launcher.restoreOriginalSessionData();
}
}
}
}
}
/**
* Shuts down this browser session's launcher and clears out its session data (if session is not
* null).
*
* @param sessionInfo the browser session to end.
*/
protected void shutdownBrowserAndClearSessionData(BrowserSessionInfo sessionInfo) {
try {
sessionInfo.launcher.close(); // can throw RuntimeException
} finally {
if (null != sessionInfo.session) {
FrameGroupCommandQueueSet.clearQueueSet(sessionInfo.sessionId);
}
}
}
/**
* Rewrites the given browser string based on server settings.
*
* @param inputString the input browser string
* @return a possibly-modified browser string.
* @throws IllegalArgumentException if inputString is null.
*/
private String validateBrowserString(String inputString, RemoteControlConfiguration configuration)
throws IllegalArgumentException {
String browserString = inputString;
if (configuration.getForcedBrowserMode() != null) {
browserString = configuration.getForcedBrowserMode();
log.info("overriding browser mode w/ forced browser mode setting: " + browserString);
}
if (configuration.getProxyInjectionModeArg() && browserString.equals("*iexplore")) {
log.warning("running in proxy injection mode, but you used a *iexplore browser string; this is "
+
"almost surely inappropriate, so I'm changing it to *piiexplore...");
browserString = "*piiexplore";
} else if (configuration.getProxyInjectionModeArg() && (browserString.equals("*firefox")
|| browserString.equals("*firefox2") || browserString.equals("*firefox3"))) {
log.warning("running in proxy injection mode, but you used a " + browserString +
" browser string; this is " +
"almost surely inappropriate, so I'm changing it to *pifirefox...");
browserString = "*pifirefox";
}
if (null == browserString) {
throw new IllegalArgumentException("browser string may not be null");
}
return browserString;
}
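// Examples of the rewriting above when proxy injection mode is enabled:
//   "*iexplore" -> "*piiexplore"; "*firefox", "*firefox2", "*firefox3" -> "*pifirefox".
// A non-null forced browser mode always replaces the requested browser string first.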
/**
* Retrieves an available, unused session from the cache.
*
* @param browserString the necessary browser for a suitable session
* @param baseUrl the necessary baseUrl for a suitable session
* @return the session info of the cached session, null if none found.
*/
protected BrowserSessionInfo grabAvailableSession(String browserString,
String baseUrl) {
BrowserSessionInfo sessionInfo = null;
synchronized (availableSessions) {
sessionInfo = lookupInfoByBrowserAndUrl(browserString, baseUrl,
availableSessions);
if (null != sessionInfo) {
availableSessions.remove(sessionInfo);
}
}
if (null != sessionInfo) {
activeSessions.add(sessionInfo);
}
return sessionInfo;
}
/**
* Isolated dependency
*
* @param sessionId session id
* @param port port
* @param configuration Remote Control Configuration
* @return a new FrameGroupCommandQueueSet instance
*/
protected FrameGroupCommandQueueSet makeQueueSet(String sessionId, int port,
RemoteControlConfiguration configuration) {
return FrameGroupCommandQueueSet.makeQueueSet(sessionId,
configuration.getPortDriversShouldContact(), configuration);
}
/**
* Isolated dependency
*
* @param sessionId session id
* @return an existing FrameGroupCommandQueueSet instance
*/
protected FrameGroupCommandQueueSet getQueueSet(String sessionId) {
return FrameGroupCommandQueueSet.getQueueSet(sessionId);
}
/**
* Creates and tries to open a new session.
*
* @param browserString browser name string
* @param startURL starting url
* @param extensionJs per-session user extension javascript
* @param configuration Remote Control configuration. Cannot be null.
* @param browserConfiguration capabilities of requested browser
* @param ensureClean if a clean session is required
* @return the BrowserSessionInfo of the new session.
* @throws RemoteCommandException if the browser failed to launch and request work in the required
* amount of time.
*/
protected BrowserSessionInfo createNewRemoteSession(String browserString, String startURL,
String extensionJs, Capabilities browserConfiguration, boolean ensureClean,
RemoteControlConfiguration configuration)
throws RemoteCommandException {
final FrameGroupCommandQueueSet queueSet;
final BrowserSessionInfo sessionInfo;
final BrowserLauncher launcher;
String sessionId;
sessionId = UUID.randomUUID().toString().replace("-", "");
if ("*webdriver".equals(browserString) && browserConfiguration != null) {
Object id = browserConfiguration.getCapability("webdriver.remote.sessionid");
if (id != null && id instanceof String) {
sessionId = (String) id;
}
}
queueSet = makeQueueSet(sessionId, configuration.getPortDriversShouldContact(), configuration);
queueSet.setExtensionJs(extensionJs);
try {
launcher =
browserLauncherFactory.getBrowserLauncher(browserString, sessionId, configuration,
browserConfiguration);
} catch (InvalidBrowserExecutableException e) {
throw new RemoteCommandException(e.getMessage(), "");
}
sessionInfo = new BrowserSessionInfo(sessionId, browserString, startURL, launcher, queueSet);
SessionIdTracker.setLastSessionId(sessionId);
log.info("Allocated session " + sessionId + " for " + startURL + ", launching...");
final PerSessionLogHandler perSessionLogHandler = LoggingManager.perSessionLogHandler();
perSessionLogHandler.attachToCurrentThread(new SessionId(sessionId));
try {
launcher.launchRemoteSession(startURL);
queueSet.waitForLoad(configuration.getTimeoutInSeconds() * 1000L);
// TODO DGF log4j only
// NDC.push("sessionId="+sessionId);
FrameGroupCommandQueueSet queue = getQueueSet(sessionId);
queue.doCommand("setContext", sessionId, "");
activeSessions.add(sessionInfo);
return sessionInfo;
} catch (Exception e) {
/*
* At this point the session might not have been added to either the available or the active
* sessions. This session is unlikely to be of any practical use, so we need to make sure we
* close the browser and clear all session data.
*/
log.log(Level.SEVERE,
"Failed to start new browser session, shutdown browser and clear all session data", e);
shutdownBrowserAndClearSessionData(sessionInfo);
throw new RemoteCommandException("Error while launching browser", "", e);
} finally {
perSessionLogHandler.detachFromCurrentThread();
}
}
/**
* Adds a browser session that was not created by this factory to the set of active sessions.
* <p>
* Allows for creation of unmanaged sessions (i.e. no FrameGroupCommandQueueSet) for tasks such as
* running the HTML tests (see HTMLLauncher.java). All fields other than session are required to
* be non-null.
*
* @param sessionInfo the session info to register.
* @return true if session was registered
*/
protected boolean registerExternalSession(BrowserSessionInfo sessionInfo) {
boolean result = false;
if (BrowserSessionInfo.isValid(sessionInfo)) {
activeSessions.add(sessionInfo);
result = true;
}
return result;
}
/**
* Removes a previously registered external browser session from the list of active sessions.
*
* @param sessionInfo the session to remove.
*/
protected void deregisterExternalSession(BrowserSessionInfo sessionInfo) {
activeSessions.remove(sessionInfo);
}
/**
* Looks up a session in the named set by session id
*
* @param sessionId the session id to find
* @param set the Set to inspect
* @return the matching BrowserSessionInfo or null if not found.
*/
protected BrowserSessionInfo lookupInfoBySessionId(String sessionId,
Set<BrowserSessionInfo> set) {
BrowserSessionInfo result = null;
synchronized (set) {
for (BrowserSessionInfo info : set) {
if (info.sessionId.equals(sessionId)) {
result = info;
break;
}
}
}
return result;
}
/**
* Looks up a session in the named set by browser string and base URL
*
* @param browserString the browser string to match
* @param baseUrl the base URL to match.
* @param set the Set to inspect
* @return the matching BrowserSessionInfo or null if not found.
*/
protected BrowserSessionInfo lookupInfoByBrowserAndUrl(String browserString,
String baseUrl, Set<BrowserSessionInfo> set) {
BrowserSessionInfo result = null;
synchronized (set) {
for (BrowserSessionInfo info : set) {
if (info.browserString.equals(browserString)
&& info.baseUrl.equals(baseUrl)) {
result = info;
break;
}
}
}
return result;
}
protected void removeIdleAvailableSessions() {
long now = System.currentTimeMillis();
synchronized (availableSessions) {
Iterator<BrowserSessionInfo> iter = availableSessions.iterator();
while (iter.hasNext()) {
BrowserSessionInfo info = iter.next();
if (now - info.lastClosedAt > maxIdleSessionTime) {
iter.remove();
shutdownBrowserAndClearSessionData(info);
}
}
}
}
/**
* for testing only
* @param sessionId session id
* @return true if it has one
*/
protected boolean hasActiveSession(String sessionId) {
BrowserSessionInfo info = lookupInfoBySessionId(sessionId, activeSessions);
return (null != info);
}
/**
* for testing only
* @param sessionId session id
* @return true if it has one
*/
protected boolean hasAvailableSession(String sessionId) {
BrowserSessionInfo info = lookupInfoBySessionId(sessionId, availableSessions);
return (null != info);
}
/**
* for testing only
* @param sessionInfo browser session info
*/
protected void addToAvailableSessions(BrowserSessionInfo sessionInfo) {
availableSessions.add(sessionInfo);
}
/**
* Collection class to hold the objects associated with a browser session.
*
* @author jbevan@google.com (Jennifer Bevan)
*/
public static class BrowserSessionInfo {
public BrowserSessionInfo(String sessionId, String browserString,
String baseUrl, BrowserLauncher launcher,
FrameGroupCommandQueueSet session) {
this.sessionId = sessionId;
this.browserString = browserString;
this.baseUrl = baseUrl;
this.launcher = launcher;
this.session = session; // optional field; may be null.
lastClosedAt = 0;
}
public final String sessionId;
public final String browserString;
public final String baseUrl;
public final BrowserLauncher launcher;
public final FrameGroupCommandQueueSet session;
public long lastClosedAt;
/**
* Browser sessions require the session id, the browser, the base URL, and the launcher. They
* don't actually require the session to be set up as a FrameGroupCommandQueueSet.
*
* @param sessionInfo the sessionInfo to validate.
* @return true if all fields excepting session are non-null.
*/
protected static boolean isValid(BrowserSessionInfo sessionInfo) {
boolean result = (null != sessionInfo.sessionId
&& null != sessionInfo.browserString
&& null != sessionInfo.baseUrl
&& null != sessionInfo.launcher);
return result;
}
}
/**
* TimerTask that looks for unused sessions in the availableSessions collection.
*
* @author jbevan@google.com (Jennifer Bevan)
*/
protected class CleanupTask extends TimerTask {
@Override
public void run() {
removeIdleAvailableSessions();
}
}
}
|
|
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hdfs.MiniDFSCluster;
/**
* Test of a long-lived scanner validating as we go.
*/
public class TestScanner extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
private static final byte [] FIRST_ROW = HConstants.EMPTY_START_ROW;
private static final byte [][] COLS = { HConstants.CATALOG_FAMILY };
private static final byte [][] EXPLICIT_COLS = {
HConstants.REGIONINFO_QUALIFIER, HConstants.SERVER_QUALIFIER,
// TODO ryan
//HConstants.STARTCODE_QUALIFIER
};
static final HTableDescriptor TESTTABLEDESC =
new HTableDescriptor("testscanner");
static {
TESTTABLEDESC.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY,
10, // Ten is an arbitrary number. Keep versions to help debugging.
Compression.Algorithm.NONE.getName(), false, true, 8 * 1024,
HConstants.FOREVER, false));
}
/** HRegionInfo for root region */
public static final HRegionInfo REGION_INFO =
new HRegionInfo(TESTTABLEDESC, HConstants.EMPTY_BYTE_ARRAY,
HConstants.EMPTY_BYTE_ARRAY);
private static final byte [] ROW_KEY = REGION_INFO.getRegionName();
private static final long START_CODE = Long.MAX_VALUE;
private MiniDFSCluster cluster = null;
private HRegion r;
private HRegionIncommon region;
@Override
public void setUp() throws Exception {
cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
// Set the hbase.rootdir to be the home directory in mini dfs.
this.conf.set(HConstants.HBASE_DIR,
this.cluster.getFileSystem().getHomeDirectory().toString());
super.setUp();
}
/**
* Test basic stop row filter works.
* @throws Exception
*/
public void testStopRow() throws Exception {
byte [] startrow = Bytes.toBytes("bbb");
byte [] stoprow = Bytes.toBytes("ccc");
try {
this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
addContent(this.r, HConstants.CATALOG_FAMILY);
List<KeyValue> results = new ArrayList<KeyValue>();
// Do simple test of getting one row only first.
Scan scan = new Scan(Bytes.toBytes("abc"), Bytes.toBytes("abd"));
scan.addFamily(HConstants.CATALOG_FAMILY);
InternalScanner s = r.getScanner(scan);
int count = 0;
while (s.next(results)) {
count++;
}
s.close();
assertEquals(1, count);
// Now do something a bit more involved.
scan = new Scan(startrow, stoprow);
scan.addFamily(HConstants.CATALOG_FAMILY);
s = r.getScanner(scan);
count = 0;
KeyValue kv = null;
results = new ArrayList<KeyValue>();
for (boolean first = true; s.next(results);) {
kv = results.get(0);
if (first) {
assertTrue(Bytes.BYTES_COMPARATOR.compare(startrow, kv.getRow()) == 0);
first = false;
}
count++;
}
assertTrue(Bytes.BYTES_COMPARATOR.compare(stoprow, kv.getRow()) > 0);
// We got something back.
assertTrue(count > 10);
s.close();
} finally {
this.r.close();
this.r.getLog().closeAndDelete();
shutdownDfs(this.cluster);
}
}
/** The test!
* @throws IOException
*/
public void testScanner() throws IOException {
try {
r = createNewHRegion(TESTTABLEDESC, null, null);
region = new HRegionIncommon(r);
// Write information to the meta table
Put put = new Put(ROW_KEY);
put.setTimeStamp(System.currentTimeMillis());
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
DataOutputStream s = new DataOutputStream(byteStream);
REGION_INFO.write(s);
put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,
byteStream.toByteArray());
region.put(put);
// What we just committed is in the memstore. Verify that we can get
// it back both with scanning and get
scan(false, null);
getRegionInfo();
// Close and re-open
r.close();
r = openClosedRegion(r);
region = new HRegionIncommon(r);
// Verify we can get the data back now that it is on disk.
scan(false, null);
getRegionInfo();
// Store some new information
HServerAddress address = new HServerAddress("foo.bar.com:1234");
put = new Put(ROW_KEY);
put.setTimeStamp(System.currentTimeMillis());
put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
Bytes.toBytes(address.toString()));
// put.add(HConstants.COL_STARTCODE, Bytes.toBytes(START_CODE));
region.put(put);
// Validate that we can still get the HRegionInfo, even though it is in
// an older row on disk and there is a newer row in the memstore
scan(true, address.toString());
getRegionInfo();
// flush cache
region.flushcache();
// Validate again
scan(true, address.toString());
getRegionInfo();
// Close and reopen
r.close();
r = openClosedRegion(r);
region = new HRegionIncommon(r);
// Validate again
scan(true, address.toString());
getRegionInfo();
// Now update the information again
address = new HServerAddress("bar.foo.com:4321");
put = new Put(ROW_KEY);
put.setTimeStamp(System.currentTimeMillis());
put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
Bytes.toBytes(address.toString()));
region.put(put);
// Validate again
scan(true, address.toString());
getRegionInfo();
// flush cache
region.flushcache();
// Validate again
scan(true, address.toString());
getRegionInfo();
// Close and reopen
r.close();
r = openClosedRegion(r);
region = new HRegionIncommon(r);
// Validate again
scan(true, address.toString());
getRegionInfo();
// clean up
r.close();
r.getLog().closeAndDelete();
} finally {
shutdownDfs(cluster);
}
}
/** Compare the HRegionInfo we read from HBase to what we stored */
private void validateRegionInfo(byte [] regionBytes) throws IOException {
HRegionInfo info =
(HRegionInfo) Writables.getWritable(regionBytes, new HRegionInfo());
assertEquals(REGION_INFO.getRegionId(), info.getRegionId());
assertEquals(0, info.getStartKey().length);
assertEquals(0, info.getEndKey().length);
assertEquals(0, Bytes.compareTo(info.getRegionName(), REGION_INFO.getRegionName()));
assertEquals(0, info.getTableDesc().compareTo(REGION_INFO.getTableDesc()));
}
/** Use a scanner to get the region info and then validate the results */
private void scan(boolean validateStartcode, String serverName)
throws IOException {
InternalScanner scanner = null;
Scan scan = null;
List<KeyValue> results = new ArrayList<KeyValue>();
byte [][][] scanColumns = {
COLS,
EXPLICIT_COLS
};
for(int i = 0; i < scanColumns.length; i++) {
try {
scan = new Scan(FIRST_ROW);
for (int ii = 0; ii < EXPLICIT_COLS.length; ii++) {
scan.addColumn(COLS[0], EXPLICIT_COLS[ii]);
}
scanner = r.getScanner(scan);
while (scanner.next(results)) {
assertTrue(hasColumn(results, HConstants.CATALOG_FAMILY,
HConstants.REGIONINFO_QUALIFIER));
byte [] val = getColumn(results, HConstants.CATALOG_FAMILY,
HConstants.REGIONINFO_QUALIFIER).getValue();
validateRegionInfo(val);
if(validateStartcode) {
// assertTrue(hasColumn(results, HConstants.CATALOG_FAMILY,
// HConstants.STARTCODE_QUALIFIER));
// val = getColumn(results, HConstants.CATALOG_FAMILY,
// HConstants.STARTCODE_QUALIFIER).getValue();
assertNotNull(val);
assertFalse(val.length == 0);
long startCode = Bytes.toLong(val);
assertEquals(START_CODE, startCode);
}
if(serverName != null) {
assertTrue(hasColumn(results, HConstants.CATALOG_FAMILY,
HConstants.SERVER_QUALIFIER));
val = getColumn(results, HConstants.CATALOG_FAMILY,
HConstants.SERVER_QUALIFIER).getValue();
assertNotNull(val);
assertFalse(val.length == 0);
String server = Bytes.toString(val);
assertEquals(0, server.compareTo(serverName));
}
results.clear();
}
} finally {
InternalScanner s = scanner;
scanner = null;
if(s != null) {
s.close();
}
}
}
}
private boolean hasColumn(final List<KeyValue> kvs, final byte [] family,
final byte [] qualifier) {
for (KeyValue kv: kvs) {
if (kv.matchingFamily(family) && kv.matchingQualifier(qualifier)) {
return true;
}
}
return false;
}
private KeyValue getColumn(final List<KeyValue> kvs, final byte [] family,
final byte [] qualifier) {
for (KeyValue kv: kvs) {
if (kv.matchingFamily(family) && kv.matchingQualifier(qualifier)) {
return kv;
}
}
return null;
}
/** Use get to retrieve the HRegionInfo and validate it */
private void getRegionInfo() throws IOException {
Get get = new Get(ROW_KEY);
get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
Result result = region.get(get, null);
byte [] bytes = result.value();
validateRegionInfo(bytes);
}
/**
* Tests to do a sync flush during the middle of a scan. This is testing the StoreScanner
* update readers code essentially. This is not highly concurrent, since it's all 1 thread.
* HBase-910.
* @throws Exception
*/
public void testScanAndSyncFlush() throws Exception {
this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
HRegionIncommon hri = new HRegionIncommon(r);
try {
LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, false)); // do a sync flush.
} catch (Exception e) {
LOG.error("Failed", e);
throw e;
} finally {
this.r.close();
this.r.getLog().closeAndDelete();
shutdownDfs(cluster);
}
}
/**
* Tests to do a concurrent flush (using a 2nd thread) while scanning. This tests both
* the StoreScanner update readers and the transition from memstore -> snapshot -> store file.
*
* @throws Exception
*/
public void testScanAndRealConcurrentFlush() throws Exception {
this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
HRegionIncommon hri = new HRegionIncommon(r);
try {
LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
} catch (Exception e) {
LOG.error("Failed", e);
throw e;
} finally {
this.r.close();
this.r.getLog().closeAndDelete();
shutdownDfs(cluster);
}
}
/*
* @param hri Region
* @param flushIndex At what row we start the flush.
* @param concurrent if the flush should be concurrent or sync.
* @return Count of rows found.
* @throws IOException
*/
private int count(final HRegionIncommon hri, final int flushIndex,
boolean concurrent)
throws IOException {
LOG.info("Taking out counting scan");
ScannerIncommon s = hri.getScanner(HConstants.CATALOG_FAMILY, EXPLICIT_COLS,
HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP);
List<KeyValue> values = new ArrayList<KeyValue>();
int count = 0;
boolean justFlushed = false;
while (s.next(values)) {
if (justFlushed) {
LOG.info("after next() just after next flush");
justFlushed=false;
}
count++;
if (flushIndex == count) {
LOG.info("Starting flush at flush index " + flushIndex);
Thread t = new Thread() {
public void run() {
try {
hri.flushcache();
LOG.info("Finishing flush");
} catch (IOException e) {
LOG.info("Failed flush cache");
}
}
};
if (concurrent) {
t.start(); // concurrently flush.
} else {
t.run(); // sync flush
}
LOG.info("Continuing on after kicking off background flush");
justFlushed = true;
}
}
s.close();
LOG.info("Found " + count + " items");
return count;
}
}
|
|
/*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.dmdirc.addons.identd;
import com.dmdirc.config.provider.AggregateConfigProvider;
import com.dmdirc.config.provider.ReadOnlyConfigProvider;
import com.dmdirc.interfaces.Connection;
import com.dmdirc.interfaces.ConnectionManager;
import com.dmdirc.interfaces.User;
import com.dmdirc.util.LogUtils;
import com.dmdirc.util.io.StreamUtils;
import com.dmdirc.util.system.SystemInfo;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The IdentClient responds to an ident request.
*/
public class IdentClient implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(IdentClient.class);
/** The IdentdServer that owns this Client. */
private final IdentdServer server;
/** The Socket that we are in charge of. */
private final Socket socket;
/** The Thread in use for this client. */
private volatile Thread thread;
/** Server manager. */
private final ConnectionManager connectionManager;
/** Global configuration to read settings from. */
private final AggregateConfigProvider config;
/** This plugin's settings domain. */
private final String domain;
/** System wrapper to use. */
private final SystemInfo systemInfo;
/**
* Create the IdentClient.
*/
public IdentClient(final IdentdServer server, final Socket socket,
final ConnectionManager connectionManager, final AggregateConfigProvider config,
final String domain, final SystemInfo systemInfo) {
this.server = server;
this.socket = socket;
this.connectionManager = connectionManager;
this.config = config;
this.domain = domain;
this.systemInfo = systemInfo;
}
/**
* Starts this ident client in a new thread.
*/
public void start() {
thread = new Thread(this);
thread.start();
}
/**
* Process this connection.
*/
@Override
public void run() {
final Thread thisThread = Thread.currentThread();
try (PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()))) {
final String inputLine;
if ((inputLine = in.readLine()) != null) {
out.println(getIdentResponse(inputLine, config));
}
} catch (IOException e) {
if (thisThread == thread) {
LOG.error(LogUtils.USER_ERROR, "ClientSocket Error: {}", e.getMessage(), e);
}
} finally {
StreamUtils.close(socket);
server.delClient(this);
}
}
/**
* Get the ident response for a given line. Complies with rfc1413
* (http://www.faqs.org/rfcs/rfc1413.html)
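* <p>Example exchange (hypothetical port numbers, matching the response formats built below): a
* request of {@code "6113, 23"} may be answered with {@code "6113 , 23 : USERID : UNIX : someuser"}
* on success, or with an error reply such as {@code "6113 , 23 : ERROR : NO-USER"}.</p>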
*
* @param input Line to generate response for
* @param config The config manager to use for settings
*
* @return the ident response for the given line
*/
protected String getIdentResponse(final String input, final ReadOnlyConfigProvider config) {
final String unescapedInput = unescapeString(input);
final String[] bits = unescapedInput.replaceAll("\\s+", "").split(",", 2);
if (bits.length < 2) {
return String.format("%s : ERROR : X-INVALID-INPUT", escapeString(unescapedInput));
}
final int myPort;
final int theirPort;
try {
myPort = Integer.parseInt(bits[0].trim());
theirPort = Integer.parseInt(bits[1].trim());
} catch (NumberFormatException e) {
return String.format("%s , %s : ERROR : X-INVALID-INPUT", escapeString(bits[0]),
escapeString(bits[1]));
}
if (myPort > 65535 || myPort < 1 || theirPort > 65535 || theirPort < 1) {
return String.format("%d , %d : ERROR : INVALID-PORT", myPort, theirPort);
}
final Connection connection = getConnectionByPort(myPort);
if (!config.getOptionBool(domain, "advanced.alwaysOn") && (connection == null
|| config.getOptionBool(domain, "advanced.isNoUser"))) {
return String.format("%d , %d : ERROR : NO-USER", myPort, theirPort);
}
if (config.getOptionBool(domain, "advanced.isHiddenUser")) {
return String.format("%d , %d : ERROR : HIDDEN-USER", myPort, theirPort);
}
final String osName = systemInfo.getProperty("os.name").toLowerCase();
final String os;
final String customSystem = config.getOption(domain, "advanced.customSystem");
if (config.getOptionBool(domain, "advanced.useCustomSystem") && customSystem
!= null && !customSystem.isEmpty() && customSystem.length() < 513) {
os = customSystem;
} else {
// Tad excessive maybe, but complete!
// Based on: http://mindprod.com/jgloss/properties.html
// and the SYSTEM NAMES section of rfc1340 (http://www.faqs.org/rfcs/rfc1340.html)
if (osName.startsWith("windows")) {
os = "WIN32";
} else if (osName.startsWith("mac")) {
os = "MACOS";
} else if (osName.startsWith("linux")) {
os = "UNIX";
} else if (osName.contains("bsd")) {
os = "UNIX-BSD";
} else if ("os/2".equals(osName)) {
os = "OS/2";
} else if (osName.contains("unix")) {
os = "UNIX";
} else if ("irix".equals(osName)) {
os = "IRIX";
} else {
os = "UNKNOWN";
}
}
final String customName = config.getOption(domain, "general.customName");
final String username;
if (config.getOptionBool(domain, "general.useCustomName") && customName
!= null && !customName.isEmpty() && customName.length() < 513) {
username = customName;
} else if (connection != null && config.getOptionBool(domain, "general.useNickname")) {
username = connection.getLocalUser().map(User::getNickname).orElse("Unknown");
} else if (connection != null && config.getOptionBool(domain, "general.useUsername")) {
username = connection.getLocalUser().flatMap(User::getUsername).orElse("Unknown");
} else {
username = systemInfo.getProperty("user.name");
}
return String.format("%d , %d : USERID : %s : %s", myPort, theirPort, escapeString(os),
escapeString(username));
}
/**
* Escape special chars.
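* For example, {@code "a b:c"} becomes {@code "a\ b\:c"}.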
*
* @param str String to escape
*
* @return Escaped string.
*/
public static String escapeString(final String str) {
return str.replace("\\", "\\\\").replace(":", "\\:").replace(",", "\\,").replace(" ", "\\ ");
}
/**
* Unescape special chars.
*
* @param str String to unescape
*
* @return Unescaped string.
*/
public static String unescapeString(final String str) {
return str.replace("\\:", ":").replace("\\ ", " ").replace("\\,", ",").replace("\\\\", "\\");
}
/**
* Close this IdentClient.
*/
public void close() {
if (thread != null) {
final Thread tmpThread = thread;
thread = null;
if (tmpThread != null) {
tmpThread.interrupt();
}
StreamUtils.close(socket);
}
}
/**
* Retrieves the connection that is bound to the specified local port.
*
* @param port Port to check for
*
* @return The connection using the given local port, or {@code null} if none matches
*/
protected Connection getConnectionByPort(final int port) {
for (Connection connection : connectionManager.getConnections()) {
if (connection.getParser().get().getLocalPort() == port) {
return connection;
}
}
return null;
}
}
|
|
/*
* Copyright 2002-2006,2009 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opensymphony.xwork2.ognl;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.TextProvider;
import com.opensymphony.xwork2.XWorkConstants;
import com.opensymphony.xwork2.XWorkException;
import com.opensymphony.xwork2.conversion.impl.XWorkConverter;
import com.opensymphony.xwork2.inject.Container;
import com.opensymphony.xwork2.inject.Inject;
import com.opensymphony.xwork2.ognl.accessor.CompoundRootAccessor;
import com.opensymphony.xwork2.util.ClearableValueStack;
import com.opensymphony.xwork2.util.CompoundRoot;
import com.opensymphony.xwork2.util.MemberAccessValueStack;
import com.opensymphony.xwork2.util.ValueStack;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;
import com.opensymphony.xwork2.util.logging.LoggerUtils;
import com.opensymphony.xwork2.util.reflection.ReflectionContextState;
import ognl.*;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Ognl implementation of a value stack that allows for dynamic Ognl expressions to be evaluated against it. When evaluating an expression,
* the stack will be searched down the stack, from the latest objects pushed in to the earliest, looking for a bean with a getter or setter
* for the given property or a method of the given name (depending on the expression being evaluated).
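* <p>For example (hypothetical beans): with a model pushed first and an action pushed on top of it,
* evaluating {@code "name"} first tries {@code getName()} on the action and, if no such property is
* found there, falls back to {@code getName()} on the model further down the stack.</p>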
*
* @author Patrick Lightbody
* @author tm_jee
* @version $Date: 2013-05-28 22:14:15 +0200 (Tue, 28 May 2013) $ $Id: OgnlValueStack.java 1487092 2013-05-28 20:14:15Z lukaszlenart $
*/
public class OgnlValueStack implements Serializable, ValueStack, ClearableValueStack, MemberAccessValueStack {
public static final String THROW_EXCEPTION_ON_FAILURE = OgnlValueStack.class.getName() + ".throwExceptionOnFailure";
private static final long serialVersionUID = 370737852934925530L;
private static final String MAP_IDENTIFIER_KEY = "com.opensymphony.xwork2.util.OgnlValueStack.MAP_IDENTIFIER_KEY";
private static final Logger LOG = LoggerFactory.getLogger(OgnlValueStack.class);
CompoundRoot root;
transient Map<String, Object> context;
Class defaultType;
Map<Object, Object> overrides;
transient OgnlUtil ognlUtil;
transient SecurityMemberAccess securityMemberAccess;
private boolean devMode;
private boolean logMissingProperties;
protected OgnlValueStack(XWorkConverter xworkConverter, CompoundRootAccessor accessor, TextProvider prov, boolean allowStaticAccess) {
setRoot(xworkConverter, accessor, new CompoundRoot(), allowStaticAccess);
push(prov);
}
protected OgnlValueStack(ValueStack vs, XWorkConverter xworkConverter, CompoundRootAccessor accessor, boolean allowStaticAccess) {
setRoot(xworkConverter, accessor, new CompoundRoot(vs.getRoot()), allowStaticAccess);
}
@Inject
public void setOgnlUtil(OgnlUtil ognlUtil) {
this.ognlUtil = ognlUtil;
}
protected void setRoot(XWorkConverter xworkConverter, CompoundRootAccessor accessor, CompoundRoot compoundRoot,
boolean allowStaticMethodAccess) {
this.root = compoundRoot;
this.securityMemberAccess = new SecurityMemberAccess(allowStaticMethodAccess);
this.context = Ognl.createDefaultContext(this.root, accessor, new OgnlTypeConverterWrapper(xworkConverter), securityMemberAccess);
context.put(VALUE_STACK, this);
Ognl.setClassResolver(context, accessor);
((OgnlContext) context).setTraceEvaluations(false);
((OgnlContext) context).setKeepLastEvaluation(false);
}
@Inject(XWorkConstants.DEV_MODE)
public void setDevMode(String mode) {
devMode = "true".equalsIgnoreCase(mode);
}
@Inject(value = "logMissingProperties", required = false)
public void setLogMissingProperties(String logMissingProperties) {
this.logMissingProperties = "true".equalsIgnoreCase(logMissingProperties);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#getContext()
*/
public Map<String, Object> getContext() {
return context;
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#setDefaultType(java.lang.Class)
*/
public void setDefaultType(Class defaultType) {
this.defaultType = defaultType;
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#setExprOverrides(java.util.Map)
*/
public void setExprOverrides(Map<Object, Object> overrides) {
if (this.overrides == null) {
this.overrides = overrides;
} else {
this.overrides.putAll(overrides);
}
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#getExprOverrides()
*/
public Map<Object, Object> getExprOverrides() {
return this.overrides;
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#getRoot()
*/
public CompoundRoot getRoot() {
return root;
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#setParameter(String, Object)
*/
public void setParameter(String expr, Object value) {
setValue(expr, value, devMode, false);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#setValue(java.lang.String, java.lang.Object)
*/
public void setValue(String expr, Object value) {
setValue(expr, value, devMode);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#setValue(java.lang.String, java.lang.Object, boolean)
*/
public void setValue(String expr, Object value, boolean throwExceptionOnFailure) {
setValue(expr, value, throwExceptionOnFailure, true);
}
private void setValue(String expr, Object value, boolean throwExceptionOnFailure, boolean evalExpression) {
Map<String, Object> context = getContext();
try {
trySetValue(expr, value, throwExceptionOnFailure, context, evalExpression);
} catch (OgnlException e) {
handleOgnlException(expr, value, throwExceptionOnFailure, e);
} catch (RuntimeException re) { //XW-281
handleRuntimeException(expr, value, throwExceptionOnFailure, re);
} finally {
cleanUpContext(context);
}
}
private void trySetValue(String expr, Object value, boolean throwExceptionOnFailure, Map<String, Object> context, boolean evalExpression) throws OgnlException {
context.put(XWorkConverter.CONVERSION_PROPERTY_FULLNAME, expr);
context.put(REPORT_ERRORS_ON_NO_PROP, (throwExceptionOnFailure) ? Boolean.TRUE : Boolean.FALSE);
ognlUtil.setValue(expr, context, root, value, evalExpression);
}
private void cleanUpContext(Map<String, Object> context) {
ReflectionContextState.clear(context);
context.remove(XWorkConverter.CONVERSION_PROPERTY_FULLNAME);
context.remove(REPORT_ERRORS_ON_NO_PROP);
}
private void handleRuntimeException(String expr, Object value, boolean throwExceptionOnFailure, RuntimeException re) {
if (throwExceptionOnFailure) {
String message = ErrorMessageBuilder.create()
.errorSettingExpressionWithValue(expr, value)
.build();
throw new XWorkException(message, re);
} else {
if (LOG.isWarnEnabled()) {
LOG.warn("Error setting value", re);
}
}
}
private void handleOgnlException(String expr, Object value, boolean throwExceptionOnFailure, OgnlException e) {
String msg = "Error setting expression '" + expr + "' with value '" + value + "'";
if (LOG.isWarnEnabled()) {
LOG.warn(msg, e);
}
if (throwExceptionOnFailure) {
throw new XWorkException(msg, e);
}
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#findString(java.lang.String)
*/
public String findString(String expr) {
return (String) findValue(expr, String.class);
}
public String findString(String expr, boolean throwExceptionOnFailure) {
return (String) findValue(expr, String.class, throwExceptionOnFailure);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#findValue(java.lang.String)
*/
public Object findValue(String expr, boolean throwExceptionOnFailure) {
try {
setupExceptionOnFailure(throwExceptionOnFailure);
return tryFindValueWhenExpressionIsNotNull(expr);
} catch (OgnlException e) {
return handleOgnlException(expr, throwExceptionOnFailure, e);
} catch (Exception e) {
return handleOtherException(expr, throwExceptionOnFailure, e);
} finally {
ReflectionContextState.clear(context);
}
}
private void setupExceptionOnFailure(boolean throwExceptionOnFailure) {
if (throwExceptionOnFailure) {
context.put(THROW_EXCEPTION_ON_FAILURE, true);
}
}
private Object tryFindValueWhenExpressionIsNotNull(String expr) throws OgnlException {
if (expr == null) {
return null;
}
return tryFindValue(expr);
}
private Object handleOtherException(String expr, boolean throwExceptionOnFailure, Exception e) {
logLookupFailure(expr, e);
if (throwExceptionOnFailure)
throw new XWorkException(e);
return findInContext(expr);
}
private Object tryFindValue(String expr) throws OgnlException {
Object value;
expr = lookupForOverrides(expr);
if (defaultType != null) {
value = findValue(expr, defaultType);
} else {
value = getValueUsingOgnl(expr);
if (value == null) {
value = findInContext(expr);
}
}
return value;
}
private String lookupForOverrides(String expr) {
if ((overrides != null) && overrides.containsKey(expr)) {
expr = (String) overrides.get(expr);
}
return expr;
}
private Object getValueUsingOgnl(String expr) throws OgnlException {
try {
return ognlUtil.getValue(expr, context, root);
} finally {
context.remove(THROW_EXCEPTION_ON_FAILURE);
}
}
public Object findValue(String expr) {
return findValue(expr, false);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#findValue(java.lang.String, java.lang.Class)
*/
public Object findValue(String expr, Class asType, boolean throwExceptionOnFailure) {
try {
setupExceptionOnFailure(throwExceptionOnFailure);
return tryFindValueWhenExpressionIsNotNull(expr, asType);
} catch (OgnlException e) {
return handleOgnlException(expr, throwExceptionOnFailure, e);
} catch (Exception e) {
return handleOtherException(expr, throwExceptionOnFailure, e);
} finally {
ReflectionContextState.clear(context);
}
}
private Object tryFindValueWhenExpressionIsNotNull(String expr, Class asType) throws OgnlException {
if (expr == null) {
return null;
}
return tryFindValue(expr, asType);
}
private Object handleOgnlException(String expr, boolean throwExceptionOnFailure, OgnlException e) {
Object ret = findInContext(expr);
if (ret == null) {
if (shouldLogNoSuchPropertyWarning(e)) {
LOG.warn("Could not find property [" + ((NoSuchPropertyException) e).getName() + "]");
}
if (throwExceptionOnFailure) {
throw new XWorkException(e);
}
}
return ret;
}
private boolean shouldLogNoSuchPropertyWarning(OgnlException e) {
return e instanceof NoSuchPropertyException && devMode && logMissingProperties;
}
private Object tryFindValue(String expr, Class asType) throws OgnlException {
Object value = null;
try {
expr = lookupForOverrides(expr);
value = getValue(expr, asType);
if (value == null) {
value = findInContext(expr);
}
} finally {
context.remove(THROW_EXCEPTION_ON_FAILURE);
}
return value;
}
private Object getValue(String expr, Class asType) throws OgnlException {
return ognlUtil.getValue(expr, context, root, asType);
}
private Object findInContext(String name) {
return getContext().get(name);
}
public Object findValue(String expr, Class asType) {
return findValue(expr, asType, false);
}
/**
* Log a failed lookup, being more verbose when devMode=true.
*
* @param expr The failed expression
* @param e The thrown exception.
*/
private void logLookupFailure(String expr, Exception e) {
String msg = LoggerUtils.format("Caught an exception while evaluating expression '#0' against value stack", expr);
if (devMode && LOG.isWarnEnabled()) {
LOG.warn(msg, e);
LOG.warn("NOTE: Previous warning message was issued due to devMode set to true.");
} else if (LOG.isDebugEnabled()) {
LOG.debug(msg, e);
}
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#peek()
*/
public Object peek() {
return root.peek();
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#pop()
*/
public Object pop() {
return root.pop();
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#push(java.lang.Object)
*/
public void push(Object o) {
root.push(o);
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#set(java.lang.String, java.lang.Object)
*/
public void set(String key, Object o) {
//set is basically backed by a Map pushed on the stack; the key is put into that map with the Object as its value
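// e.g. (hypothetical call) set("foo", bar) reuses or pushes a marker map containing
// MAP_IDENTIFIER_KEY, puts "foo" -> bar into it, so a later findValue("foo") resolves against it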
Map setMap = retrieveSetMap();
setMap.put(key, o);
}
private Map retrieveSetMap() {
Map setMap;
Object topObj = peek();
if (shouldUseOldMap(topObj)) {
setMap = (Map) topObj;
} else {
setMap = new HashMap();
setMap.put(MAP_IDENTIFIER_KEY, "");
push(setMap);
}
return setMap;
}
/**
* check whether the top of the stack is a Map that was pushed for setting; if so, just reuse that map (reduces waste)
*/
private boolean shouldUseOldMap(Object topObj) {
return topObj instanceof Map && ((Map) topObj).get(MAP_IDENTIFIER_KEY) != null;
}
/**
* @see com.opensymphony.xwork2.util.ValueStack#size()
*/
public int size() {
return root.size();
}
private Object readResolve() {
// TODO: this should be done better
ActionContext ac = ActionContext.getContext();
Container cont = ac.getContainer();
XWorkConverter xworkConverter = cont.getInstance(XWorkConverter.class);
CompoundRootAccessor accessor = (CompoundRootAccessor) cont.getInstance(PropertyAccessor.class, CompoundRoot.class.getName());
TextProvider prov = cont.getInstance(TextProvider.class, "system");
boolean allow = "true".equals(cont.getInstance(String.class, "allowStaticMethodAccess"));
OgnlValueStack aStack = new OgnlValueStack(xworkConverter, accessor, prov, allow);
aStack.setOgnlUtil(cont.getInstance(OgnlUtil.class));
aStack.setRoot(xworkConverter, accessor, this.root, allow);
return aStack;
}
public void clearContextValues() {
//this is an OGNL ValueStack so the context will be an OgnlContext
//it would be better to make context of type OgnlContext
((OgnlContext) context).getValues().clear();
}
public void setAcceptProperties(Set<Pattern> acceptedProperties) {
securityMemberAccess.setAcceptProperties(acceptedProperties);
}
public void setPropertiesJudge(PropertiesJudge judge) {
securityMemberAccess.setPropertiesJudge(judge);
}
public void setExcludeProperties(Set<Pattern> excludeProperties) {
securityMemberAccess.setExcludeProperties(excludeProperties);
}
}
|
|
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2004, 2005 All Rights Reserved.
*/
package org.dita.dost.reader;
import static org.dita.dost.util.Configuration.*;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.reader.ChunkMapReader.*;
import java.net.URI;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.log.MessageBean;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.util.DitaClass;
import org.dita.dost.util.Job;
import org.dita.dost.writer.AbstractXMLFilter;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* Parse relevant DITA files and collect information.
*
* <p>
* <strong>Not thread-safe</strong>. Instances can be reused by calling
* {@link #reset()} between calls to parse.
* </p>
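* <p>Typical use (a sketch based on the methods below): configure the filter via {@link #setJob(Job)},
* {@link #setInputDir(URI)} and {@link #setCurrentFile(URI)}, run the parse, read the collected sets
* such as {@link #getHrefTargets()} and {@link #getConrefTargets()}, then call {@link #reset()}
* before parsing the next file.</p>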
*/
public final class GenListModuleReader extends AbstractXMLFilter {
public static final URI ROOT_URI = toURI("ROOT");
/** Output utilities */
private Job job;
/** Absolute basedir of the current parsing file */
private URI currentDir = null;
/** Flag for conref in parsing file */
private boolean hasConRef = false;
/** Flag for href in parsing file */
private boolean hasHref = false;
/** Flag for keyref in parsing file */
private boolean hasKeyRef = false;
/** Flag for whether parsing file contains coderef */
private boolean hasCodeRef = false;
/** Set of all the non-conref and non-copyto targets referred to in the current parsing file */
private final Set<Reference> nonConrefCopytoTargets = new LinkedHashSet<>(64);
/** Set of conref targets referred to in the current parsing file */
private final Set<URI> conrefTargets = new HashSet<>(32);
/** Set of href nonConrefCopytoTargets referred to in the current parsing file */
private final Set<URI> hrefTargets = new HashSet<>(32);
/** Set of href targets with anchor appended */
private final Set<URI> hrefTopicSet = new HashSet<>(32);
/** Set of chunk targets */
private final Set<URI> chunkTopicSet = new HashSet<>(32);
/** Set of subject schema files */
private final Set<URI> schemeSet = new HashSet<>(32);
/** Set of coderef or object target files */
private final Set<URI> coderefTargetSet = new HashSet<>(16);
/** Set of sources of those copy-to that were ignored */
private final Set<URI> ignoredCopytoSourceSet = new HashSet<>(16);
/** Map of copy-to target to source */
private final Map<URI, URI> copytoMap = new HashMap<>(16);
/** Flag for conrefpush */
private boolean hasconaction = false;
/** foreign/unknown nesting level */
private int foreignLevel = 0;
/** chunk nesting level */
private int chunkLevel = 0;
/** mark topics in reltables */
private int relTableLevel = 0;
/** chunk to-navigation level */
private int chunkToNavLevel = 0;
/** Topic group nesting level */
private int topicGroupLevel = 0;
/** Flag used to mark if current file is still valid after filtering */
private boolean isValidInput = false;
/** Set of outer dita files */
private final Set<URI> outDitaFilesSet = new HashSet<>(64);
/** Absolute system path to input file parent directory */
private URI rootDir = null;
/** Stack for @processing-role value */
private final Stack<String> processRoleStack = new Stack<>();
/** Topics with processing role of "resource-only" */
private final Set<URI> resourceOnlySet = new HashSet<>(32);
/** Topics with processing role of "normal" */
private final Set<URI> normalProcessingRoleSet = new HashSet<>(32);
/** Subject scheme relative file paths. */
private final Set<URI> schemeRefSet = new HashSet<>(32);
/** Relationship graph between subject schema. Keys are subject scheme map paths and values
* are subject scheme map paths, both relative to base directory. A key {@link #ROOT_URI} contains all subject scheme maps. */
private final Map<URI, Set<URI>> schemeRelationGraph = new LinkedHashMap<>();
/** Store the primary ditamap file name. */
private URI primaryDitamap;
private boolean isRootElement = true;
private DitaClass rootClass = null;
/**
* Set output utilities.
*
* @param job output utils
*/
public void setJob(final Job job) {
this.job = job;
}
/**
* Get out file set.
*
* @return out file set
*/
public Set<URI> getOutFilesSet() {
return outDitaFilesSet;
}
/**
* @return the hrefTopicSet
*/
public Set<URI> getHrefTopicSet() {
return hrefTopicSet;
}
/**
* @return the chunkTopicSet
*/
public Set<URI> getChunkTopicSet() {
return chunkTopicSet;
}
/**
* Get scheme set.
*
* @return scheme set
*/
public Set<URI> getSchemeSet() {
return schemeSet;
}
/**
* Get scheme ref set.
*
* @return scheme ref set
*/
public Set<URI> getSchemeRefSet() {
return schemeRefSet;
}
/**
* List of files with "@processing-role=resource-only".
*
* @return the resource-only set
*/
public Set<URI> getResourceOnlySet() {
final Set<URI> res = new HashSet<>(resourceOnlySet);
res.removeAll(normalProcessingRoleSet);
return res;
}
/**
* Is the processed file a DITA topic.
*
* @return {@code true} if DITA topic, otherwise {@code false}
*/
public boolean isDitaTopic() {
if (isRootElement) {
throw new IllegalStateException();
}
return rootClass == null || TOPIC_TOPIC.matches(rootClass);
}
/**
* Is the currently processed file a DITA map.
*
* @return {@code true} if DITA map, otherwise {@code false}
*/
public boolean isDitaMap() {
if (isRootElement) {
throw new IllegalStateException();
}
return rootClass != null && MAP_MAP.matches(rootClass);
}
/**
* Get relationship graph between subject schema. Keys are subject scheme map paths and values
* are subject scheme map paths, both relative to base directory. A key {@link #ROOT_URI} contains all subject scheme maps.
*
* @return relationship graph
*/
public Map<URI, Set<URI>> getRelationshipGrap() {
return schemeRelationGraph;
}
public void setPrimaryDitamap(final URI primaryDitamap) {
assert primaryDitamap.isAbsolute();
this.primaryDitamap = primaryDitamap;
}
/**
* To see if the parsed file has conref inside.
*
* @return true if has conref and false otherwise
*/
public boolean hasConRef() {
return hasConRef;
}
/**
* To see if the parsed file has keyref inside.
*
* @return true if has keyref and false otherwise
*/
public boolean hasKeyRef() {
return hasKeyRef;
}
/**
* To see if the parsed file has coderef inside.
*
* @return true if has coderef and false otherwise
*/
public boolean hasCodeRef() {
return hasCodeRef;
}
/**
* To see if the parsed file has href inside.
*
* @return true if has href and false otherwise
*/
public boolean hasHref() {
return hasHref;
}
/**
* Get all targets except copy-to.
*
* @return set of target file path with option format after
* {@link org.dita.dost.util.Constants#STICK STICK}
*/
public Set<Reference> getNonCopytoResult() {
final Set<Reference> nonCopytoSet = new LinkedHashSet<>(128);
nonCopytoSet.addAll(nonConrefCopytoTargets);
for (final URI f : conrefTargets) {
nonCopytoSet.add(new Reference(stripFragment(f)));
}
for (final URI f : copytoMap.values()) {
nonCopytoSet.add(new Reference(stripFragment(f)));
}
for (final URI f : ignoredCopytoSourceSet) {
nonCopytoSet.add(new Reference(stripFragment(f)));
}
for (final URI filename : coderefTargetSet) {
nonCopytoSet.add(new Reference(stripFragment(filename)));
}
return nonCopytoSet;
}
/**
* Get the href target.
*
* @return Returns the hrefTargets.
*/
public Set<URI> getHrefTargets() {
return hrefTargets;
}
/**
* Get conref targets.
*
* @return Returns the conrefTargets.
*/
public Set<URI> getConrefTargets() {
return conrefTargets;
}
/**
* Get coderef targets.
*
* @return Returns coderef targets.
*/
public Set<URI> getCoderefTargets() {
return coderefTargetSet;
}
/**
* Get the set of DITA files that resolve outside the input directory.
*
* @return Returns the outDitaFilesSet.
*/
public Set<URI> getOutDitaFilesSet() {
return outDitaFilesSet;
}
/**
* Get non-conref and non-copyto targets.
*
* @return Returns the nonConrefCopytoTargets.
*/
public Set<URI> getNonConrefCopytoTargets() {
final Set<URI> res = new HashSet<>(nonConrefCopytoTargets.size());
for (final Reference r : nonConrefCopytoTargets) {
res.add(r.filename);
}
return res;
}
/**
* Returns the ignoredCopytoSourceSet.
*
* @return Returns the ignoredCopytoSourceSet.
*/
public Set<URI> getIgnoredCopytoSourceSet() {
return ignoredCopytoSourceSet;
}
/**
* Get the copy-to map.
*
* @return copy-to map
*/
public Map<URI, URI> getCopytoMap() {
return copytoMap;
}
/**
* Set processing input directory absolute path.
*
* @param inputDir absolute path to base directory
*/
public void setInputDir(final URI inputDir) {
this.rootDir = inputDir;
}
/**
* Set current file absolute path
*
* @param currentFile absolute path to current file
*/
public void setCurrentFile(final URI currentFile) {
assert currentFile.isAbsolute();
super.setCurrentFile(currentFile);
currentDir = currentFile.resolve(".");
}
/**
* Check if the current file is valid after filtering.
*
* @return true if valid and false otherwise
*/
public boolean isValidInput() {
return isValidInput;
}
/**
* Check if the current file has conaction.
*
* @return true if has conaction and false otherwise
*/
public boolean hasConaction() {
return hasconaction;
}
/**
*
* Reset the internal variables.
*/
public void reset() {
hasKeyRef = false;
hasConRef = false;
hasHref = false;
hasCodeRef = false;
currentDir = null;
foreignLevel = 0;
chunkLevel = 0;
relTableLevel = 0;
chunkToNavLevel = 0;
topicGroupLevel = 0;
isValidInput = false;
hasconaction = false;
coderefTargetSet.clear();
nonConrefCopytoTargets.clear();
hrefTargets.clear();
hrefTopicSet.clear();
chunkTopicSet.clear();
conrefTargets.clear();
copytoMap.clear();
ignoredCopytoSourceSet.clear();
outDitaFilesSet.clear();
schemeSet.clear();
schemeRefSet.clear();
processRoleStack.clear();
isRootElement = true;
rootClass = null;
// Don't clean resourceOnlySet or normalProcessingRoleSet
}
@Override
public void startDocument() throws SAXException {
if (currentDir == null) {
throw new IllegalStateException();
}
processRoleStack.push(ATTR_PROCESSING_ROLE_VALUE_NORMAL);
getContentHandler().startDocument();
}
@Override
public void startElement(final String uri, final String localName, final String qName, final Attributes atts)
throws SAXException {
handleRootElement(atts);
handleSubjectScheme(atts);
String processingRole = atts.getValue(ATTRIBUTE_NAME_PROCESSING_ROLE);
if (processingRole == null) {
processingRole = processRoleStack.peek();
}
processRoleStack.push(processingRole);
final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
final String scope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
if (href != null && !ATTR_SCOPE_VALUE_EXTERNAL.equals(scope)) {
if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processingRole)) {
resourceOnlySet.add(stripFragment(currentDir.resolve(href)));
} else {
normalProcessingRoleSet.add(stripFragment(currentDir.resolve(href)));
}
}
final String classValue = atts.getValue(ATTRIBUTE_NAME_CLASS);
if (foreignLevel > 0) {
foreignLevel++;
return;
} else if (TOPIC_FOREIGN.matches(classValue) || TOPIC_UNKNOWN.matches(classValue)) {
foreignLevel++;
}
if (chunkLevel > 0) {
chunkLevel++;
} else if (atts.getValue(ATTRIBUTE_NAME_CHUNK) != null) {
chunkLevel++;
}
if (relTableLevel > 0) {
relTableLevel++;
} else if (MAP_RELTABLE.matches(classValue)) {
relTableLevel++;
}
if (chunkToNavLevel > 0) {
chunkToNavLevel++;
} else if (atts.getValue(ATTRIBUTE_NAME_CHUNK) != null
&& atts.getValue(ATTRIBUTE_NAME_CHUNK).contains(CHUNK_TO_NAVIGATION)) {
chunkToNavLevel++;
}
if (topicGroupLevel > 0) {
topicGroupLevel++;
} else if (MAPGROUP_D_TOPICGROUP.matches(classValue)) {
topicGroupLevel++;
}
if (classValue == null && !ELEMENT_NAME_DITA.equals(localName)) {
logger.info(MessageUtils.getInstance().getMessage("DOTJ030I", localName).toString());
}
if (TOPIC_TOPIC.matches(classValue) || MAP_MAP.matches(classValue)) {
final String domains = atts.getValue(ATTRIBUTE_NAME_DOMAINS);
if (domains == null) {
logger.info(MessageUtils.getInstance().getMessage("DOTJ029I", localName).toString());
}
}
if ((MAP_MAP.matches(classValue)) || (TOPIC_TITLE.matches(classValue))) {
isValidInput = true;
}
parseConrefAttr(atts);
if (PR_D_CODEREF.matches(classValue)) {
parseCoderef(atts);
} else if (TOPIC_OBJECT.matches(classValue)) {
parseObject(atts);
} else if (MAP_TOPICREF.matches(classValue)) {
parseAttribute(atts, ATTRIBUTE_NAME_HREF);
parseAttribute(atts, ATTRIBUTE_NAME_COPY_TO);
} else {
parseAttribute(atts, ATTRIBUTE_NAME_HREF);
}
parseConactionAttr(atts);
parseConkeyrefAttr(atts);
parseKeyrefAttr(atts);
getContentHandler().startElement(uri, localName, qName, atts);
}
private void parseCoderef(final Attributes atts) {
final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
if (href == null) {
return;
}
final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
if (ATTR_SCOPE_VALUE_EXTERNAL.equals(attrScope) || ATTR_SCOPE_VALUE_PEER.equals(attrScope)
|| href.toString().startsWith(SHARP)) {
return;
}
hasCodeRef = true;
final URI filename = stripFragment(href.isAbsolute() ? href : currentDir.resolve(href));
assert filename.isAbsolute();
coderefTargetSet.add(filename);
}
private void parseObject(final Attributes atts) throws SAXException {
URI attrValue = toURI(atts.getValue(ATTRIBUTE_NAME_DATA));
if (attrValue == null) {
return;
}
URI filename;
final URI codebase = toURI(atts.getValue(ATTRIBUTE_NAME_CODEBASE));
if (codebase != null) {
if (codebase.isAbsolute()) {
filename = codebase.resolve(attrValue);
} else {
filename = currentDir.resolve(codebase).resolve(attrValue);
}
} else {
filename = currentDir.resolve(attrValue);
}
filename = stripFragment(filename);
assert filename.isAbsolute();
nonConrefCopytoTargets.add(new Reference(filename, ATTR_FORMAT_VALUE_HTML));
}
private void handleSubjectScheme(final Attributes atts) {
final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
final String classValue = atts.getValue(ATTRIBUTE_NAME_CLASS);
// Generate Scheme relationship graph
if (SUBJECTSCHEME_SUBJECTSCHEME.matches(classValue)) {
// Make it easy to do the BFS later.
final URI key = ROOT_URI;
final Set<URI> children = schemeRelationGraph.containsKey(key) ? schemeRelationGraph.get(key) : new LinkedHashSet<URI>();
children.add(currentFile);
schemeRelationGraph.put(key, children);
schemeRefSet.add(currentFile);
} else if (SUBJECTSCHEME_SCHEMEREF.matches(classValue)) {
if (href != null) {
final URI key = currentFile;
final Set<URI> children = schemeRelationGraph.containsKey(key) ? schemeRelationGraph.get(key) : new LinkedHashSet<URI>();
final URI child = currentFile.resolve(href);
children.add(child);
schemeRelationGraph.put(key, children);
}
}
}
private void handleRootElement(final Attributes atts) {
if (isRootElement) {
isRootElement = false;
final String classValue = atts.getValue(ATTRIBUTE_NAME_CLASS);
if (classValue != null) {
rootClass = new DitaClass(atts.getValue(ATTRIBUTE_NAME_CLASS));
}
}
}
@Override
public void endElement(final String uri, final String localName, final String qName) throws SAXException {
// @processing-role
processRoleStack.pop();
if (foreignLevel > 0) {
foreignLevel--;
return;
}
if (chunkLevel > 0) {
chunkLevel--;
}
if (relTableLevel > 0) {
relTableLevel--;
}
if (chunkToNavLevel > 0) {
chunkToNavLevel--;
}
if (topicGroupLevel > 0) {
topicGroupLevel--;
}
getContentHandler().endElement(uri, localName, qName);
}
/**
* Clean up.
*/
@Override
public void endDocument() throws SAXException {
processRoleStack.pop();
getContentHandler().endDocument();
}
/**
* Parse the input attributes for needed information.
*
* @param atts all attributes
* @param attrName attribute to process
*/
private void parseAttribute(final Attributes atts, final String attrName) throws SAXException {
URI attrValue = toURI(atts.getValue(attrName));
if (attrValue == null) {
return;
}
final String attrClass = atts.getValue(ATTRIBUTE_NAME_CLASS);
final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
// external resource is filtered here.
if (ATTR_SCOPE_VALUE_EXTERNAL.equals(attrScope) || ATTR_SCOPE_VALUE_PEER.equals(attrScope)
// FIXME: testing for :// here is incorrect, rely on source scope instead
|| attrValue.toString().contains(COLON_DOUBLE_SLASH) || attrValue.toString().startsWith(SHARP)) {
return;
}
final URI filename = stripFragment(attrValue.isAbsolute() ? attrValue : currentDir.resolve(attrValue));
assert filename.isAbsolute();
final String attrType = atts.getValue(ATTRIBUTE_NAME_TYPE);
if (MAP_TOPICREF.matches(attrClass) && ATTR_TYPE_VALUE_SUBJECT_SCHEME.equalsIgnoreCase(attrType)) {
schemeSet.add(filename);
}
final String attrFormat = getFormat(atts);
if (ATTRIBUTE_NAME_HREF.equals(attrName)) {
hasHref = true;
// Collect non-conref and non-copyto targets
if ((atts.getValue(ATTRIBUTE_NAME_COPY_TO) == null
|| (atts.getValue(ATTRIBUTE_NAME_CHUNK) != null && atts.getValue(ATTRIBUTE_NAME_CHUNK).contains(CHUNK_TO_CONTENT)))
&& (followLinks()
|| (TOPIC_IMAGE.matches(attrClass) || DITAVAREF_D_DITAVALREF.matches(attrClass)))) {
nonConrefCopytoTargets.add(new Reference(filename, attrFormat));
}
}
if (isFormatDita(attrFormat)) {
if (ATTRIBUTE_NAME_HREF.equals(attrName)) {
if (followLinks()) {
hrefTargets.add(filename);
toOutFile(filename);
if (chunkLevel > 0 && chunkToNavLevel == 0 && topicGroupLevel == 0 && relTableLevel == 0) {
chunkTopicSet.add(filename);
} else {
hrefTopicSet.add(filename);
}
}
} else if (ATTRIBUTE_NAME_COPY_TO.equals(attrName)) {
final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
if (href != null) {
if (href.toString().isEmpty()) {
logger.warn("Copy-to task [href=\"\" copy-to=\"" + filename + "\"] was ignored.");
} else {
final URI value = stripFragment(currentDir.resolve(href));
if (copytoMap.get(filename) != null) {
if (!value.equals(copytoMap.get(filename))) {
logger.warn(MessageUtils.getInstance().getMessage("DOTX065W", href.toString(), filename.toString()).toString());
}
ignoredCopytoSourceSet.add(value);
} else if (!(atts.getValue(ATTRIBUTE_NAME_CHUNK) != null && atts.getValue(ATTRIBUTE_NAME_CHUNK).contains(
CHUNK_TO_CONTENT))) {
copytoMap.put(filename, value);
}
}
}
final URI pathWithoutID = stripFragment(currentDir.resolve(attrValue));
if (chunkLevel > 0 && chunkToNavLevel == 0 && topicGroupLevel == 0) {
chunkTopicSet.add(pathWithoutID);
} else {
hrefTopicSet.add(pathWithoutID);
}
}
}
}
private String getFormat(Attributes atts) {
final String attrClass = atts.getValue(ATTRIBUTE_NAME_CLASS);
if (TOPIC_IMAGE.matches(attrClass)) {
return ATTR_FORMAT_VALUE_IMAGE;
} else if (TOPIC_OBJECT.matches(attrClass)) {
throw new IllegalArgumentException();
//return ATTR_FORMAT_VALUE_HTML;
} else {
return atts.getValue(ATTRIBUTE_NAME_FORMAT);
}
}
private void parseConrefAttr(final Attributes atts) throws SAXException {
String attrValue = atts.getValue(ATTRIBUTE_NAME_CONREF);
if (attrValue != null) {
hasConRef = true;
URI filename;
final URI target = toURI(attrValue);
if (isAbsolute(target)) {
filename = target;
} else if (attrValue.startsWith(SHARP)) {
filename = currentFile;
} else {
filename = currentDir.resolve(attrValue);
}
filename = stripFragment(filename);
// Collect only conref target topic files
conrefTargets.add(filename);
toOutFile(filename);
}
}
private void parseConkeyrefAttr(final Attributes atts) {
final String conkeyref = atts.getValue(ATTRIBUTE_NAME_CONKEYREF);
if (conkeyref != null) {
hasConRef = true;
}
}
private final static String[] KEYREF_ATTRS = new String[] {
ATTRIBUTE_NAME_KEYREF,
ATTRIBUTE_NAME_CONKEYREF,
ATTRIBUTE_NAME_ARCHIVEKEYREFS,
ATTRIBUTE_NAME_CLASSIDKEYREF,
ATTRIBUTE_NAME_CODEBASEKEYREF,
ATTRIBUTE_NAME_DATAKEYREF
};
private void parseKeyrefAttr(final Attributes atts) {
for (final String attr: KEYREF_ATTRS) {
if (atts.getValue(attr) != null) {
hasKeyRef = true;
break;
}
}
}
private void parseConactionAttr(final Attributes atts) {
final String conaction = atts.getValue(ATTRIBUTE_NAME_CONACTION);
if (conaction != null) {
if (conaction.equals(ATTR_CONACTION_VALUE_MARK) || conaction.equals(ATTR_CONACTION_VALUE_PUSHREPLACE)) {
hasconaction = true;
}
}
}
/**
* Check if format is DITA topic.
*
* @param attrFormat format attribute value, may be {@code null}
* @return {@code true} if DITA topic, otherwise {@code false}
*/
public static boolean isFormatDita(final String attrFormat) {
if (attrFormat == null || attrFormat.equals(ATTR_FORMAT_VALUE_DITA)) {
return true;
}
for (final String f: ditaFormat) {
if (f.equals(attrFormat)) {
return true;
}
}
return false;
}
/**
* Check if path walks up in parent directories
*
* @param toCheckPath path to check
* @return {@code true} if path walks up, otherwise {@code false}
*/
private boolean isOutFile(final URI toCheckPath) {
return !toCheckPath.getPath().startsWith(rootDir.getPath());
}
/**
* Should links be followed.
*/
private boolean followLinks() {
return !job.getOnlyTopicInMap() || isDitaMap();
}
private void addToOutFilesSet(final URI hrefedFile) {
if (followLinks()) {
outDitaFilesSet.add(hrefedFile);
}
}
private void toOutFile(final URI filename) throws SAXException {
assert filename.isAbsolute();
// the filename has already been resolved to an absolute URI against the DITA input file
final String[] prop = { currentFile.toString() };
if (job.getGeneratecopyouter() == Job.Generate.NOT_GENERATEOUTTER) {
if (isOutFile(filename)) {
if (job.getOutterControl() == Job.OutterControl.FAIL) {
final MessageBean msgBean = MessageUtils.getInstance().getMessage("DOTJ035F", prop);
throw new SAXParseException(null, null, new DITAOTException(msgBean, null, msgBean.toString()));
} else if (job.getOutterControl() == Job.OutterControl.WARN) {
final String message = MessageUtils.getInstance().getMessage("DOTJ036W", prop).toString();
logger.warn(message);
}
addToOutFilesSet(filename);
}
}
}
/**
* File reference with path and optional format.
*/
public static class Reference {
/** Absolute URI reference */
public final URI filename;
/** Format of the reference */
public final String format;
public Reference(final URI filename, final String format) {
assert filename.isAbsolute() && filename.getFragment() == null;
this.filename = filename;
this.format = format;
}
public Reference(final URI filename) {
this(filename, null);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((filename == null) ? 0 : filename.hashCode());
// result = prime * result + ((format == null) ? 0 : format.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof Reference)) {
return false;
}
final Reference other = (Reference) obj;
if (filename == null) {
if (other.filename != null) {
return false;
}
} else if (!filename.equals(other.filename)) {
return false;
}
// if (format == null) {
// if (other.format != null) {
// return false;
// }
// } else if (!format.equals(other.format)) {
// return false;
// }
return true;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.transactions;
import java.util.Collection;
import org.apache.ignite.IgniteTransactions;
import org.apache.ignite.configuration.TransactionConfiguration;
import org.apache.ignite.internal.IgniteTransactionsEx;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.apache.ignite.transactions.TransactionMetrics;
import org.apache.ignite.transactions.TransactionException;
import org.jetbrains.annotations.Nullable;
/**
* Grid transactions implementation.
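* <p>Usage sketch (assuming the public {@code Ignite#transactions()} facade): start a transaction
* with {@code txStart(...)}, perform cache operations, then commit or roll back via the returned
* {@link Transaction} proxy.</p>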
*/
public class IgniteTransactionsImpl<K, V> implements IgniteTransactionsEx {
/** Cache shared context. */
private GridCacheSharedContext<K, V> cctx;
/** Label. */
private String lb;
/**
* @param cctx Cache shared context.
*/
public IgniteTransactionsImpl(GridCacheSharedContext<K, V> cctx, @Nullable String lb) {
this.cctx = cctx;
this.lb = lb;
}
/** {@inheritDoc} */
@Override public Transaction txStart() throws IllegalStateException {
TransactionConfiguration cfg = CU.transactionConfiguration(null, cctx.kernalContext().config());
return txStart0(
cfg.getDefaultTxConcurrency(),
cfg.getDefaultTxIsolation(),
cfg.getDefaultTxTimeout(),
0,
null
).proxy();
}
/** {@inheritDoc} */
@Override public Transaction txStart(TransactionConcurrency concurrency, TransactionIsolation isolation) {
A.notNull(concurrency, "concurrency");
A.notNull(isolation, "isolation");
TransactionConfiguration cfg = CU.transactionConfiguration(null, cctx.kernalContext().config());
return txStart0(
concurrency,
isolation,
cfg.getDefaultTxTimeout(),
0,
null
).proxy();
}
/** {@inheritDoc} */
@Override public Transaction txStart(TransactionConcurrency concurrency, TransactionIsolation isolation,
long timeout, int txSize) {
A.notNull(concurrency, "concurrency");
A.notNull(isolation, "isolation");
A.ensure(timeout >= 0, "timeout cannot be negative");
A.ensure(txSize >= 0, "transaction size cannot be negative");
return txStart0(
concurrency,
isolation,
timeout,
txSize,
null
).proxy();
}
/** {@inheritDoc} */
@Override public GridNearTxLocal txStartEx(
GridCacheContext ctx,
TransactionConcurrency concurrency,
TransactionIsolation isolation,
long timeout,
int txSize)
{
A.notNull(concurrency, "concurrency");
A.notNull(isolation, "isolation");
A.ensure(timeout >= 0, "timeout cannot be negative");
A.ensure(txSize >= 0, "transaction size cannot be negative");
checkTransactional(ctx);
return txStart0(concurrency,
isolation,
timeout,
txSize,
ctx.systemTx() ? ctx : null);
}
/** {@inheritDoc} */
@Override public GridNearTxLocal txStartEx(
GridCacheContext ctx,
TransactionConcurrency concurrency,
TransactionIsolation isolation)
{
A.notNull(concurrency, "concurrency");
A.notNull(isolation, "isolation");
checkTransactional(ctx);
TransactionConfiguration cfg = CU.transactionConfiguration(ctx, cctx.kernalContext().config());
return txStart0(concurrency,
isolation,
cfg.getDefaultTxTimeout(),
0,
ctx.systemTx() ? ctx : null);
}
/**
* @param concurrency Transaction concurrency.
* @param isolation Transaction isolation.
* @param timeout Transaction timeout.
* @param txSize Expected transaction size.
* @param sysCacheCtx System cache context.
* @return Transaction.
*/
@SuppressWarnings("unchecked")
private GridNearTxLocal txStart0(
TransactionConcurrency concurrency,
TransactionIsolation isolation,
long timeout,
int txSize,
@Nullable GridCacheContext sysCacheCtx
) {
cctx.kernalContext().gateway().readLock();
try {
GridNearTxLocal tx = cctx.tm().userTx(sysCacheCtx);
if (tx != null)
throw new IllegalStateException("Failed to start new transaction " +
"(current thread already has a transaction): " + tx);
tx = cctx.tm().newTx(
false,
false,
sysCacheCtx,
concurrency,
isolation,
timeout,
true,
txSize,
lb
);
assert tx != null;
return tx;
}
finally {
cctx.kernalContext().gateway().readUnlock();
}
}
/** {@inheritDoc} */
@Nullable @Override public Transaction tx() {
GridNearTxLocal tx = cctx.tm().userTx();
return tx != null ? tx.proxy() : null;
}
/** {@inheritDoc} */
@Override public TransactionMetrics metrics() {
return cctx.txMetrics();
}
/** {@inheritDoc} */
@Override public void resetMetrics() {
cctx.resetTxMetrics();
}
/** {@inheritDoc} */
@Override public Collection<Transaction> localActiveTransactions() {
return F.viewReadOnly(cctx.tm().activeTransactions(), new IgniteClosure<IgniteInternalTx, Transaction>() {
@Override public Transaction apply(IgniteInternalTx tx) {
return ((GridNearTxLocal)tx).rollbackOnlyProxy();
}
}, new IgnitePredicate<IgniteInternalTx>() {
@Override public boolean apply(IgniteInternalTx tx) {
return tx.local() && tx.near();
}
});
}
/** {@inheritDoc} */
@Override public IgniteTransactions withLabel(String lb) {
A.notNull(lb, "label should not be empty.");
return new IgniteTransactionsImpl<>(cctx, lb);
}
/**
* @param ctx Cache context.
*/
private void checkTransactional(GridCacheContext ctx) {
if (!ctx.transactional())
throw new TransactionException("Failed to start transaction on non-transactional cache: " + ctx.name());
}
}
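/*
 * Illustrative sketch (added, not part of the original source): the txStart(...) overloads above are
 * normally reached through the public IgniteTransactions facade rather than being called directly.
 * A minimal, assumed usage looks roughly like this; the "ignite" instance and "cache" are hypothetical:
 *
 *   IgniteTransactions transactions = ignite.transactions();
 *   try (Transaction tx = transactions.txStart(
 *           TransactionConcurrency.PESSIMISTIC, TransactionIsolation.REPEATABLE_READ)) {
 *       cache.put("key", "value"); // enlisted in the surrounding transaction
 *       tx.commit();               // close() without a prior commit() rolls the transaction back
 *   }
 */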
package ca.uhn.fhir.jpa.searchparam.extractor;
/*
* #%L
* HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.model.primitive.BoundCodeDt;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.StringUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.text.StringTokenizer;
import org.fhir.ucum.Pair;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseEnumeration;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.measure.quantity.Quantity;
import javax.measure.unit.NonSI;
import javax.measure.unit.Unit;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.strip;
import static org.apache.commons.lang3.StringUtils.trim;
public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
public static final Set<String> COORDS_INDEX_PATHS;
private static final Pattern SPLIT = Pattern.compile("\\||( or )");
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
static {
Set<String> coordsIndexPaths = Sets.newHashSet("Location.position");
COORDS_INDEX_PATHS = Collections.unmodifiableSet(coordsIndexPaths);
}
@Autowired
protected ApplicationContext myApplicationContext;
@Autowired
private FhirContext myContext;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ModelConfig myModelConfig;
@Autowired
private PartitionSettings myPartitionSettings;
private Set<String> myIgnoredForSearchDatatypes;
private BaseRuntimeChildDefinition myQuantityValueValueChild;
private BaseRuntimeChildDefinition myQuantitySystemValueChild;
private BaseRuntimeChildDefinition myQuantityCodeValueChild;
private BaseRuntimeChildDefinition myMoneyValueChild;
private BaseRuntimeChildDefinition myMoneyCurrencyChild;
private BaseRuntimeElementCompositeDefinition<?> myLocationPositionDefinition;
private BaseRuntimeChildDefinition myCodeSystemUrlValueChild;
private BaseRuntimeChildDefinition myRangeLowValueChild;
private BaseRuntimeChildDefinition myRangeHighValueChild;
private BaseRuntimeChildDefinition myAddressLineValueChild;
private BaseRuntimeChildDefinition myAddressCityValueChild;
private BaseRuntimeChildDefinition myAddressStateValueChild;
private BaseRuntimeChildDefinition myAddressCountryValueChild;
private BaseRuntimeChildDefinition myAddressPostalCodeValueChild;
private BaseRuntimeChildDefinition myCapabilityStatementRestSecurityServiceValueChild;
private BaseRuntimeChildDefinition myPeriodStartValueChild;
private BaseRuntimeChildDefinition myPeriodEndValueChild;
private BaseRuntimeChildDefinition myTimingEventValueChild;
private BaseRuntimeChildDefinition myTimingRepeatValueChild;
private BaseRuntimeChildDefinition myTimingRepeatBoundsValueChild;
private BaseRuntimeChildDefinition myDurationSystemValueChild;
private BaseRuntimeChildDefinition myDurationCodeValueChild;
private BaseRuntimeChildDefinition myDurationValueValueChild;
private BaseRuntimeChildDefinition myHumanNameFamilyValueChild;
private BaseRuntimeChildDefinition myHumanNameGivenValueChild;
private BaseRuntimeChildDefinition myHumanNameTextValueChild;
private BaseRuntimeChildDefinition myHumanNamePrefixValueChild;
private BaseRuntimeChildDefinition myHumanNameSuffixValueChild;
private BaseRuntimeChildDefinition myContactPointValueValueChild;
private BaseRuntimeChildDefinition myIdentifierSystemValueChild;
private BaseRuntimeChildDefinition myIdentifierValueValueChild;
private BaseRuntimeChildDefinition myIdentifierTypeValueChild;
private BaseRuntimeChildDefinition myIdentifierTypeTextValueChild;
private BaseRuntimeChildDefinition myCodeableConceptCodingValueChild;
private BaseRuntimeChildDefinition myCodeableConceptTextValueChild;
private BaseRuntimeChildDefinition myCodingSystemValueChild;
private BaseRuntimeChildDefinition myCodingCodeValueChild;
private BaseRuntimeChildDefinition myCodingDisplayValueChild;
private BaseRuntimeChildDefinition myContactPointSystemValueChild;
private BaseRuntimeChildDefinition myPatientCommunicationLanguageValueChild;
/**
* Constructor
*/
BaseSearchParamExtractor() {
super();
}
/**
* UNIT TEST constructor
*/
BaseSearchParamExtractor(ModelConfig theModelConfig, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
Validate.notNull(theModelConfig);
Validate.notNull(theCtx);
Validate.notNull(theSearchParamRegistry);
myModelConfig = theModelConfig;
myContext = theCtx;
mySearchParamRegistry = theSearchParamRegistry;
myPartitionSettings = thePartitionSettings;
}
@Override
public SearchParamSet<PathAndRef> extractResourceLinks(IBaseResource theResource, boolean theWantLocalReferences) {
IExtractor<PathAndRef> extractor = createReferenceExtractor();
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.REFERENCE, theWantLocalReferences);
}
private IExtractor<PathAndRef> createReferenceExtractor() {
return new ResourceLinkExtractor();
}
@Override
public PathAndRef extractReferenceLinkFromResource(IBase theValue, String thePath) {
ResourceLinkExtractor extractor = new ResourceLinkExtractor();
return extractor.get(theValue, thePath);
}
@Override
public List<String> extractParamValuesAsStrings(RuntimeSearchParam theSearchParam, IBaseResource theResource) {
IExtractor extractor;
switch (theSearchParam.getParamType()) {
case DATE:
extractor = createDateExtractor(theResource);
break;
case STRING:
extractor = createStringExtractor(theResource);
break;
case TOKEN:
extractor = createTokenExtractor(theResource);
break;
case NUMBER:
extractor = createNumberExtractor(theResource);
break;
case REFERENCE:
extractor = createReferenceExtractor();
return extractReferenceParamsAsQueryTokens(theSearchParam, theResource, extractor);
case QUANTITY:
if (myModelConfig.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED)) {
extractor = new CompositeExtractor(
createQuantityExtractor(theResource),
createQuantityNormalizedExtractor(theResource)
);
} else {
extractor = createQuantityExtractor(theResource);
}
break;
case URI:
extractor = createUriExtractor(theResource);
break;
case SPECIAL:
extractor = createSpecialExtractor(theResource.getIdElement().getResourceType());
break;
case COMPOSITE:
default:
throw new UnsupportedOperationException("Type " + theSearchParam.getParamType() + " not supported for extraction");
}
return extractParamsAsQueryTokens(theSearchParam, theResource, extractor);
}
private List<String> extractReferenceParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor<PathAndRef> theExtractor) {
SearchParamSet<PathAndRef> params = new SearchParamSet<>();
extractSearchParam(theSearchParam, theResource, theExtractor, params, false);
return refsToStringList(params);
}
private List<String> refsToStringList(SearchParamSet<PathAndRef> theParams) {
return theParams.stream()
.map(PathAndRef::getRef)
.map(ref -> ref.getReferenceElement().toUnqualifiedVersionless().getValue())
.collect(Collectors.toList());
}
private <T extends BaseResourceIndexedSearchParam> List<String> extractParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor<T> theExtractor) {
SearchParamSet<T> params = new SearchParamSet<>();
extractSearchParam(theSearchParam, theResource, theExtractor, params, false);
return toStringList(params);
}
private <T extends BaseResourceIndexedSearchParam> List<String> toStringList(SearchParamSet<T> theParams) {
return theParams.stream()
.map(param -> param.toQueryParameterType().getValueAsQueryToken(myContext))
.collect(Collectors.toList());
}
@Override
public SearchParamSet<BaseResourceIndexedSearchParam> extractSearchParamTokens(IBaseResource theResource) {
IExtractor<BaseResourceIndexedSearchParam> extractor = createTokenExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.TOKEN, false);
}
@Override
public SearchParamSet<BaseResourceIndexedSearchParam> extractSearchParamTokens(IBaseResource theResource, RuntimeSearchParam theSearchParam) {
IExtractor<BaseResourceIndexedSearchParam> extractor = createTokenExtractor(theResource);
SearchParamSet<BaseResourceIndexedSearchParam> setToPopulate = new SearchParamSet<>();
extractSearchParam(theSearchParam, theResource, extractor, setToPopulate, false);
return setToPopulate;
}
private IExtractor<BaseResourceIndexedSearchParam> createTokenExtractor(IBaseResource theResource) {
String resourceTypeName = toRootTypeName(theResource);
String useSystem;
if (getContext().getVersion().getVersion().equals(FhirVersionEnum.DSTU2)) {
if (resourceTypeName.equals("ValueSet")) {
ca.uhn.fhir.model.dstu2.resource.ValueSet dstu2ValueSet = (ca.uhn.fhir.model.dstu2.resource.ValueSet) theResource;
useSystem = dstu2ValueSet.getCodeSystem().getSystem();
} else {
useSystem = null;
}
} else {
if (resourceTypeName.equals("CodeSystem")) {
useSystem = extractValueAsString(myCodeSystemUrlValueChild, theResource);
} else {
useSystem = null;
}
}
return new TokenExtractor(resourceTypeName, useSystem);
}
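// Descriptive note (added): the useSystem value computed above is handed to TokenExtractor, presumably so
// that bare "code" values indexed from a ValueSet (DSTU2) or CodeSystem (DSTU3+) resource can default to
// that resource's own code system URL; the DSTU2 branch has to read it from ValueSet.codeSystem.system
// because the CodeSystem resource does not exist in that FHIR version.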
@Override
public SearchParamSet<BaseResourceIndexedSearchParam> extractSearchParamSpecial(IBaseResource theResource) {
String resourceTypeName = toRootTypeName(theResource);
IExtractor<BaseResourceIndexedSearchParam> extractor = createSpecialExtractor(resourceTypeName);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.SPECIAL, false);
}
private IExtractor<BaseResourceIndexedSearchParam> createSpecialExtractor(String theResourceTypeName) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
if (COORDS_INDEX_PATHS.contains(path)) {
addCoords_Position(theResourceTypeName, params, searchParam, value);
}
};
}
private void addUnexpectedDatatypeWarning(SearchParamSet<?> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
theParams.addWarning("Search param " + theSearchParam.getName() + " is of unexpected datatype: " + theValue.getClass());
}
@Override
public SearchParamSet<ResourceIndexedSearchParamUri> extractSearchParamUri(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamUri> extractor = createUriExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.URI, false);
}
private IExtractor<ResourceIndexedSearchParamUri> createUriExtractor(IBaseResource theResource) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
String nextType = toRootTypeName(value);
String resourceType = toRootTypeName(theResource);
switch (nextType) {
case "uri":
case "url":
case "oid":
case "sid":
case "uuid":
addUri_Uri(resourceType, params, searchParam, value);
break;
default:
addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
};
}
@Override
public SearchParamSet<ResourceIndexedSearchParamDate> extractSearchParamDates(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamDate> extractor = createDateExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.DATE, false);
}
private IExtractor<ResourceIndexedSearchParamDate> createDateExtractor(IBaseResource theResource) {
return new DateExtractor(theResource);
}
@Override
public Date extractDateFromResource(IBase theValue, String thePath) {
DateExtractor extractor = new DateExtractor("DateType");
return extractor.get(theValue, thePath, false).getValueHigh();
}
@Override
public SearchParamSet<ResourceIndexedSearchParamNumber> extractSearchParamNumber(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamNumber> extractor = createNumberExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.NUMBER, false);
}
private IExtractor<ResourceIndexedSearchParamNumber> createNumberExtractor(IBaseResource theResource) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
String nextType = toRootTypeName(value);
String resourceType = toRootTypeName(theResource);
switch (nextType) {
case "Duration":
addNumber_Duration(resourceType, params, searchParam, value);
break;
case "Quantity":
addNumber_Quantity(resourceType, params, searchParam, value);
break;
case "integer":
case "positiveInt":
case "unsignedInt":
addNumber_Integer(resourceType, params, searchParam, value);
break;
case "decimal":
addNumber_Decimal(resourceType, params, searchParam, value);
break;
default:
addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
};
}
@Override
public SearchParamSet<ResourceIndexedSearchParamQuantity> extractSearchParamQuantity(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamQuantity> extractor = createQuantityExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false);
}
@Override
public SearchParamSet<ResourceIndexedSearchParamQuantityNormalized> extractSearchParamQuantityNormalized(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamQuantityNormalized> extractor = createQuantityNormalizedExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false);
}
private IExtractor<ResourceIndexedSearchParamQuantity> createQuantityExtractor(IBaseResource theResource) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
if (value.getClass().equals(myLocationPositionDefinition.getImplementingClass())) {
return;
}
String nextType = toRootTypeName(value);
String resourceType = toRootTypeName(theResource);
switch (nextType) {
case "Quantity":
addQuantity_Quantity(resourceType, params, searchParam, value);
break;
case "Money":
addQuantity_Money(resourceType, params, searchParam, value);
break;
case "Range":
addQuantity_Range(resourceType, params, searchParam, value);
break;
default:
addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
};
}
private IExtractor<ResourceIndexedSearchParamQuantityNormalized> createQuantityNormalizedExtractor(IBaseResource theResource) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
if (value.getClass().equals(myLocationPositionDefinition.getImplementingClass())) {
return;
}
String nextType = toRootTypeName(value);
String resourceType = toRootTypeName(theResource);
switch (nextType) {
case "Quantity":
addQuantity_QuantityNormalized(resourceType, params, searchParam, value);
break;
case "Money":
addQuantity_MoneyNormalized(resourceType, params, searchParam, value);
break;
case "Range":
addQuantity_RangeNormalized(resourceType, params, searchParam, value);
break;
default:
addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
};
}
@Override
public SearchParamSet<ResourceIndexedSearchParamString> extractSearchParamStrings(IBaseResource theResource) {
IExtractor<ResourceIndexedSearchParamString> extractor = createStringExtractor(theResource);
return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.STRING, false);
}
private IExtractor<ResourceIndexedSearchParamString> createStringExtractor(IBaseResource theResource) {
return (params, searchParam, value, path, theWantLocalReferences) -> {
String resourceType = toRootTypeName(theResource);
if (value instanceof IPrimitiveType) {
IPrimitiveType<?> nextValue = (IPrimitiveType<?>) value;
String valueAsString = nextValue.getValueAsString();
createStringIndexIfNotBlank(resourceType, params, searchParam, valueAsString);
return;
}
String nextType = toRootTypeName(value);
switch (nextType) {
case "HumanName":
addString_HumanName(resourceType, params, searchParam, value);
break;
case "Address":
addString_Address(resourceType, params, searchParam, value);
break;
case "ContactPoint":
addString_ContactPoint(resourceType, params, searchParam, value);
break;
case "Quantity":
addString_Quantity(resourceType, params, searchParam, value);
break;
case "Range":
addString_Range(resourceType, params, searchParam, value);
break;
default:
addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
};
}
/**
* Override parent because we're using FHIRPath here
*/
@Override
public List<IBase> extractValues(String thePaths, IBaseResource theResource) {
List<IBase> values = new ArrayList<>();
if (isNotBlank(thePaths)) {
String[] nextPathsSplit = split(thePaths);
for (String nextPath : nextPathsSplit) {
List<? extends IBase> allValues;
// This path is hard to parse and isn't likely to produce anything useful anyway
if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU2)) {
if (nextPath.equals("Bundle.entry.resource(0)")) {
continue;
}
}
nextPath = trim(nextPath);
IValueExtractor allValuesFunc = getPathValueExtractor(theResource, nextPath);
try {
allValues = allValuesFunc.get();
} catch (Exception e) {
String msg = getContext().getLocalizer().getMessage(BaseSearchParamExtractor.class, "failedToExtractPaths", nextPath, e.toString());
throw new InternalErrorException(msg, e);
}
values.addAll(allValues);
}
for (int i = 0; i < values.size(); i++) {
IBase nextObject = values.get(i);
if (nextObject instanceof IBaseExtension) {
IBaseExtension nextExtension = (IBaseExtension) nextObject;
nextObject = nextExtension.getValue();
values.set(i, nextObject);
}
}
}
return values;
}
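// Descriptive note (added): because the loop above unwraps IBaseExtension values, a FHIRPath expression
// that selects an Extension element contributes the extension's value[x] to the index rather than the
// Extension wrapper itself.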
protected FhirContext getContext() {
return myContext;
}
@VisibleForTesting
public void setContext(FhirContext theContext) {
myContext = theContext;
}
protected ModelConfig getModelConfig() {
return myModelConfig;
}
@VisibleForTesting
public void setModelConfig(ModelConfig theModelConfig) {
myModelConfig = theModelConfig;
}
@VisibleForTesting
public void setSearchParamRegistry(ISearchParamRegistry theSearchParamRegistry) {
mySearchParamRegistry = theSearchParamRegistry;
}
private Collection<RuntimeSearchParam> getSearchParams(IBaseResource theResource) {
RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
Collection<RuntimeSearchParam> retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
List<RuntimeSearchParam> defaultList = Collections.emptyList();
retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
return retVal;
}
private void addQuantity_Quantity(String theResourceType, Set<ResourceIndexedSearchParamQuantity> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IPrimitiveType<BigDecimal>> valueField = myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue);
if (valueField.isPresent() && valueField.get().getValue() != null) {
BigDecimal nextValueValue = valueField.get().getValue();
String system = extractValueAsString(myQuantitySystemValueChild, theValue);
String code = extractValueAsString(myQuantityCodeValueChild, theValue);
ResourceIndexedSearchParamQuantity nextEntity = new ResourceIndexedSearchParamQuantity(myPartitionSettings, theResourceType, theSearchParam.getName(), nextValueValue, system, code);
theParams.add(nextEntity);
}
}
private void addQuantity_QuantityNormalized(String theResourceType, Set<ResourceIndexedSearchParamQuantityNormalized> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IPrimitiveType<BigDecimal>> valueField = myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue);
if (valueField.isPresent() && valueField.get().getValue() != null) {
BigDecimal nextValueValue = valueField.get().getValue();
String system = extractValueAsString(myQuantitySystemValueChild, theValue);
String code = extractValueAsString(myQuantityCodeValueChild, theValue);
//-- convert the value/unit to the canonical form if any
Pair canonicalForm = UcumServiceUtil.getCanonicalForm(system, nextValueValue, code);
if (canonicalForm != null) {
double canonicalValue = Double.parseDouble(canonicalForm.getValue().asDecimal());
String canonicalUnits = canonicalForm.getCode();
ResourceIndexedSearchParamQuantityNormalized nextEntity = new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, theResourceType, theSearchParam.getName(), canonicalValue, system, canonicalUnits);
theParams.add(nextEntity);
}
}
}
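// Illustrative note (added, assuming the standard UCUM system URI http://unitsofmeasure.org): a quantity
// of 100 "cm" would typically canonicalize to value 1 with canonical units "m", so searches expressed in
// different UCUM units of the same dimension can match the same normalized index row.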
private void addQuantity_Money(String theResourceType, Set<ResourceIndexedSearchParamQuantity> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IPrimitiveType<BigDecimal>> valueField = myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue);
if (valueField.isPresent() && valueField.get().getValue() != null) {
BigDecimal nextValueValue = valueField.get().getValue();
String nextValueString = "urn:iso:std:iso:4217";
String nextValueCode = extractValueAsString(myMoneyCurrencyChild, theValue);
String searchParamName = theSearchParam.getName();
ResourceIndexedSearchParamQuantity nextEntity = new ResourceIndexedSearchParamQuantity(myPartitionSettings, theResourceType, searchParamName, nextValueValue, nextValueString, nextValueCode);
theParams.add(nextEntity);
}
}
private void addQuantity_MoneyNormalized(String theResourceType, Set<ResourceIndexedSearchParamQuantityNormalized> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IPrimitiveType<BigDecimal>> valueField = myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue);
if (valueField.isPresent() && valueField.get().getValue() != null) {
BigDecimal nextValueValue = valueField.get().getValue();
String nextValueString = "urn:iso:std:iso:4217";
String nextValueCode = extractValueAsString(myMoneyCurrencyChild, theValue);
String searchParamName = theSearchParam.getName();
ResourceIndexedSearchParamQuantityNormalized nextEntityNormalized = new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, theResourceType, searchParamName, nextValueValue.doubleValue(), nextValueString, nextValueCode);
theParams.add(nextEntityNormalized);
}
}
private void addQuantity_Range(String theResourceType, Set<ResourceIndexedSearchParamQuantity> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IBase> low = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue);
low.ifPresent(theIBase -> addQuantity_Quantity(theResourceType, theParams, theSearchParam, theIBase));
Optional<IBase> high = myRangeHighValueChild.getAccessor().getFirstValueOrNull(theValue);
high.ifPresent(theIBase -> addQuantity_Quantity(theResourceType, theParams, theSearchParam, theIBase));
}
private void addQuantity_RangeNormalized(String theResourceType, Set<ResourceIndexedSearchParamQuantityNormalized> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Optional<IBase> low = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue);
low.ifPresent(theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase));
Optional<IBase> high = myRangeHighValueChild.getAccessor().getFirstValueOrNull(theValue);
high.ifPresent(theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase));
}
private void addToken_Identifier(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
String system = extractValueAsString(myIdentifierSystemValueChild, theValue);
String value = extractValueAsString(myIdentifierValueValueChild, theValue);
if (isNotBlank(value)) {
createTokenIndexIfNotBlank(theResourceType, theParams, theSearchParam, system, value);
}
if (shouldIndexTextComponentOfToken(theSearchParam)) {
Optional<IBase> type = myIdentifierTypeValueChild.getAccessor().getFirstValueOrNull(theValue);
if (type.isPresent()) {
String text = extractValueAsString(myIdentifierTypeTextValueChild, type.get());
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
}
}
}
protected boolean shouldIndexTextComponentOfToken(RuntimeSearchParam theSearchParam) {
return tokenTextIndexingEnabledForSearchParam(myModelConfig, theSearchParam);
}
private void addToken_CodeableConcept(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IBase> codings = getCodingsFromCodeableConcept(theValue);
for (IBase nextCoding : codings) {
addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding);
}
if (shouldIndexTextComponentOfToken(theSearchParam)) {
String text = getDisplayTextFromCodeableConcept(theValue);
if (isNotBlank(text)) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
}
}
}
@Override
public List<IBase> getCodingsFromCodeableConcept(IBase theValue) {
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
if ("CodeableConcept".equals(nextType)) {
return myCodeableConceptCodingValueChild.getAccessor().getValues(theValue);
} else {
return null;
}
}
@Override
public String getDisplayTextFromCodeableConcept(IBase theValue) {
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
if ("CodeableConcept".equals(nextType)) {
return extractValueAsString(myCodeableConceptTextValueChild, theValue);
} else {
return null;
}
}
private void addToken_Coding(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
ResourceIndexedSearchParamToken resourceIndexedSearchParamToken = createSearchParamForCoding(theResourceType, theSearchParam, theValue);
if (resourceIndexedSearchParamToken != null) {
theParams.add(resourceIndexedSearchParamToken);
}
if (shouldIndexTextComponentOfToken(theSearchParam)) {
String text = getDisplayTextForCoding(theValue);
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
}
}
@Override
public ResourceIndexedSearchParamToken createSearchParamForCoding(String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue) {
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
if ("Coding".equals(nextType)) {
String system = extractValueAsString(myCodingSystemValueChild, theValue);
String code = extractValueAsString(myCodingCodeValueChild, theValue);
return createTokenIndexIfNotBlank(theResourceType, theSearchParam, system, code);
} else {
return null;
}
}
@Override
public String getDisplayTextForCoding(IBase theValue) {
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
if ("Coding".equals(nextType)) {
return extractValueAsString(myCodingDisplayValueChild, theValue);
} else {
return null;
}
}
private void addToken_ContactPoint(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
String system = extractValueAsString(myContactPointSystemValueChild, theValue);
String value = extractValueAsString(myContactPointValueValueChild, theValue);
createTokenIndexIfNotBlank(theResourceType, theParams, theSearchParam, system, value);
}
private void addToken_PatientCommunication(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IBase> values = myPatientCommunicationLanguageValueChild.getAccessor().getValues(theValue);
for (IBase next : values) {
addToken_CodeableConcept(theResourceType, theParams, theSearchParam, next);
}
}
private void addToken_CapabilityStatementRestSecurity(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IBase> values = myCapabilityStatementRestSecurityServiceValueChild.getAccessor().getValues(theValue);
for (IBase nextValue : values) {
addToken_CodeableConcept(theResourceType, theParams, theSearchParam, nextValue);
}
}
private void addDate_Period(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Date start = extractValueAsDate(myPeriodStartValueChild, theValue);
String startAsString = extractValueAsString(myPeriodStartValueChild, theValue);
Date end = extractValueAsDate(myPeriodEndValueChild, theValue);
String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
if (start != null || end != null) {
if (start == null) {
start = myModelConfig.getPeriodIndexStartOfTime().getValue();
startAsString = myModelConfig.getPeriodIndexStartOfTime().getValueAsString();
}
if (end == null) {
end = myModelConfig.getPeriodIndexEndOfTime().getValue();
endAsString = myModelConfig.getPeriodIndexEndOfTime().getValueAsString();
}
ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString);
theParams.add(nextEntity);
}
}
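// Descriptive note (added): a half-open Period (for example one with only start = 2021-01-01) is indexed
// with the missing boundary filled in from ModelConfig#getPeriodIndexStartOfTime() /
// getPeriodIndexEndOfTime(), so date range queries against open-ended periods still have concrete
// low/high values to compare against.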
private void addDate_Timing(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IPrimitiveType<Date>> values = extractValuesAsFhirDates(myTimingEventValueChild, theValue);
TreeSet<Date> dates = new TreeSet<>();
TreeSet<String> dateStrings = new TreeSet<>();
String firstValue = null;
String finalValue = null;
for (IPrimitiveType<Date> nextEvent : values) {
if (nextEvent.getValue() != null) {
dates.add(nextEvent.getValue());
if (firstValue == null) {
firstValue = nextEvent.getValueAsString();
}
finalValue = nextEvent.getValueAsString();
}
}
Optional<IBase> repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue);
if (repeat.isPresent()) {
Optional<IBase> bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get());
if (bounds.isPresent()) {
String boundsType = toRootTypeName(bounds.get());
if ("Period".equals(boundsType)) {
Date start = extractValueAsDate(myPeriodStartValueChild, bounds.get());
Date end = extractValueAsDate(myPeriodEndValueChild, bounds.get());
String endString = extractValueAsString(myPeriodEndValueChild, bounds.get());
dates.add(start);
dates.add(end);
//TODO Check if this logic is valid. Does the start of the first period indicate a lower bound??
if (firstValue == null) {
firstValue = extractValueAsString(myPeriodStartValueChild, bounds.get());
}
finalValue = endString;
}
}
}
if (!dates.isEmpty()) {
ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue);
theParams.add(nextEntity);
}
}
private void addNumber_Duration(String theResourceType, Set<ResourceIndexedSearchParamNumber> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
String system = extractValueAsString(myDurationSystemValueChild, theValue);
String code = extractValueAsString(myDurationCodeValueChild, theValue);
BigDecimal value = extractValueAsBigDecimal(myDurationValueValueChild, theValue);
if (value != null) {
value = normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(system, code, value);
ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), value);
theParams.add(nextEntity);
}
}
private void addNumber_Quantity(String theResourceType, Set<ResourceIndexedSearchParamNumber> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
BigDecimal value = extractValueAsBigDecimal(myQuantityValueValueChild, theValue);
if (value != null) {
String system = extractValueAsString(myQuantitySystemValueChild, theValue);
String code = extractValueAsString(myQuantityCodeValueChild, theValue);
value = normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(system, code, value);
ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), value);
theParams.add(nextEntity);
}
}
@SuppressWarnings("unchecked")
private void addNumber_Integer(String theResourceType, Set<ResourceIndexedSearchParamNumber> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
IPrimitiveType<Integer> value = (IPrimitiveType<Integer>) theValue;
if (value.getValue() != null) {
BigDecimal valueDecimal = new BigDecimal(value.getValue());
ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal);
theParams.add(nextEntity);
}
}
@SuppressWarnings("unchecked")
private void addNumber_Decimal(String theResourceType, Set<ResourceIndexedSearchParamNumber> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
IPrimitiveType<BigDecimal> value = (IPrimitiveType<BigDecimal>) theValue;
if (value.getValue() != null) {
BigDecimal valueDecimal = value.getValue();
ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal);
theParams.add(nextEntity);
}
}
private void addCoords_Position(String theResourceType, SearchParamSet<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
BigDecimal latitude = null;
BigDecimal longitude = null;
if (theValue instanceof org.hl7.fhir.dstu3.model.Location.LocationPositionComponent) {
org.hl7.fhir.dstu3.model.Location.LocationPositionComponent value = (org.hl7.fhir.dstu3.model.Location.LocationPositionComponent) theValue;
latitude = value.getLatitude();
longitude = value.getLongitude();
} else if (theValue instanceof org.hl7.fhir.r4.model.Location.LocationPositionComponent) {
org.hl7.fhir.r4.model.Location.LocationPositionComponent value = (org.hl7.fhir.r4.model.Location.LocationPositionComponent) theValue;
latitude = value.getLatitude();
longitude = value.getLongitude();
} else if (theValue instanceof org.hl7.fhir.r5.model.Location.LocationPositionComponent) {
org.hl7.fhir.r5.model.Location.LocationPositionComponent value = (org.hl7.fhir.r5.model.Location.LocationPositionComponent) theValue;
latitude = value.getLatitude();
longitude = value.getLongitude();
}
// We only accept coordinates when both are present
if (latitude != null && longitude != null) {
double normalizedLatitude = GeopointNormalizer.normalizeLatitude(latitude.doubleValue());
double normalizedLongitude = GeopointNormalizer.normalizeLongitude(longitude.doubleValue());
ResourceIndexedSearchParamCoords nextEntity = new ResourceIndexedSearchParamCoords(myPartitionSettings, theResourceType, theSearchParam.getName(), normalizedLatitude, normalizedLongitude);
theParams.add(nextEntity);
}
}
private void addString_HumanName(String theResourceType, Set<ResourceIndexedSearchParamString> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<BaseRuntimeChildDefinition> myHumanNameChildren = Arrays.asList(myHumanNameFamilyValueChild, myHumanNameGivenValueChild, myHumanNameTextValueChild, myHumanNamePrefixValueChild, myHumanNameSuffixValueChild);
for (BaseRuntimeChildDefinition theChild : myHumanNameChildren) {
List<String> indices = extractValuesAsStrings(theChild, theValue);
for (String next : indices) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, next);
}
}
}
private void addString_Quantity(String theResourceType, Set<ResourceIndexedSearchParamString> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
BigDecimal value = extractValueAsBigDecimal(myQuantityValueValueChild, theValue);
if (value != null) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, value.toPlainString());
}
}
private void addString_Range(String theResourceType, Set<ResourceIndexedSearchParamString> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
BigDecimal value = extractValueAsBigDecimal(myRangeLowValueChild, theValue);
if (value != null) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, value.toPlainString());
}
}
private void addString_ContactPoint(String theResourceType, Set<ResourceIndexedSearchParamString> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
String value = extractValueAsString(myContactPointValueValueChild, theValue);
if (isNotBlank(value)) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, value);
}
}
private void addString_Address(String theResourceType, Set<ResourceIndexedSearchParamString> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<String> allNames = new ArrayList<>(extractValuesAsStrings(myAddressLineValueChild, theValue));
String city = extractValueAsString(myAddressCityValueChild, theValue);
if (isNotBlank(city)) {
allNames.add(city);
}
String state = extractValueAsString(myAddressStateValueChild, theValue);
if (isNotBlank(state)) {
allNames.add(state);
}
String country = extractValueAsString(myAddressCountryValueChild, theValue);
if (isNotBlank(country)) {
allNames.add(country);
}
String postalCode = extractValueAsString(myAddressPostalCodeValueChild, theValue);
if (isNotBlank(postalCode)) {
allNames.add(postalCode);
}
for (String nextName : allNames) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, nextName);
}
}
private <T> SearchParamSet<T> extractSearchParams(IBaseResource theResource, IExtractor<T> theExtractor, RestSearchParameterTypeEnum theSearchParamType, boolean theWantLocalReferences) {
SearchParamSet<T> retVal = new SearchParamSet<>();
Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
cleanUpContainedResourceReferences(theResource, theSearchParamType, searchParams);
for (RuntimeSearchParam nextSpDef : searchParams) {
if (nextSpDef.getParamType() != theSearchParamType) {
continue;
}
extractSearchParam(nextSpDef, theResource, theExtractor, retVal, theWantLocalReferences);
}
return retVal;
}
/**
* Helper function to determine if any search parameter defined for a resource uses resolve() as part of its FHIRPath expression.
*/
private boolean anySearchParameterUsesResolve(Collection<RuntimeSearchParam> searchParams, RestSearchParameterTypeEnum theSearchParamType) {
return searchParams.stream()
.filter(param -> param.getParamType() != theSearchParamType)
.map(RuntimeSearchParam::getPath)
.filter(Objects::nonNull)
.anyMatch(path -> path.contains("resolve"));
}
/**
* HAPI FHIR Reference objects (e.g. {@link org.hl7.fhir.r4.model.Reference}) can hold references either by text
* (e.g. "#3") or by resource (e.g. "new Reference(patientInstance)"). The FHIRPath evaluator only understands the
* first way, so if there is any chance of the FHIRPath evaluator needing to descend across references, we
* have to assign values to those references before indexing.
* <p>
* Doing this cleanup isn't hugely expensive, but it's not completely free either, so we only do it
* if we think there's actually a chance it will be needed.
*/
private void cleanUpContainedResourceReferences(IBaseResource theResource, RestSearchParameterTypeEnum theSearchParamType, Collection<RuntimeSearchParam> searchParams) {
boolean havePathWithResolveExpression =
myModelConfig.isIndexOnContainedResources()
|| anySearchParameterUsesResolve(searchParams, theSearchParamType);
if (havePathWithResolveExpression && myContext.getParserOptions().isAutoContainReferenceTargetsWithNoId()) {
//TODO GGG/JA: At this point, if the Task.basedOn.reference.resource does _not_ have an ID, we will attempt to contain it internally. Wild
myContext.newTerser().containResources(theResource, FhirTerser.OptionsEnum.MODIFY_RESOURCE, FhirTerser.OptionsEnum.STORE_AND_REUSE_RESULTS);
}
}
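// Illustrative note (added): a search parameter path that benefits from this cleanup typically contains a
// resolve() step, e.g. something like "Observation.subject.where(resolve() is Patient)", which asks the
// FHIRPath evaluator to follow the reference into the target resource.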
private <T> void extractSearchParam(RuntimeSearchParam theSearchParameterDef, IBaseResource theResource, IExtractor<T> theExtractor, SearchParamSet<T> theSetToPopulate, boolean theWantLocalReferences) {
String nextPathUnsplit = theSearchParameterDef.getPath();
if (isBlank(nextPathUnsplit)) {
return;
}
String[] splitPaths = split(nextPathUnsplit);
for (String nextPath : splitPaths) {
nextPath = trim(nextPath);
for (IBase nextObject : extractValues(nextPath, theResource)) {
if (nextObject != null) {
String typeName = toRootTypeName(nextObject);
if (!myIgnoredForSearchDatatypes.contains(typeName)) {
theExtractor.extract(theSetToPopulate, theSearchParameterDef, nextObject, nextPath, theWantLocalReferences);
}
}
}
}
}
@Override
public String toRootTypeName(IBase nextObject) {
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
BaseRuntimeElementDefinition<?> rootParentDefinition = elementDefinition.getRootParentDefinition();
return rootParentDefinition.getName();
}
@Override
public String toTypeName(IBase nextObject) {
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
return elementDefinition.getName();
}
private void addUri_Uri(String theResourceType, Set<ResourceIndexedSearchParamUri> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
IPrimitiveType<?> value = (IPrimitiveType<?>) theValue;
String valueAsString = value.getValueAsString();
if (isNotBlank(valueAsString)) {
ResourceIndexedSearchParamUri nextEntity = new ResourceIndexedSearchParamUri(myPartitionSettings, theResourceType, theSearchParam.getName(), valueAsString);
theParams.add(nextEntity);
}
}
@SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
private void createStringIndexIfNotBlank(String theResourceType, Set<? extends BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theValue) {
String value = theValue;
if (isNotBlank(value)) {
if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
String searchParamName = theSearchParam.getName();
String valueNormalized = StringUtil.normalizeStringForSearchIndexing(value);
String valueEncoded = theSearchParam.encode(valueNormalized);
if (valueEncoded.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
valueEncoded = valueEncoded.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
}
ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(myPartitionSettings, getModelConfig(), theResourceType, searchParamName, valueEncoded, value);
Set params = theParams;
params.add(nextEntity);
}
}
private void createTokenIndexIfNotBlank(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
ResourceIndexedSearchParamToken nextEntity = createTokenIndexIfNotBlank(theResourceType, theSearchParam, theSystem, theValue);
if (nextEntity != null) {
theParams.add(nextEntity);
}
}
@VisibleForTesting
public void setPartitionSettings(PartitionSettings thePartitionSettings) {
myPartitionSettings = thePartitionSettings;
}
private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank(String theResourceType, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
String system = theSystem;
String value = theValue;
ResourceIndexedSearchParamToken nextEntity = null;
if (isNotBlank(system) || isNotBlank(value)) {
if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
system = system.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
}
if (value != null && value.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
value = value.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
}
nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, theSearchParam.getName(), system, value);
}
return nextEntity;
}
@Override
public String[] split(String thePaths) {
if (getContext().getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) {
if (!thePaths.contains("|")) {
return new String[]{thePaths};
}
return splitPathsR4(thePaths);
} else {
if (!thePaths.contains("|") && !thePaths.contains(" or ")) {
return new String[]{thePaths};
}
return SPLIT.split(thePaths);
}
}
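// Illustrative note (added): in the pre-R4 branch the SPLIT pattern declared at the top of this class does
// the work, so an assumed composite expression such as "Patient.name | Patient.address" (or the older
// "Patient.name or Patient.address" form) is divided into two paths, which callers subsequently trim;
// R4+ expressions are delegated to splitPathsR4(..) instead.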
private BigDecimal normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(String theSystem, String theCode, BigDecimal theValue) {
if (SearchParamConstants.UCUM_NS.equals(theSystem)) {
if (isNotBlank(theCode)) {
Unit<? extends Quantity> unit = Unit.valueOf(theCode);
javax.measure.converter.UnitConverter dayConverter = unit.getConverterTo(NonSI.DAY);
double dayValue = dayConverter.convert(theValue.doubleValue());
theValue = new BigDecimal(dayValue);
}
}
return theValue;
}
@PostConstruct
public void start() {
myIgnoredForSearchDatatypes = new HashSet<>();
addIgnoredType(getContext(), "Annotation", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "Attachment", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "Count", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "Distance", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "Ratio", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "SampledData", myIgnoredForSearchDatatypes);
addIgnoredType(getContext(), "Signature", myIgnoredForSearchDatatypes);
/*
* This is building up an internal map of all the various field accessors we'll need in order to work
* with the model. This is kind of ugly, but we want to be as efficient as possible since
* search param extraction happens a whole heck of a lot at runtime.
*/
BaseRuntimeElementCompositeDefinition<?> quantityDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Quantity");
myQuantityValueValueChild = quantityDefinition.getChildByName("value");
myQuantitySystemValueChild = quantityDefinition.getChildByName("system");
myQuantityCodeValueChild = quantityDefinition.getChildByName("code");
BaseRuntimeElementCompositeDefinition<?> moneyDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Money");
myMoneyValueChild = moneyDefinition.getChildByName("value");
myMoneyCurrencyChild = moneyDefinition.getChildByName("currency");
BaseRuntimeElementCompositeDefinition<?> locationDefinition = getContext().getResourceDefinition("Location");
BaseRuntimeChildDefinition locationPositionValueChild = locationDefinition.getChildByName("position");
myLocationPositionDefinition = (BaseRuntimeElementCompositeDefinition<?>) locationPositionValueChild.getChildByName("position");
BaseRuntimeElementCompositeDefinition<?> codeSystemDefinition;
if (getContext().getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
codeSystemDefinition = getContext().getResourceDefinition("CodeSystem");
assert codeSystemDefinition != null;
myCodeSystemUrlValueChild = codeSystemDefinition.getChildByName("url");
}
BaseRuntimeElementCompositeDefinition<?> rangeDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Range");
myRangeLowValueChild = rangeDefinition.getChildByName("low");
myRangeHighValueChild = rangeDefinition.getChildByName("high");
BaseRuntimeElementCompositeDefinition<?> addressDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Address");
myAddressLineValueChild = addressDefinition.getChildByName("line");
myAddressCityValueChild = addressDefinition.getChildByName("city");
myAddressStateValueChild = addressDefinition.getChildByName("state");
myAddressCountryValueChild = addressDefinition.getChildByName("country");
myAddressPostalCodeValueChild = addressDefinition.getChildByName("postalCode");
if (getContext().getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
BaseRuntimeElementCompositeDefinition<?> capabilityStatementDefinition = getContext().getResourceDefinition("CapabilityStatement");
BaseRuntimeChildDefinition capabilityStatementRestChild = capabilityStatementDefinition.getChildByName("rest");
BaseRuntimeElementCompositeDefinition<?> capabilityStatementRestDefinition = (BaseRuntimeElementCompositeDefinition<?>) capabilityStatementRestChild.getChildByName("rest");
BaseRuntimeChildDefinition capabilityStatementRestSecurityValueChild = capabilityStatementRestDefinition.getChildByName("security");
BaseRuntimeElementCompositeDefinition<?> capabilityStatementRestSecurityDefinition = (BaseRuntimeElementCompositeDefinition<?>) capabilityStatementRestSecurityValueChild.getChildByName("security");
myCapabilityStatementRestSecurityServiceValueChild = capabilityStatementRestSecurityDefinition.getChildByName("service");
}
BaseRuntimeElementCompositeDefinition<?> periodDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Period");
myPeriodStartValueChild = periodDefinition.getChildByName("start");
myPeriodEndValueChild = periodDefinition.getChildByName("end");
BaseRuntimeElementCompositeDefinition<?> timingDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Timing");
myTimingEventValueChild = timingDefinition.getChildByName("event");
myTimingRepeatValueChild = timingDefinition.getChildByName("repeat");
BaseRuntimeElementCompositeDefinition<?> timingRepeatDefinition = (BaseRuntimeElementCompositeDefinition<?>) myTimingRepeatValueChild.getChildByName("repeat");
myTimingRepeatBoundsValueChild = timingRepeatDefinition.getChildByName("bounds[x]");
BaseRuntimeElementCompositeDefinition<?> durationDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Duration");
myDurationSystemValueChild = durationDefinition.getChildByName("system");
myDurationCodeValueChild = durationDefinition.getChildByName("code");
myDurationValueValueChild = durationDefinition.getChildByName("value");
BaseRuntimeElementCompositeDefinition<?> humanNameDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("HumanName");
myHumanNameFamilyValueChild = humanNameDefinition.getChildByName("family");
myHumanNameGivenValueChild = humanNameDefinition.getChildByName("given");
myHumanNameTextValueChild = humanNameDefinition.getChildByName("text");
myHumanNamePrefixValueChild = humanNameDefinition.getChildByName("prefix");
myHumanNameSuffixValueChild = humanNameDefinition.getChildByName("suffix");
BaseRuntimeElementCompositeDefinition<?> contactPointDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("ContactPoint");
myContactPointValueValueChild = contactPointDefinition.getChildByName("value");
myContactPointSystemValueChild = contactPointDefinition.getChildByName("system");
BaseRuntimeElementCompositeDefinition<?> identifierDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Identifier");
myIdentifierSystemValueChild = identifierDefinition.getChildByName("system");
myIdentifierValueValueChild = identifierDefinition.getChildByName("value");
myIdentifierTypeValueChild = identifierDefinition.getChildByName("type");
BaseRuntimeElementCompositeDefinition<?> identifierTypeDefinition = (BaseRuntimeElementCompositeDefinition<?>) myIdentifierTypeValueChild.getChildByName("type");
myIdentifierTypeTextValueChild = identifierTypeDefinition.getChildByName("text");
BaseRuntimeElementCompositeDefinition<?> codeableConceptDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("CodeableConcept");
myCodeableConceptCodingValueChild = codeableConceptDefinition.getChildByName("coding");
myCodeableConceptTextValueChild = codeableConceptDefinition.getChildByName("text");
BaseRuntimeElementCompositeDefinition<?> codingDefinition = (BaseRuntimeElementCompositeDefinition<?>) getContext().getElementDefinition("Coding");
myCodingSystemValueChild = codingDefinition.getChildByName("system");
myCodingCodeValueChild = codingDefinition.getChildByName("code");
myCodingDisplayValueChild = codingDefinition.getChildByName("display");
BaseRuntimeElementCompositeDefinition<?> patientDefinition = getContext().getResourceDefinition("Patient");
BaseRuntimeChildDefinition patientCommunicationValueChild = patientDefinition.getChildByName("communication");
BaseRuntimeElementCompositeDefinition<?> patientCommunicationDefinition = (BaseRuntimeElementCompositeDefinition<?>) patientCommunicationValueChild.getChildByName("communication");
myPatientCommunicationLanguageValueChild = patientCommunicationDefinition.getChildByName("language");
}
@FunctionalInterface
public interface IValueExtractor {
List<? extends IBase> get() throws FHIRException;
}
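	// Each IExtractor implementation below receives one extracted element value
	// together with the FHIRPath it was found at, and appends zero or more index
	// entries (or warnings) to the supplied SearchParamSet. Concrete extractors
	// follow for references, dates, tokens, and a composite wrapper.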
@FunctionalInterface
private interface IExtractor<T> {
void extract(SearchParamSet<T> theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences);
}
private class ResourceLinkExtractor implements IExtractor<PathAndRef> {
private PathAndRef myPathAndRef = null;
@Override
public void extract(SearchParamSet<PathAndRef> theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) {
if (theValue instanceof IBaseResource) {
return;
}
String nextType = toRootTypeName(theValue);
switch (nextType) {
case "uri":
case "canonical":
String typeName = toTypeName(theValue);
IPrimitiveType<?> valuePrimitive = (IPrimitiveType<?>) theValue;
IBaseReference fakeReference = (IBaseReference) myContext.getElementDefinition("Reference").newInstance();
fakeReference.setReference(valuePrimitive.getValueAsString());
// Canonical has a root type of "uri"
if ("canonical".equals(typeName)) {
/*
* See #1583
* Technically canonical fields should not allow local references (e.g.
* Questionnaire/123) but it seems reasonable for us to interpret a canonical
* containing a local reference for what it is, and allow people to search
* based on that.
*/
IIdType parsed = fakeReference.getReferenceElement();
if (parsed.hasIdPart() && parsed.hasResourceType() && !parsed.isAbsolute()) {
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, fakeReference, false);
theParams.add(myPathAndRef);
break;
}
if (parsed.isAbsolute()) {
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, fakeReference, true);
theParams.add(myPathAndRef);
break;
}
}
theParams.addWarning("Ignoring canonical reference (indexing canonical is not yet supported)");
break;
case "reference":
case "Reference":
IBaseReference valueRef = (IBaseReference) theValue;
IIdType nextId = valueRef.getReferenceElement();
if (nextId.isEmpty() && valueRef.getResource() != null) {
nextId = valueRef.getResource().getIdElement();
}
if (nextId == null ||
nextId.isEmpty() ||
nextId.getValue().startsWith("urn:")) {
return;
}
if (!theWantLocalReferences) {
if (nextId.getValue().startsWith("#"))
return;
}
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, valueRef, false);
theParams.add(myPathAndRef);
break;
default:
addUnexpectedDatatypeWarning(theParams, theSearchParam, theValue);
break;
}
}
private boolean isOrCanBeTreatedAsLocal(IIdType theId) {
boolean acceptableAsLocalReference = !theId.isAbsolute() || myModelConfig.getTreatBaseUrlsAsLocal().contains(theId.getBaseUrl());
return acceptableAsLocalReference;
}
public PathAndRef get(IBase theValue, String thePath) {
extract(new SearchParamSet<>(),
new RuntimeSearchParam(null, null, "Reference", null, null, null, null, null, null, null),
theValue, thePath, false);
return myPathAndRef;
}
}
private class DateExtractor implements IExtractor<ResourceIndexedSearchParamDate> {
String myResourceType;
ResourceIndexedSearchParamDate myIndexedSearchParamDate = null;
public DateExtractor(IBaseResource theResource) {
myResourceType = toRootTypeName(theResource);
}
public DateExtractor(String theResourceType) {
myResourceType = theResourceType;
}
@Override
		public void extract(SearchParamSet<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) {
String nextType = toRootTypeName(theValue);
switch (nextType) {
case "date":
case "dateTime":
case "instant":
addDateTimeTypes(myResourceType, theParams, theSearchParam, theValue);
break;
case "Period":
addDate_Period(myResourceType, theParams, theSearchParam, theValue);
break;
case "Timing":
addDate_Timing(myResourceType, theParams, theSearchParam, theValue);
break;
case "string":
// CarePlan.activitydate can be a string - ignored for now
break;
default:
addUnexpectedDatatypeWarning(theParams, theSearchParam, theValue);
break;
}
}
private void addDate_Period(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
Date start = extractValueAsDate(myPeriodStartValueChild, theValue);
String startAsString = extractValueAsString(myPeriodStartValueChild, theValue);
Date end = extractValueAsDate(myPeriodEndValueChild, theValue);
String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
if (start != null || end != null) {
if (start == null) {
start = myModelConfig.getPeriodIndexStartOfTime().getValue();
startAsString = myModelConfig.getPeriodIndexStartOfTime().getValueAsString();
}
if (end == null) {
end = myModelConfig.getPeriodIndexEndOfTime().getValue();
endAsString = myModelConfig.getPeriodIndexEndOfTime().getValueAsString();
}
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString);
theParams.add(myIndexedSearchParamDate);
}
}
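		// A Timing is indexed as the span covering all explicit event dates plus,
		// when present, the start/end of a Period supplied as repeat.bounds[x].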
private void addDate_Timing(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IPrimitiveType<Date>> values = extractValuesAsFhirDates(myTimingEventValueChild, theValue);
TreeSet<Date> dates = new TreeSet<>();
String firstValue = null;
String finalValue = null;
for (IPrimitiveType<Date> nextEvent : values) {
if (nextEvent.getValue() != null) {
dates.add(nextEvent.getValue());
if (firstValue == null) {
firstValue = nextEvent.getValueAsString();
}
finalValue = nextEvent.getValueAsString();
}
}
Optional<IBase> repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue);
if (repeat.isPresent()) {
Optional<IBase> bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get());
if (bounds.isPresent()) {
String boundsType = toRootTypeName(bounds.get());
if ("Period".equals(boundsType)) {
Date start = extractValueAsDate(myPeriodStartValueChild, bounds.get());
Date end = extractValueAsDate(myPeriodEndValueChild, bounds.get());
if (start != null) {
dates.add(start);
}
if (end != null) {
dates.add(end);
}
}
}
}
if (!dates.isEmpty()) {
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue);
theParams.add(myIndexedSearchParamDate);
}
}
@SuppressWarnings("unchecked")
private void addDateTimeTypes(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
IPrimitiveType<Date> nextBaseDateTime = (IPrimitiveType<Date>) theValue;
if (nextBaseDateTime.getValue() != null) {
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString());
theParams.add(myIndexedSearchParamDate);
}
}
public ResourceIndexedSearchParamDate get(IBase theValue, String thePath, boolean theWantLocalReferences) {
extract(new SearchParamSet<>(),
new RuntimeSearchParam(null, null, "date", null, null, null, null, null, null, null),
theValue, thePath, theWantLocalReferences);
return myIndexedSearchParamDate;
}
}
private class TokenExtractor implements IExtractor<BaseResourceIndexedSearchParam> {
private final String myResourceTypeName;
private final String myUseSystem;
public TokenExtractor(String theResourceTypeName, String theUseSystem) {
myResourceTypeName = theResourceTypeName;
myUseSystem = theUseSystem;
}
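		// Dispatch order: bound enumerations (DSTU3+), BoundCodeDt (DSTU2), plain
		// primitives (with special-cased implicit systems for CodeSystem/ValueSet
		// concept codes), a handful of hard-coded paths, and finally by datatype
		// (Identifier, CodeableConcept, Coding, ContactPoint).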
@Override
public void extract(SearchParamSet<BaseResourceIndexedSearchParam> params, RuntimeSearchParam searchParam, IBase value, String path, boolean theWantLocalReferences) {
// DSTU3+
if (value instanceof IBaseEnumeration<?>) {
IBaseEnumeration<?> obj = (IBaseEnumeration<?>) value;
String system = extractSystem(obj);
String code = obj.getValueAsString();
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, system, code);
return;
}
// DSTU2 only
if (value instanceof BoundCodeDt) {
BoundCodeDt boundCode = (BoundCodeDt) value;
Enum valueAsEnum = boundCode.getValueAsEnum();
String system = null;
if (valueAsEnum != null) {
//noinspection unchecked
system = boundCode.getBinder().toSystemString(valueAsEnum);
}
String code = boundCode.getValueAsString();
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, system, code);
return;
}
if (value instanceof IPrimitiveType) {
IPrimitiveType<?> nextValue = (IPrimitiveType<?>) value;
String systemAsString = null;
String valueAsString = nextValue.getValueAsString();
if ("CodeSystem.concept.code".equals(path)) {
systemAsString = myUseSystem;
} else if ("ValueSet.codeSystem.concept.code".equals(path)) {
systemAsString = myUseSystem;
}
if (value instanceof IIdType) {
valueAsString = ((IIdType) value).getIdPart();
}
BaseSearchParamExtractor.this.createTokenIndexIfNotBlank(myResourceTypeName, params, searchParam, systemAsString, valueAsString);
return;
}
switch (path) {
case "Patient.communication":
BaseSearchParamExtractor.this.addToken_PatientCommunication(myResourceTypeName, params, searchParam, value);
return;
case "Consent.source":
// Consent#source-identifier has a path that isn't typed - This is a one-off to deal with that
return;
case "Location.position":
BaseSearchParamExtractor.this.addCoords_Position(myResourceTypeName, params, searchParam, value);
return;
case "StructureDefinition.context":
// TODO: implement this
ourLog.warn("StructureDefinition context indexing not currently supported");
return;
case "CapabilityStatement.rest.security":
BaseSearchParamExtractor.this.addToken_CapabilityStatementRestSecurity(myResourceTypeName, params, searchParam, value);
return;
}
String nextType = BaseSearchParamExtractor.this.toRootTypeName(value);
switch (nextType) {
case "Identifier":
BaseSearchParamExtractor.this.addToken_Identifier(myResourceTypeName, params, searchParam, value);
break;
case "CodeableConcept":
BaseSearchParamExtractor.this.addToken_CodeableConcept(myResourceTypeName, params, searchParam, value);
break;
case "Coding":
BaseSearchParamExtractor.this.addToken_Coding(myResourceTypeName, params, searchParam, value);
break;
case "ContactPoint":
BaseSearchParamExtractor.this.addToken_ContactPoint(myResourceTypeName, params, searchParam, value);
break;
default:
BaseSearchParamExtractor.this.addUnexpectedDatatypeWarning(params, searchParam, value);
break;
}
}
}
private static class CompositeExtractor<T> implements IExtractor<T> {
private final IExtractor<T> myExtractor0;
private final IExtractor<T> myExtractor1;
private CompositeExtractor(IExtractor<T> theExtractor0, IExtractor<T> theExtractor1) {
myExtractor0 = theExtractor0;
myExtractor1 = theExtractor1;
}
@Override
public void extract(SearchParamSet<T> theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) {
myExtractor0.extract(theParams, theSearchParam, theValue, thePath, theWantLocalReferences);
myExtractor1.extract(theParams, theSearchParam, theValue, thePath, theWantLocalReferences);
}
}
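	// Splits a composite search parameter path expression on the " |" separator
	// and trims each token; illustrative example:
	//   splitPathsR4("Patient.name | Patient.address.city")
	//     -> ["Patient.name", "Patient.address.city"]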
@Nonnull
public static String[] splitPathsR4(@Nonnull String thePaths) {
StringTokenizer tok = new StringTokenizer(thePaths, " |");
tok.setTrimmerMatcher(new StringTrimmingTrimmerMatcher());
return tok.getTokenArray();
}
public static boolean tokenTextIndexingEnabledForSearchParam(ModelConfig theModelConfig, RuntimeSearchParam theSearchParam) {
Optional<Boolean> noSuppressForSearchParam = theSearchParam.getExtensions(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING).stream()
.map(IBaseExtension::getValue)
.map(val -> (IPrimitiveType<?>) val)
.map(IPrimitiveType::getValueAsString)
.map(Boolean::parseBoolean)
.findFirst();
//if the SP doesn't care, use the system default.
if (!noSuppressForSearchParam.isPresent()) {
return !theModelConfig.isSuppressStringIndexingInTokens();
//If the SP does care, use its value.
} else {
boolean suppressForSearchParam = noSuppressForSearchParam.get();
ourLog.trace("Text indexing for SearchParameter {}: {}", theSearchParam.getName(), suppressForSearchParam);
return !suppressForSearchParam;
}
}
private static void addIgnoredType(FhirContext theCtx, String theType, Set<String> theIgnoredTypes) {
BaseRuntimeElementDefinition<?> elementDefinition = theCtx.getElementDefinition(theType);
if (elementDefinition != null) {
theIgnoredTypes.add(elementDefinition.getName());
}
}
private static String extractValueAsString(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
return theChildDefinition
.getAccessor()
.<IPrimitiveType<?>>getFirstValueOrNull(theElement)
.map(IPrimitiveType::getValueAsString)
.orElse(null);
}
private static Date extractValueAsDate(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
return theChildDefinition
.getAccessor()
.<IPrimitiveType<Date>>getFirstValueOrNull(theElement)
.map(IPrimitiveType::getValue)
.orElse(null);
}
private static BigDecimal extractValueAsBigDecimal(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
return theChildDefinition
.getAccessor()
.<IPrimitiveType<BigDecimal>>getFirstValueOrNull(theElement)
.map(IPrimitiveType::getValue)
.orElse(null);
}
@SuppressWarnings("unchecked")
private static List<IPrimitiveType<Date>> extractValuesAsFhirDates(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) {
return (List) theChildDefinition
.getAccessor()
.getValues(theElement);
}
private static List<String> extractValuesAsStrings(BaseRuntimeChildDefinition theChildDefinition, IBase theValue) {
return theChildDefinition
.getAccessor()
.getValues(theValue)
.stream()
.map(t -> (IPrimitiveType) t)
.map(IPrimitiveType::getValueAsString)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toList());
}
private static <T extends Enum<?>> String extractSystem(IBaseEnumeration<T> theBoundCode) {
if (theBoundCode.getValue() != null) {
return theBoundCode.getEnumFactory().toSystem(theBoundCode.getValue());
}
return null;
}
}
|
|
package tests.visual;
import java.awt.Color;
import java.awt.ComponentOrientation;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Event;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.SystemColor;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.FileNotFoundException;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JTextField;
import javax.swing.JToolBar;
import javax.swing.JTree;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.UIManager;
import javax.swing.border.BevelBorder;
import javax.swing.border.TitledBorder;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeModel;
import javax.xml.bind.JAXBException;
import treeIterators.ITreeIterator;
import treeIterators.TreeIteratorOrden;
import treeUtils.BinaryTree;
import treeUtils.BinaryTreeNode;
import treeUtils.IBinaryTree;
import treeUtils.TreeNode;
import util.MyIO;
import util.UtilFunctions;
import util.XMLManagger;
public class VisualTestString {
private JFrame jFrame = null; // @jve:decl-index=0:visual-constraint="10,10"
private JPanel jContentPane = null;
private JMenuBar jJMenuBar = null;
private JMenu fileMenu = null;
private JMenu editMenu = null;
private JMenuItem exitMenuItem = null;
private JMenuItem cutMenuItem = null;
private JMenuItem copyMenuItem = null;
private JMenuItem pasteMenuItem = null;
private JMenuItem saveMenuItem = null;
private TreeModel treeModel= null;
private IBinaryTree<String> arbol; // @jve:decl-index=0:
private JTree jTree = null;
private DefaultMutableTreeNode root1 = null;
private JScrollPane jScrollPane = null;
private JToolBar jJToolBarBar = null;
private JButton jBtnNext = null;
private JButton jBtnPrev = null;
private JButton jBtnRmv = null;
private JButton jBtnAdd = null;
private JPanel jPnlStatus = null;
private JLabel jLblStatus = null;
private JLabel jLabel = null;
private JLabel jLabel1 = null;
private JLabel jLblPrevio = null;
private JLabel jLblID = null;
private ITreeIterator<String> iterArbol = null;
private boolean isLoadedIterator = false;
private boolean justDelete = true;
private JLabel jLabel2 = null;
private JMenuItem loadMenuItem = null;
private JMenu jLoadMenu = null;
private JMenuItem loadMenuItem2 = null;
private JMenuItem makeMenuItem = null;
private JToolBar jToolBar = null;
private JButton jBtnSet = null;
private JMenuItem jMenuItem = null;
private JPanel jPanelAdd = null; // @jve:decl-index=0:visual-constraint="414,79"
private JRadioButton jRadioButton = null;
private JRadioButton jRadioButton1 = null;
private JLabel jLabel3 = null;
private JTextField jTextField = null;
private JPanel jPanel1 = null;
private JPanel jPanel2 = null;
private ButtonGroup jBtnGroup; // @jve:decl-index=0:
protected DefaultMutableTreeNode createNewNode(TreeNode<String> node, boolean isLeft) {
if(node==null)
{
node = new BinaryTreeNode<String>("*");
}
return new DynamicTreeNode(node,isLeft);
}
/**
* This method initializes jTree
*
* @return javax.swing.JTree
*/
private JTree getJTree1() {
if (jTree == null) {
jTree = new JTree();
jTree.setEnabled(true);
jTree.setShowsRootHandles(true);
jTree.setComponentOrientation(ComponentOrientation.LEFT_TO_RIGHT);
jTree.setForeground(SystemColor.control);
jTree.setBackground(SystemColor.control);
jTree.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
}
SampleTreeModel sampleTreeModel = new SampleTreeModel(getRoot1());
jTree.setModel(sampleTreeModel);
jTree.setCellRenderer(new SampleTreeCellRenderer());
return jTree;
}
/**
* This method initializes arbol1
*
* @return treeUtils.BinaryTree
*/
private IBinaryTree getArbol1() {
if (arbol == null) {
arbol = UtilFunctions.getStringBinTree();
}
return arbol;
}
/**
* This method initializes root1
*
* @return javax.swing.tree.DefaultMutableTreeNode
*/
private DefaultMutableTreeNode getRoot1() {
root1 = createNewNode(getArbol1().getRoot(),false);
return root1;
}
/**
* This method initializes jScrollPane
*
* @return javax.swing.JScrollPane
*/
private JScrollPane getJScrollPane() {
if (jScrollPane == null) {
jScrollPane = new JScrollPane();
jScrollPane.setViewportView(getJTree1());
}
return jScrollPane;
}
/**
* This method initializes jJToolBarBar
*
* @return javax.swing.JToolBar
*/
private JToolBar getJJToolBarBar() {
if (jJToolBarBar == null) {
jJToolBarBar = new JToolBar();
jJToolBarBar.setLayout(new BoxLayout(getJJToolBarBar(), BoxLayout.X_AXIS));
jJToolBarBar.add(getJBtnNext());
jJToolBarBar.add(getJBtnPrev());
jJToolBarBar.add(getJBtnRmv());
jJToolBarBar.add(getJBtnAdd());
jJToolBarBar.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
jJToolBarBar.add(getJBtnSet());
}
return jJToolBarBar;
}
/**
* This method initializes jBtnNext
*
* @return javax.swing.JButton
*/
private JButton getJBtnNext() {
if (jBtnNext == null) {
jBtnNext = new JButton();
jBtnNext.setText("Next");
jBtnNext.setEnabled(false);
jBtnNext.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("next"); // TODO Auto-generated Event stub actionPerformed()
next();
}
});
}
return jBtnNext;
}
/**
* This method initializes jBtnPrev
*
* @return javax.swing.JButton
*/
private JButton getJBtnPrev() {
if (jBtnPrev == null) {
jBtnPrev = new JButton();
jBtnPrev.setText("Prev");
jBtnPrev.setEnabled(false);
jBtnPrev.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("prev()"); // TODO Auto-generated Event stub actionPerformed()
previous();
}
});
}
return jBtnPrev;
}
/**
* This method initializes jBtnRmv
*
* @return javax.swing.JButton
*/
private JButton getJBtnRmv() {
if (jBtnRmv == null) {
jBtnRmv = new JButton();
jBtnRmv.setText("Remove");
jBtnRmv.setEnabled(false);
jBtnRmv.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("eliminando nodo()");
String eliminado = remove();
JOptionPane.showMessageDialog(null, "Ha sido eliminado el nodo: "+ eliminado);
					//create the deletion message
getJTree1();
}
});
}
return jBtnRmv;
}
/**
	 * This method initializes jBtnGroup
	 *
	 * @return javax.swing.ButtonGroup
*/
private ButtonGroup getButtonGroup()
{
if(jBtnGroup == null)
{jBtnGroup = new ButtonGroup();
jBtnGroup.add(jRadioButton);
jBtnGroup.add(jRadioButton1);
}
return jBtnGroup;
}
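	/**
	 * This method initializes jBtnAdd
	 *
	 * @return javax.swing.JButton
	 */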
private JButton getJBtnAdd() {
if (jBtnAdd == null) {
jBtnAdd = new JButton();
jBtnAdd.setText("Add");
jBtnAdd.setEnabled(false);
jBtnAdd.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("agregar nuevo nodo"); // TODO Auto-generated Event stub actionPerformed()
boolean isLeft = true;
String newNode = "";
getJPanelAdd();
boolean cancel = false;
while(!cancel)
				{	int value = JOptionPane.showConfirmDialog(null, jPanelAdd, "Crear un nuevo nodo", JOptionPane.OK_CANCEL_OPTION);
					if(value == JOptionPane.OK_OPTION && !jTextField.getText().isEmpty())
					{
						newNode = jTextField.getText();
						cancel = true;
					} else if(value != JOptionPane.OK_OPTION) {
						cancel = true;
					}
}
if(jRadioButton1.isSelected())
isLeft=false;
if(!newNode.isEmpty())
{
add(newNode, isLeft);
getJTree1();
}
jTextField.setText("");
}
});
}
return jBtnAdd;
}
/**
* This method initializes jPnlStatus
*
* @return javax.swing.JPanel
*/
private JPanel getJPnlStatus() {
if (jPnlStatus == null) {
GridBagConstraints gridBagConstraints21 = new GridBagConstraints();
gridBagConstraints21.gridx = 1;
gridBagConstraints21.gridy = 1;
jLabel2 = new JLabel();
jLabel2.setText("Orden");
GridBagConstraints gridBagConstraints11 = new GridBagConstraints();
gridBagConstraints11.gridwidth = 4;
GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
gridBagConstraints3.gridx = 2;
gridBagConstraints3.fill = GridBagConstraints.VERTICAL;
gridBagConstraints3.insets = new Insets(2, 16, 0, 16);
gridBagConstraints3.gridwidth = 1;
gridBagConstraints3.gridy = 3;
jLblID = new JLabel();
jLblID.setText("-1");
GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
gridBagConstraints2.gridx = 2;
gridBagConstraints2.fill = GridBagConstraints.VERTICAL;
gridBagConstraints2.insets = new Insets(2, 16, 0, 16);
gridBagConstraints2.gridwidth = 1;
gridBagConstraints2.gridy = 2;
jLblPrevio = new JLabel();
jLblPrevio.setText("ninguno");
GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
gridBagConstraints1.gridx = 0;
gridBagConstraints1.fill = GridBagConstraints.VERTICAL;
gridBagConstraints1.insets = new Insets(2, 16, 0, 16);
gridBagConstraints1.gridwidth = 1;
gridBagConstraints1.gridy = 3;
jLabel1 = new JLabel();
jLabel1.setText("ID");
GridBagConstraints gridBagConstraints = new GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.fill = GridBagConstraints.VERTICAL;
gridBagConstraints.insets = new Insets(2, 16, 0, 16);
gridBagConstraints.gridwidth = 1;
gridBagConstraints.gridy = 2;
jLabel = new JLabel();
jLabel.setText("Previo");
jLblStatus = new JLabel();
jLblStatus.setText("Estado Actual del Recorrido");
jPnlStatus = new JPanel();
jPnlStatus.setLayout(new GridBagLayout());
jPnlStatus.add(jLblStatus, gridBagConstraints11);
jPnlStatus.add(jLabel, gridBagConstraints);
jPnlStatus.add(jLabel1, gridBagConstraints1);
jPnlStatus.add(jLblPrevio, gridBagConstraints2);
jPnlStatus.add(jLblID, gridBagConstraints3);
jPnlStatus.add(jLabel2, gridBagConstraints21);
}
return jPnlStatus;
}
/**
* This method initializes loadMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getLoadMenuItem() {
if (loadMenuItem == null) {
loadMenuItem = new JMenuItem();
loadMenuItem.setText("Cargar Arbol");
loadMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Event.CTRL_MASK, true));
loadMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("cargar arbol from file");
try {
loadXMLTree();
System.out.println("cargado");
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (JAXBException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
getJTree1();
}
});
}
return loadMenuItem;
}
/**
* This method initializes jLoadMenu
*
* @return javax.swing.JMenu
*/
private JMenu getJLoadMenu() {
if (jLoadMenu == null) {
jLoadMenu = new JMenu();
jLoadMenu.setText("Cargar Arbol");
jLoadMenu.add(getLoadMenuItem());
jLoadMenu.add(getLoadMenuItem2());
}
return jLoadMenu;
}
/**
* This method initializes loadMenuItem2
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getLoadMenuItem2() {
if (loadMenuItem2 == null) {
loadMenuItem2 = new JMenuItem();
loadMenuItem2.setText("Cargar Arbol de Prueba");
loadMenuItem2.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Event.CTRL_MASK, true));
loadMenuItem2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("load default tree"); // TODO Auto-generated Event stub actionPerformed()
arbol = null;
getJTree1();
}
});
}
return loadMenuItem2;
}
/**
* This method initializes makeMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getMakeMenuItem() {
if (makeMenuItem == null) {
makeMenuItem = new JMenuItem();
makeMenuItem.setText("Crear nuevo Arbol");
makeMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_N, Event.CTRL_MASK, true));
makeMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("new tree"); // TODO Auto-generated Event stub actionPerformed()
					//ask the user for the root of the tree
BinaryTreeNode<String> newRoot = new BinaryTreeNode<String>("borrame");
arbol = new BinaryTree<String>(newRoot);
getJTree1();
}
});
}
return makeMenuItem;
}
/**
* This method initializes jToolBar
*
* @return javax.swing.JToolBar
*/
private JToolBar getJToolBar() {
if (jToolBar == null) {
jToolBar = new JToolBar();
jToolBar.add(getJScrollPane());
}
return jToolBar;
}
/**
* This method initializes jBtnSet
*
* @return javax.swing.JButton
*/
private JButton getJBtnSet() {
if (jBtnSet == null) {
jBtnSet = new JButton();
jBtnSet.setEnabled(false);
jBtnSet.setText("Set");
jBtnSet.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("set()"); // TODO Auto-generated Event stub actionPerformed()
String newNode = "";
while(newNode.length()==0)
newNode = JOptionPane.showInputDialog("Inserte el nuevo nodo: ");
String oldNode = set(newNode);
JOptionPane.showMessageDialog(null, "Ha sido sobreescrito el nodo: "+ oldNode);
getJTree1();
}
});
}
return jBtnSet;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getJMenuItem() {
if (jMenuItem == null) {
jMenuItem = new JMenuItem();
jMenuItem.setText("time test");
jMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("time test()"); // TODO Auto-generated Event stub actionPerformed()
UtilFunctions.traverseIter(arbol);
UtilFunctions.traverseDaemon(arbol);
}
});
}
return jMenuItem;
}
/**
	 * This method initializes jPanelAdd
*
* @return javax.swing.JPanel
*/
private JPanel getJPanelAdd() {
if (jPanelAdd == null) {
GridBagConstraints gridBagConstraints12 = new GridBagConstraints();
gridBagConstraints12.gridx = 0;
gridBagConstraints12.gridwidth = 3;
gridBagConstraints12.fill = GridBagConstraints.BOTH;
gridBagConstraints12.insets = new Insets(11, 6, 0, 6);
gridBagConstraints12.gridy = 3;
GridBagConstraints gridBagConstraints8 = new GridBagConstraints();
gridBagConstraints8.fill = GridBagConstraints.BOTH;
gridBagConstraints8.gridx = 0;
gridBagConstraints8.gridy = 2;
GridBagConstraints gridBagConstraints7 = new GridBagConstraints();
gridBagConstraints7.fill = GridBagConstraints.BOTH;
gridBagConstraints7.gridy = 0;
gridBagConstraints7.weightx = 1.0;
gridBagConstraints7.insets = new Insets(13, 8, 0, 6);
gridBagConstraints7.gridx = 1;
GridBagConstraints gridBagConstraints6 = new GridBagConstraints();
gridBagConstraints6.gridx = 0;
gridBagConstraints6.insets = new Insets(12, 6, 0, 0);
gridBagConstraints6.gridy = 0;
jLabel3 = new JLabel();
jLabel3.setText("Informacion del Nodo");
jPanelAdd = new JPanel();
jPanelAdd.setLayout(new GridBagLayout());
jPanelAdd.setSize(new Dimension(281, 138));
jPanelAdd.setBorder(BorderFactory.createTitledBorder(BorderFactory.createBevelBorder(BevelBorder.RAISED), "Complete el formulario", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, new Font("Dialog", Font.BOLD, 12), new Color(51, 51, 51)));
jPanelAdd.add(jLabel3, gridBagConstraints6);
jPanelAdd.add(getJTextField(), gridBagConstraints7);
jPanelAdd.add(getJPanel2(), gridBagConstraints12);
getButtonGroup();
}
return jPanelAdd;
}
/**
* This method initializes jRadioButton
*
* @return javax.swing.JRadioButton
*/
private JRadioButton getJRadioButton() {
if (jRadioButton == null) {
jRadioButton = new JRadioButton();
jRadioButton.setText("Izquierda");
jRadioButton.setSelected(true);
}
return jRadioButton;
}
/**
* This method initializes jRadioButton1
*
* @return javax.swing.JRadioButton
*/
private JRadioButton getJRadioButton1() {
if (jRadioButton1 == null) {
jRadioButton1 = new JRadioButton();
jRadioButton1.setText("Derecha");
}
return jRadioButton1;
}
/**
* This method initializes jTextField
*
* @return javax.swing.JTextField
*/
private JTextField getJTextField() {
if (jTextField == null) {
jTextField = new JTextField();
}
return jTextField;
}
/**
* This method initializes jPanel2
*
* @return javax.swing.JPanel
*/
private JPanel getJPanel2() {
if (jPanel2 == null) {
GridBagConstraints gridBagConstraints10 = new GridBagConstraints();
gridBagConstraints10.gridx = 1;
gridBagConstraints10.insets = new Insets(0, 11, 0, 11);
gridBagConstraints10.gridy = 0;
GridBagConstraints gridBagConstraints9 = new GridBagConstraints();
gridBagConstraints9.gridx = 0;
gridBagConstraints9.gridy = 0;
jPanel2 = new JPanel();
jPanel2.setLayout(new GridBagLayout());
jPanel2.setBorder(BorderFactory.createTitledBorder(null, "Seleccione donde insertar", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, new Font("Dialog", Font.BOLD, 12), new Color(51, 51, 51)));
jPanel2.add(getJRadioButton(), gridBagConstraints9);
jPanel2.add(getJRadioButton1(), gridBagConstraints10);
}
return jPanel2;
}
/**
* @param args
*/
public static void main(String[] args) {
// TODO Auto-generated method stub
SwingUtilities.invokeLater(new Runnable() {
public void run() {
VisualTestString application = new VisualTestString();
application.getJFrame().setVisible(true);
}
});
}
/**
* This method initializes jFrame
*
* @return javax.swing.JFrame
*/
private JFrame getJFrame() {
if (jFrame == null) {
jFrame = new JFrame();
jFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
jFrame.setSize(new Dimension(248, 370));
jFrame.setJMenuBar(getJJMenuBar());
jFrame.setMaximumSize(new Dimension(248, 370));
jFrame.setMinimumSize(new Dimension(248,180));
jFrame.setContentPane(getJContentPane());
jFrame.setTitle("TreeIterator");
jFrame.setResizable(true);
try {
UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
ToolTipManager.sharedInstance().registerComponent(jTree);
} catch (Exception exc) {
System.err.println("Error loading L&F: " + exc);
}
}
return jFrame;
}
/**
* This method initializes jContentPane
*
* @return javax.swing.JPanel
*/
private JPanel getJContentPane() {
if (jContentPane == null) {
jContentPane = new JPanel();
jContentPane.setLayout(new BoxLayout(getJContentPane(), BoxLayout.Y_AXIS));
jContentPane.add(getJJToolBarBar(), null);
jContentPane.add(getJPnlStatus(), null);
jContentPane.add(getJToolBar(), null);
}
return jContentPane;
}
/**
* This method initializes jJMenuBar
*
* @return javax.swing.JMenuBar
*/
private JMenuBar getJJMenuBar() {
if (jJMenuBar == null) {
jJMenuBar = new JMenuBar();
jJMenuBar.add(getFileMenu());
jJMenuBar.add(getEditMenu());
}
return jJMenuBar;
}
/**
* This method initializes jMenu
*
* @return javax.swing.JMenu
*/
private JMenu getFileMenu() {
if (fileMenu == null) {
fileMenu = new JMenu();
fileMenu.setText("General");
fileMenu.add(getJLoadMenu());
fileMenu.add(getSaveMenuItem());
fileMenu.add(new JSeparator());
fileMenu.add(getExitMenuItem());
fileMenu.add(getMakeMenuItem());
// fileMenu.add(getJMenuItem());
}
return fileMenu;
}
/**
* This method initializes jMenu
*
* @return javax.swing.JMenu
*/
private JMenu getEditMenu() {
if (editMenu == null) {
editMenu = new JMenu();
editMenu.setText("Activar Iterador");
editMenu.add(getCutMenuItem());
editMenu.add(getCopyMenuItem());
editMenu.add(getPasteMenuItem());
}
return editMenu;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getExitMenuItem() {
if (exitMenuItem == null) {
exitMenuItem = new JMenuItem();
exitMenuItem.setText("Salir");
exitMenuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
System.exit(0);
}
});
}
return exitMenuItem;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getCutMenuItem() {
if (cutMenuItem == null) {
cutMenuItem = new JMenuItem();
cutMenuItem.setText("PreOrden");
cutMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_P,
Event.CTRL_MASK, true));
cutMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("activando Preorden"); // TODO Auto-generated Event stub actionPerformed()
loadPREIterator();
}
});
}
return cutMenuItem;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getCopyMenuItem() {
if (copyMenuItem == null) {
copyMenuItem = new JMenuItem();
copyMenuItem.setText("EntreOrden");
copyMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_T,
Event.CTRL_MASK, true));
copyMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("activando EntreOrden"); // TODO Auto-generated Event stub actionPerformed()
loadENTREIterator();
}
});
}
return copyMenuItem;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getPasteMenuItem() {
if (pasteMenuItem == null) {
pasteMenuItem = new JMenuItem();
pasteMenuItem.setText("PosOrden");
pasteMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S,
Event.CTRL_MASK, true));
pasteMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("activando PosOrden"); // TODO Auto-generated Event stub actionPerformed()
loadPOSIterator();
}
});
}
return pasteMenuItem;
}
/**
* This method initializes jMenuItem
*
* @return javax.swing.JMenuItem
*/
private JMenuItem getSaveMenuItem() {
if (saveMenuItem == null) {
saveMenuItem = new JMenuItem();
saveMenuItem.setText("Guardar Arbol");
saveMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S,
Event.CTRL_MASK, true));
saveMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
System.out.println("saving tree in xml file"); // TODO Auto-generated Event stub actionPerformed()
try {
saveXMLTree();
System.out.println("saved");
} catch (JAXBException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
}
});
}
return saveMenuItem;
}
	//----------- Interaction methods
private String remove()
{
String result = iterArbol.remove();
justDelete = true;
activateControls();
return result;
}
private String next()
{
String result = iterArbol.nextInfo();
justDelete = false;
activateControls();
drawPath(result);
		if("PRE".equals(jLabel2.getText()))
jTree.expandRow(iterArbol.previousIndex());
return result;
}
private String previous()
{
jLblID.setText(String.valueOf(iterArbol.previousIndex()));
String result = iterArbol.previousInfo();
jLblPrevio.setText(result.toString());
justDelete = false;
activateControls();
		if("PRE".equals(jLabel2.getText()))
jTree.collapseRow(iterArbol.nextIndex());
return result;
}
/**
* @param result
*/
private void drawPath(String result) {
jLblPrevio.setText(result.toString());
jLblID.setText(String.valueOf(iterArbol.previousIndex()));
}
private void add(String newNode,boolean isLeft)
{
iterArbol.add(newNode,isLeft);
activateControls();
}
private String set(String newNode) {
String oldInfo = iterArbol.set(newNode);
activateControls();
return oldInfo;
}
/**
	 * Initializes the interface to use a newly loaded iterator
*/
private void init() {
isLoadedIterator =true;
activateControls();
restartPath();
}
private void loadXMLTree() throws FileNotFoundException, JAXBException
{
File file = MyIO.seeDialog(false);
		if(file!=null && file.exists() && file.length()!=0)
{
arbol = (BinaryTree<String>) XMLManagger.loadXML(arbol,file.getAbsolutePath());
arbol.getRoot();
}
}
private void saveXMLTree() throws JAXBException, FileNotFoundException
{
File file = MyIO.seeDialog(true);
if(file!=null)
XMLManagger.saveXML(arbol, file.getAbsolutePath());
}
private void restartPath()
{
jLblPrevio.setText("ninguno");
jLblID.setText("-1");
justDelete = true;
}
private void loadPREIterator()
{//new TreeIteratorPRE<String>(arbol);
iterArbol = arbol.treeIterator(TreeIteratorOrden.PreOrden);
jLabel2.setText("PRE");
init();
}
private void loadENTREIterator()
{
iterArbol = arbol.treeIterator(TreeIteratorOrden.EntreOrden);
jLabel2.setText("ENTRE");
init();
}
private void loadPOSIterator()
{
iterArbol = arbol.treeIterator(TreeIteratorOrden.PosOrden);
jLabel2.setText("POS");
init();
}
private void activateControls()
{
jBtnRmv.setEnabled(isLoadedIterator);
jBtnAdd.setEnabled(isLoadedIterator);
jBtnNext.setEnabled(isLoadedIterator);
jBtnPrev.setEnabled(isLoadedIterator);
if(isLoadedIterator)
{
jBtnNext.setEnabled(iterArbol.hasNext());
jBtnPrev.setEnabled(iterArbol.hasPrevious());
if(!jBtnNext.isEnabled() && !jBtnPrev.isEnabled())
justDelete=true;
jBtnRmv.setEnabled(!justDelete);
jBtnSet.setEnabled(!justDelete);
}
}
}
|
|
/*
***************************************************************************
* Mica - the Java(tm) Graphics Framework *
***************************************************************************
* NOTICE: Permission to use, copy, and modify this software and its *
* documentation is hereby granted provided that this notice appears in *
* all copies. *
* *
* Permission to distribute un-modified copies of this software and its *
* documentation is hereby granted provided that no fee is charged and *
* that this notice appears in all copies. *
* *
* SOFTWARE FARM MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE *
* SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT *
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR *
* A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. SOFTWARE FARM SHALL NOT BE *
* LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR *
* CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE, MODIFICATION OR *
* DISTRIBUTION OF THIS SOFTWARE OR ITS DERIVATIVES. *
* *
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND *
* DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, *
* UPDATES, ENHANCEMENTS, OR MODIFICATIONS. *
* *
***************************************************************************
* Copyright (c) 1997-2004 Software Farm, Inc. All Rights Reserved. *
***************************************************************************
*/
package com.swfm.mica;
/**----------------------------------------------------------------------------------------------
* This interface specifies all the built-in types of MiiActions.
* <p>
* New MiiAction types can be defined at runtime by using the following
* approach:
* <p><pre>
* protected static final int MY_EDIT_TEXT_ACTION_TYPE = MiActionManager.registerAction("myEditText");
*
* and this can be used thus:
*
* miPart.appendActionHandler(myActionHandler,
* new MiEvent(Mi_LEFT_MOUSE_DBLCLICK_EVENT), MY_EDIT_TEXT_ACTION_TYPE);
*
* </pre>
* MiiActions are dispatched to their MiiActionHandlers in 4 distinct phases:
* <p>
* 1. Mi_REQUEST_ACTION_PHASE
* <p>
* In this phase, the action is sent to all MiiActionHandlers which have requested
* interest in this action during this phase. Each MiiActionHandler has the opportunity
* to veto any further dispatch of the action.
*
* <p>
* 2. Mi_EXECUTE_ACTION_PHASE
* <p>
* In this phase, the action is sent to all MiiActionHandlers which have requested
 * interest in this action during this phase. Each MiiActionHandler can decide to
 * execute code either before, or instead of, the default code (the "instead of"
 * option is only rarely supported by the code that dispatches the MiiAction -
 * for example the MiDragAndDropManager or MiPart#copy).
*
* <p>
* 3. Mi_COMMIT_ACTION_PHASE
* <p>
* In this phase, the action is sent to all MiiActionHandlers which have requested
* interest in this action during this phase. This is the typical phase during
 * which the MiiActionHandler executes some code in response to the action's occurrence.
*
* <p>
* 4. Mi_CANCEL_ACTION_PHASE
* <p>
* In this phase, the action is sent to all MiiActionHandlers which have requested
 * interest in this action during this phase. This is the phase that the MiiAction
 * enters if it was vetoed during the request phase. This allows all MiiActionHandlers
 * to free any resources they may have allocated during the request phase.
*
* @see MiiAction
* @see MiiActionHandler
* @see MiPart#appendActionHandler
*
* @version %I% %G%
* @author Michael L. Davis
* @release 1.4.1
* @module %M%
* @language Java (JDK 1.4)
*----------------------------------------------------------------------------------------------*/
public interface MiiActionTypes
{
// ---------------------------------------------------------------
// 4 bits for action phases... these values leave room for 1 << 27
// action types.
// ---------------------------------------------------------------
int Mi_COMMIT_ACTION_PHASE = (1 << 28);
int Mi_REQUEST_ACTION_PHASE = (1 << 29);
int Mi_EXECUTE_ACTION_PHASE = (1 << 30);
int Mi_CANCEL_ACTION_PHASE = (1 << 31);
int Mi_ACTION_PHASE_MASK = (0xf << 28);
int Mi_ACTIONS_OF_PARTS_OF_OBSERVED = (1 << 27);
int Mi_ACTIONS_OF_OBSERVED = (1 << 26);
int Mi_ALL_ACTION_TYPES = 1;
int Mi_CREATE_ACTION = 2;
int Mi_DELETE_ACTION = 3;
int Mi_COPY_ACTION = 4;
int Mi_REPLACE_ACTION = 5;
int Mi_REMOVE_FROM_CONTAINER_ACTION = 6;
int Mi_ADD_TO_CONTAINER_ACTION = 7;
int Mi_DRAG_AND_DROP_PICKUP_ACTION = 8;
int Mi_DRAG_AND_DROP_MOVE_ACTION = 9;
int Mi_DRAG_AND_DROP_ENTER_ACTION = 10;
int Mi_DRAG_AND_DROP_EXIT_ACTION = 11;
int Mi_DRAG_AND_DROP_PAUSE_ACTION = 12;
int Mi_DRAG_AND_DROP_CONTINUE_ACTION = 13;
int Mi_DRAG_AND_DROP_CANCEL_ACTION = 14;
int Mi_DRAG_AND_DROP_COMMIT_ACTION = 15;
int Mi_DRAG_AND_DROP_VETO_ACTION = 16;
int Mi_SELECTED_ACTION = 17;
int Mi_DESELECTED_ACTION = 18;
int Mi_ACTIVATED_ACTION = 19;
int Mi_SELECT_REPEATED_ACTION = 20;
int Mi_GOT_MOUSE_FOCUS_ACTION = 21;
int Mi_LOST_MOUSE_FOCUS_ACTION = 22;
int Mi_GOT_KEYBOARD_FOCUS_ACTION = 23;
int Mi_LOST_KEYBOARD_FOCUS_ACTION = 24;
int Mi_GOT_ENTER_KEY_FOCUS_ACTION = 25;
int Mi_LOST_ENTER_KEY_FOCUS_ACTION = 26;
int Mi_MOUSE_ENTER_ACTION = 27;
int Mi_MOUSE_EXIT_ACTION = 28;
int Mi_INVISIBLE_ACTION = 29;
int Mi_VISIBLE_ACTION = 30;
int Mi_PART_SHOWING_ACTION = 31;
int Mi_PART_NOT_SHOWING_ACTION = 32;
int Mi_HIDDEN_ACTION = 33;
int Mi_UNHIDDEN_ACTION = 34;
int Mi_TEXT_CHANGE_ACTION = 35;
int Mi_MENU_POPPED_UP_ACTION = 36;
int Mi_MENU_POPPED_DOWN_ACTION = 37;
int Mi_TABBED_FOLDER_OPENED_ACTION = 38;
int Mi_TABBED_FOLDER_CLOSED_ACTION = 39;
int Mi_INVALID_VALUE_ACTION = 40;
int Mi_VALUE_CHANGED_ACTION = 41;
int Mi_ENTER_KEY_ACTION = 42;
int Mi_NODE_EXPANDED_ACTION = 43;
int Mi_NODE_COLLAPSED_ACTION = 44;
int Mi_ITEM_SELECTED_ACTION = 45;
int Mi_ITEM_DESELECTED_ACTION = 46;
int Mi_ITEM_BROWSED_ACTION = 47;
int Mi_ITEM_DEBROWSED_ACTION = 48;
int Mi_ITEM_ADDED_ACTION = 49;
int Mi_ITEM_REMOVED_ACTION = 50;
int Mi_ALL_ITEMS_SELECTED_ACTION = 51;
int Mi_ALL_ITEMS_DESELECTED_ACTION = 52;
int Mi_NO_ITEMS_SELECTED_ACTION = 53;
int Mi_ONE_ITEM_SELECTED_ACTION = 54;
int Mi_MANY_ITEMS_SELECTED_ACTION = 55;
int Mi_ITEM_SCROLLED_ACTION = 56;
int Mi_ITEMS_SCROLLED_AND_MAGNIFIED_ACTION = 57;
int Mi_EDITOR_VIEWPORT_CHANGED_ACTION = 58;
int Mi_EDITOR_WORLD_TRANSLATED_ACTION = 59;
int Mi_EDITOR_WORLD_RESIZED_ACTION = 60;
int Mi_EDITOR_DEVICE_TRANSLATED_ACTION = 61;
int Mi_EDITOR_DEVICE_RESIZED_ACTION = 62;
int Mi_EDITOR_UNIVERSE_RESIZED_ACTION = 63;
int Mi_EDITOR_CONTENTS_GEOMETRY_CHANGED_ACTION= 64;
int Mi_EDITOR_LAYER_ADDED_ACTION = 65;
int Mi_EDITOR_LAYER_REMOVED_ACTION = 66;
int Mi_EDITOR_LAYER_ORDER_CHANGED_ACTION = 67;
int Mi_EDITOR_CURRENT_LAYER_CHANGED_ACTION = 68;
int Mi_WINDOW_CLOSE_ACTION = 69;
int Mi_WINDOW_ICONIFY_ACTION = 70;
int Mi_WINDOW_DEICONIFY_ACTION = 71;
int Mi_WINDOW_OPEN_ACTION = 72;
int Mi_WINDOW_OK_ACTION = 73;
int Mi_WINDOW_CANCEL_ACTION = 74;
int Mi_WINDOW_HELP_ACTION = 75;
int Mi_WINDOW_FULLSCREEN_ACTION = 76;
int Mi_WINDOW_NORMALSIZE_ACTION = 77;
int Mi_WINDOW_DESTROY_ACTION = 78;
int Mi_CLIPBOARD_NOW_HAS_DATA_ACTION = 79;
int Mi_TRANSACTION_MANAGER_CHANGED_ACTION = 80;
int Mi_TRANSACTION_MANAGER_NEW_TRANSACTION_ACTION = 81;
int Mi_TRANSACTION_MANAGER_EXECUTION_START_UNDO_ACTION = 82;
int Mi_TRANSACTION_MANAGER_EXECUTION_END_UNDO_ACTION = 83;
int Mi_TRANSACTION_MANAGER_EXECUTION_START_REDO_ACTION = 84;
int Mi_TRANSACTION_MANAGER_EXECUTION_END_REDO_ACTION = 85;
int Mi_TRANSACTION_MANAGER_EXECUTION_START_REPEAT_ACTION = 86;
int Mi_TRANSACTION_MANAGER_EXECUTION_END_REPEAT_ACTION = 87;
int Mi_DATA_IMPORT_ACTION = 88;
int Mi_CONNECT_SOURCE_ACTION = 89;
int Mi_CONNECT_DESTINATION_ACTION = 90;
int Mi_CONNECT_ACTION = 91;
int Mi_DISCONNECT_SOURCE_ACTION = 92;
int Mi_DISCONNECT_DESTINATION_ACTION = 93;
int Mi_DISCONNECT_ACTION = 94;
int Mi_CREATED_CONNECTION_CONNECTION_POINT_ACTION = 95;
int Mi_DELETED_CONNECTION_CONNECTION_POINT_ACTION = 96;
int Mi_STATUS_BAR_FOCUS_CHANGED_ACTION = 97;
int Mi_ICONIFY_ACTION = 98;
int Mi_DEICONIFY_ACTION = 99;
int Mi_GROUP_ACTION = 100;
int Mi_UNGROUP_ACTION = 101;
int Mi_FORMAT_ACTION = 102;
int Mi_UNFORMAT_ACTION = 103;
int Mi_GEOMETRY_CHANGE_ACTION = 104;
int Mi_SIZE_CHANGE_ACTION = 105;
int Mi_POSITION_CHANGE_ACTION = 106;
int Mi_APPEARANCE_CHANGE_ACTION = 107;
int Mi_DRAW_ACTION = 108;
int Mi_DOCUMENT_CHANGE_ACTION = 109;
int Mi_DECREASE_ACTION = 110;
int Mi_INCREASE_ACTION = 111;
int Mi_PROPERTY_CHANGE_ACTION = 112;
int Mi_NAME_CHANGE_ACTION = 113;
int Mi_DISPLAY_PROPERTIES_ACTION = 114;
int Mi_INTERACTIVELY_MOVING_PART_ACTION = 115;
int Mi_INTERACTIVELY_COMPLETED_MOVE_PART_ACTION = 116;
int Mi_INTERACTIVELY_CANCELED_MOVE_PART_ACTION = 117;
int Mi_INTERACTIVELY_FAILED_MOVE_PART_ACTION = 118;
int Mi_INTERACTIVELY_CREATING_CONNECTION_ACTION = 119;
int Mi_INTERACTIVELY_COMPLETED_CREATE_CONNECTION_ACTION = 120;
int Mi_INTERACTIVELY_CANCELED_CREATE_CONNECTION_ACTION = 121;
int Mi_INTERACTIVELY_FAILED_CREATE_CONNECTION_ACTION = 122;
int Mi_INTERACTIVELY_COMPLETED_RUBBER_STAMP_PART_ACTION = 123;
int Mi_INTERACTIVELY_COMPLETED_DRAW_NEW_PART_PART_ACTION = 124;
int Mi_ACTION_TYPE_MASK = 0x03ff;
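	// Illustrative sketch (not part of the original interface): an action value
	// combines one phase bit with one type id, and the masks above recover each
	// part again:
	//   int combined = Mi_COMMIT_ACTION_PHASE | Mi_SELECTED_ACTION;
	//   int phase    = combined & Mi_ACTION_PHASE_MASK;  // == Mi_COMMIT_ACTION_PHASE
	//   int type     = combined & Mi_ACTION_TYPE_MASK;   // == Mi_SELECTED_ACTION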
}
|
|
package RemoteApp.view;
import RemoteApp.network.SocketConnection;
import RemoteApp.StartServer;
import RemoteApp.ThreadContainer;
import RemoteApp.network.MultiCastStatus;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.Toolkit;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import RemoteApp.Constants;
import RemoteApp.model.serializables.MultiCast;
import RemoteApp.network.NetworkInter;
import RemoteApp.network.socket.SocketView;
public class RemoteServerForm extends javax.swing.JFrame {
public static PopupMenu popup;
public static ActionListener listener;
private static Thread tempthread;
public RemoteServerForm() throws SocketException {
initComponents();
if(SystemTray.isSupported()) {
SystemTray tray = SystemTray.getSystemTray();
Image image = Toolkit.getDefaultToolkit().getImage("icon.png");
listener = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if(!e.getActionCommand().isEmpty()) {
if(e.getActionCommand().equals("show")) {
setVisible(true);
} else if(e.getActionCommand().equals("hide")) {
setVisible(false);
}
}
}
};
popup = new PopupMenu();
MenuItem showitem = new MenuItem("show");
MenuItem hideitem = new MenuItem("hide");
showitem.addActionListener(listener);
hideitem.addActionListener(listener);
popup.add(showitem);
popup.add(hideitem);
TrayIcon trayicon = new TrayIcon(image,"Remote Server",popup);
trayicon.addActionListener(listener);
try {
tray.add(trayicon);
} catch (AWTException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
}
}
        ArrayList<String> ip = NetworkInter.getAllIp();
for(String item : ip) {
serveradapters.addItem(item);
}
ThreadContainer.setMulticast(new MultiCastStatus());
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
startServerbtn = new javax.swing.JButton();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
passwordfield = new javax.swing.JTextField();
serverlabel = new javax.swing.JLabel();
serveradapters = new javax.swing.JComboBox();
jMenuBar1 = new javax.swing.JMenuBar();
jMenu1 = new javax.swing.JMenu();
jMenuItem1 = new javax.swing.JMenuItem();
jMenu2 = new javax.swing.JMenu();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
setResizable(false);
startServerbtn.setText("Start Server");
startServerbtn.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
startServerbtnActionPerformed(evt);
}
});
jLabel1.setText("select network interface");
jLabel2.setText("password");
serverlabel.setForeground(new java.awt.Color(255, 0, 0));
jMenu1.setText("File");
jMenuItem1.setText("Exit");
jMenuItem1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jMenuItem1ActionPerformed(evt);
}
});
jMenu1.add(jMenuItem1);
jMenuBar1.add(jMenu1);
jMenu2.setText("Edit");
jMenuBar1.add(jMenu2);
setJMenuBar(jMenuBar1);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jLabel1)
.addComponent(jLabel2)
.addComponent(passwordfield)
.addComponent(startServerbtn, javax.swing.GroupLayout.DEFAULT_SIZE, 118, Short.MAX_VALUE)
.addComponent(serverlabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(serveradapters, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap(26, Short.MAX_VALUE))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(44, 44, 44)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(serveradapters, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(38, 38, 38)
.addComponent(jLabel2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(passwordfield, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(45, 45, 45)
.addComponent(startServerbtn)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(serverlabel)
.addContainerGap(43, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void startServerbtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_startServerbtnActionPerformed
if(!(passwordfield.getText().isEmpty())) {
SocketConnection.setPassword(passwordfield.getText());
Constants.setpasswordRequired(true);
} else {
SocketConnection.setPassword("");
Constants.setpasswordRequired(false);
}
Constants.status = false;
serverlabel.setText("*server started");
startServerbtn.setEnabled(false);
SocketConnection.setHost(serveradapters.getSelectedItem().toString());
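        // Listen briefly on the 224.0.0.1:5900 multicast group to collect the
        // ports announced by other RemoteApp instances on the LAN (stored in the
        // Constants network map) before choosing this server's own port below.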
tempthread = new Thread() {
@Override
public void run() {
try {
String multicast = "224.0.0.1";
int port = 5900;
int bufsize = 1024*4;
InetAddress group = InetAddress.getByName(multicast);
MulticastSocket s = new MulticastSocket(port);
s.joinGroup(group);
while(!Thread.currentThread().isInterrupted()) {
byte[] buffer = new byte[bufsize];
s.receive(new DatagramPacket(buffer,bufsize,group,port));
ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
ObjectInputStream obin = new ObjectInputStream(bais);
Object ob = obin.readObject();
if(ob instanceof MultiCast) {
MultiCast mc = (MultiCast)ob;
System.out.println(mc.toString());
if(!(mc.getIp().equals(InetAddress.getLocalHost().getHostAddress()))) {
Constants.addNetworkMapKey(mc.getIp(), mc);
}
}
}
} catch (UnknownHostException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException | ClassNotFoundException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
} finally {
//interrupted!
}
}
};
tempthread.start();
try {
Thread.sleep(4000);
} catch (InterruptedException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
}
tempthread.interrupt();
setRandomAvailableport();
StartServer.startServer();
MenuItem serverstart = new MenuItem("Server listening on port "+Constants.getServerPort());
popup.addSeparator();
serverstart.setName("start");
serverstart.addActionListener(listener);
popup.add(serverstart);
}//GEN-LAST:event_startServerbtnActionPerformed
public static void startserver() {
if(!(passwordfield.getText().isEmpty())) {
SocketConnection.setPassword(passwordfield.getText());
Constants.setpasswordRequired(true);
} else {
SocketConnection.setPassword("");
Constants.setpasswordRequired(false);
}
serverlabel.setText("*server started");
startServerbtn.setEnabled(false);
SocketConnection.setHost(serveradapters.getSelectedItem().toString());
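        // Same multicast peer discovery as in startServerbtnActionPerformed above.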
tempthread = new Thread() {
@Override
public void run() {
try {
String multicast = "224.0.0.1";
int port = 5900;
int bufsize = 1024*4;
InetAddress group = InetAddress.getByName(multicast);
MulticastSocket s = new MulticastSocket(port);
s.joinGroup(group);
while(!Thread.currentThread().isInterrupted()) {
byte[] buffer = new byte[bufsize];
s.receive(new DatagramPacket(buffer,bufsize,group,port));
ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
ObjectInputStream obin = new ObjectInputStream(bais);
Object ob = obin.readObject();
if(ob instanceof MultiCast) {
MultiCast mc = (MultiCast)ob;
System.out.println(mc.toString());
if(!(mc.getIp().equals(InetAddress.getLocalHost().getHostAddress()))) {
Constants.addNetworkMapKey(mc.getIp(), mc);
}
}
}
} catch (UnknownHostException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException | ClassNotFoundException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
} finally {
//interrupted!
}
}
};
tempthread.start();
try {
Thread.sleep(4000);
} catch (InterruptedException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
}
tempthread.interrupt();
setRandomAvailableport();
StartServer.startServer();
Constants.status = false;
}
private static void setRandomAvailableport() {
boolean isfound = true;
while(isfound) {
SocketView.setDefaultServerSocket();
int port = SocketView.getServerSocketPort();
Iterator<Entry<String,MultiCast>> it = Constants.getnetworkMap().entrySet().iterator();
boolean same = false;
while(it.hasNext()) {
Entry<String,MultiCast> pair = it.next();
if(pair.getValue().getPort() == port) {
same = true;
}
}
if(same == false) {
isfound = false;
Constants.setServerPort(port);
}
}
try {
SocketView.closeServerSocket();
} catch (IOException ex) {
Logger.getLogger(RemoteServerForm.class.getName()).log(Level.SEVERE, null, ex);
}
}
private void jMenuItem1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem1ActionPerformed
System.exit(0);
}//GEN-LAST:event_jMenuItem1ActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JMenu jMenu1;
private javax.swing.JMenu jMenu2;
private javax.swing.JMenuBar jMenuBar1;
private javax.swing.JMenuItem jMenuItem1;
private static javax.swing.JTextField passwordfield;
public static javax.swing.JComboBox serveradapters;
public static javax.swing.JLabel serverlabel;
public static javax.swing.JButton startServerbtn;
// End of variables declaration//GEN-END:variables
}
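// ---------------------------------------------------------------------------
// Hedged sketch (not part of the original file): the announce-listener loop in
// startServerbtnActionPerformed() blocks inside MulticastSocket.receive(), so
// Thread.interrupt() alone cannot stop it and the socket is never closed. The
// class below sketches one way to make such a listener stoppable, using a
// receive timeout and closing the socket in a finally block. The group/port
// values mirror the ones used above; payload handling is a placeholder because
// it depends on the MultiCast class defined elsewhere in this project.
class MulticastListenerSketch implements Runnable {
    private volatile boolean running = true;

    public void stop() {
        running = false;
    }

    @Override
    public void run() {
        java.net.MulticastSocket socket = null;
        try {
            java.net.InetAddress group = java.net.InetAddress.getByName("224.0.0.1");
            socket = new java.net.MulticastSocket(5900);
            socket.joinGroup(group);
            socket.setSoTimeout(500); // wake up periodically so the running flag is re-checked
            byte[] buffer = new byte[4 * 1024];
            while (running) {
                java.net.DatagramPacket packet = new java.net.DatagramPacket(buffer, buffer.length);
                try {
                    socket.receive(packet);
                } catch (java.net.SocketTimeoutException timeout) {
                    continue; // no announcement in this interval
                }
                // Placeholder: the original code deserializes a MultiCast object here
                // via ObjectInputStream and stores it in Constants.
                System.out.println("announcement from " + packet.getAddress().getHostAddress());
            }
        } catch (java.io.IOException ex) {
            java.util.logging.Logger.getLogger(MulticastListenerSketch.class.getName())
                    .log(java.util.logging.Level.SEVERE, null, ex);
        } finally {
            if (socket != null) {
                socket.close(); // always release the socket, unlike the loop above
            }
        }
    }
}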
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.ConditionalWriterConfig;
import org.apache.accumulo.core.client.Durability;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.ConfigurationTypeHelper;
import org.apache.accumulo.core.conf.Property;
import org.junit.BeforeClass;
import org.junit.Test;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
public class ClientContextTest {
private static final String keystoreName = "/site-cfg.jceks";
// site-cfg.jceks={'ignored.property'=>'ignored', 'instance.secret'=>'mysecret',
// 'general.rpc.timeout'=>'timeout'}
private static File keystore;
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN",
justification = "provided keystoreUrl path isn't user provided")
@BeforeClass
public static void setUpBeforeClass() {
URL keystoreUrl = ClientContextTest.class.getResource(keystoreName);
assertNotNull("Could not find " + keystoreName, keystoreUrl);
keystore = new File(keystoreUrl.getFile());
}
protected String getKeyStoreUrl(File absoluteFilePath) {
return "jceks://file" + absoluteFilePath.getAbsolutePath();
}
@Test
public void loadSensitivePropertyFromCredentialProvider() {
String absPath = getKeyStoreUrl(keystore);
Properties props = new Properties();
props.setProperty(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), absPath);
AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(props);
assertEquals("mysecret", accClientConf.get(Property.INSTANCE_SECRET));
}
@Test
public void defaultValueForSensitiveProperty() {
Properties props = new Properties();
AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(props);
assertEquals(Property.INSTANCE_SECRET.getDefaultValue(),
accClientConf.get(Property.INSTANCE_SECRET));
}
@Test
public void sensitivePropertiesIncludedInProperties() {
String absPath = getKeyStoreUrl(keystore);
Properties clientProps = new Properties();
clientProps.setProperty(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), absPath);
AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(clientProps);
Map<String,String> props = new HashMap<>();
accClientConf.getProperties(props, x -> true);
// Only sensitive properties are added
assertEquals(Property.GENERAL_RPC_TIMEOUT.getDefaultValue(),
props.get(Property.GENERAL_RPC_TIMEOUT.getKey()));
// Only known properties are added
assertFalse(props.containsKey("ignored.property"));
assertEquals("mysecret", props.get(Property.INSTANCE_SECRET.getKey()));
}
@Test
public void testGetBatchWriterConfigUsingDefaults() {
Properties props = new Properties();
BatchWriterConfig batchWriterConfig = ClientContext.getBatchWriterConfig(props);
assertNotNull(batchWriterConfig);
long expectedMemory = ConfigurationTypeHelper
.getMemoryAsBytes(ClientProperty.BATCH_WRITER_MEMORY_MAX.getDefaultValue());
assertEquals(expectedMemory, batchWriterConfig.getMaxMemory());
    // If the value of BATCH_WRITER_LATENCY_MAX or BATCH_WRITER_TIMEOUT_MAX is set to zero,
    // Long.MAX_VALUE is returned. Effectively, this causes data to be held in memory
    // indefinitely for BATCH_WRITER_LATENCY_MAX, and means no timeout for BATCH_WRITER_TIMEOUT_MAX.
// Due to this behavior, the test compares the return values differently. If a value of
// 0 is used, compare the return value using TimeUnit.MILLISECONDS, otherwise the value
// should be converted to seconds in order to match the value set in ClientProperty.
long expectedLatency = ConfigurationTypeHelper
.getTimeInMillis(ClientProperty.BATCH_WRITER_LATENCY_MAX.getDefaultValue());
if (expectedLatency == 0) {
expectedLatency = Long.MAX_VALUE;
assertEquals(expectedLatency, batchWriterConfig.getMaxLatency(TimeUnit.MILLISECONDS));
} else {
assertEquals(expectedLatency, batchWriterConfig.getMaxLatency(TimeUnit.SECONDS));
}
long expectedTimeout = ConfigurationTypeHelper
.getTimeInMillis(ClientProperty.BATCH_WRITER_TIMEOUT_MAX.getDefaultValue());
if (expectedTimeout == 0) {
expectedTimeout = Long.MAX_VALUE;
assertEquals(expectedTimeout, batchWriterConfig.getTimeout(TimeUnit.MILLISECONDS));
} else {
assertEquals(expectedTimeout, batchWriterConfig.getTimeout(TimeUnit.SECONDS));
}
int expectedThreads =
Integer.parseInt(ClientProperty.BATCH_WRITER_THREADS_MAX.getDefaultValue());
assertEquals(expectedThreads, batchWriterConfig.getMaxWriteThreads());
Durability expectedDurability =
Durability.valueOf(ClientProperty.BATCH_WRITER_DURABILITY.getDefaultValue().toUpperCase());
assertEquals(expectedDurability, batchWriterConfig.getDurability());
}
@Test
public void testGetBatchWriterConfigNotUsingDefaults() {
Properties props = new Properties();
// set properties to non-default values
props.setProperty(ClientProperty.BATCH_WRITER_MEMORY_MAX.getKey(), "10M");
props.setProperty(ClientProperty.BATCH_WRITER_LATENCY_MAX.getKey(), "0");
props.setProperty(ClientProperty.BATCH_WRITER_TIMEOUT_MAX.getKey(), "15");
props.setProperty(ClientProperty.BATCH_WRITER_THREADS_MAX.getKey(), "12");
props.setProperty(ClientProperty.BATCH_WRITER_DURABILITY.getKey(), Durability.FLUSH.name());
BatchWriterConfig batchWriterConfig = ClientContext.getBatchWriterConfig(props);
assertNotNull(batchWriterConfig);
long expectedMemory = ConfigurationTypeHelper
.getMemoryAsBytes(ClientProperty.BATCH_WRITER_MEMORY_MAX.getValue(props));
assertEquals(expectedMemory, batchWriterConfig.getMaxMemory());
assertEquals(Long.MAX_VALUE, batchWriterConfig.getMaxLatency(TimeUnit.MILLISECONDS));
// getTimeout returns time in milliseconds, therefore the 15 becomes 15000.
assertEquals(15000, batchWriterConfig.getTimeout(TimeUnit.SECONDS));
long expectedThreads = ClientProperty.BATCH_WRITER_THREADS_MAX.getInteger(props);
assertEquals(expectedThreads, batchWriterConfig.getMaxWriteThreads());
Durability expectedDurability =
Durability.valueOf(ClientProperty.BATCH_WRITER_DURABILITY.getValue(props).toUpperCase());
assertEquals(expectedDurability, batchWriterConfig.getDurability());
}
@Test
public void testGetConditionalWriterConfigUsingDefaults() {
Properties props = new Properties();
ConditionalWriterConfig conditionalWriterConfig =
ClientContext.getConditionalWriterConfig(props);
assertNotNull(conditionalWriterConfig);
// If the value of CONDITIONAL_WRITER_TIMEOUT_MAX is set to zero, Long.MAX_VALUE is returned.
// Effectively, this indicates there is no timeout for CONDITIONAL_WRITER_TIMEOUT_MAX. Due to
// this behavior, the test compares the return values differently. If a value of 0 is used,
// compare the return value using TimeUnit.MILLISECONDS, otherwise the value should be
// converted to seconds in order to match the value set in ClientProperty.
long expectedTimeout = ConfigurationTypeHelper
.getTimeInMillis(ClientProperty.CONDITIONAL_WRITER_TIMEOUT_MAX.getDefaultValue());
if (expectedTimeout == 0) {
assertEquals(Long.MAX_VALUE, conditionalWriterConfig.getTimeout(TimeUnit.MILLISECONDS));
} else {
assertEquals(expectedTimeout, conditionalWriterConfig.getTimeout(TimeUnit.SECONDS));
}
int expectedThreads =
Integer.parseInt(ClientProperty.CONDITIONAL_WRITER_THREADS_MAX.getDefaultValue());
assertEquals(expectedThreads, conditionalWriterConfig.getMaxWriteThreads());
Durability expectedDurability = Durability
.valueOf(ClientProperty.CONDITIONAL_WRITER_DURABILITY.getDefaultValue().toUpperCase());
assertEquals(expectedDurability, conditionalWriterConfig.getDurability());
}
@Test
public void testGetConditionalWriterConfigNotUsingDefaults() {
Properties props = new Properties();
// set properties to non-default values
props.setProperty(ClientProperty.CONDITIONAL_WRITER_TIMEOUT_MAX.getKey(), "17");
props.setProperty(ClientProperty.CONDITIONAL_WRITER_THREADS_MAX.getKey(), "14");
props.setProperty(ClientProperty.CONDITIONAL_WRITER_DURABILITY.getKey(),
Durability.SYNC.name());
ConditionalWriterConfig conditionalWriterConfig =
ClientContext.getConditionalWriterConfig(props);
assertNotNull(conditionalWriterConfig);
// getTimeout returns time in milliseconds, therefore the 17 becomes 17000.
assertEquals(17000, conditionalWriterConfig.getTimeout(TimeUnit.SECONDS));
long expectedThreads = ClientProperty.CONDITIONAL_WRITER_THREADS_MAX.getInteger(props);
assertEquals(expectedThreads, conditionalWriterConfig.getMaxWriteThreads());
Durability expectedDurability = Durability
.valueOf(ClientProperty.CONDITIONAL_WRITER_DURABILITY.getValue(props).toUpperCase());
assertEquals(expectedDurability, conditionalWriterConfig.getDurability());
}
}
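// ---------------------------------------------------------------------------
// Hedged sketch (not part of the original test): the comments above describe a
// convention where a configured latency/timeout of 0 means "no limit" and is
// surfaced as Long.MAX_VALUE. The helper below only illustrates that
// normalization rule; it is not Accumulo's implementation.
class ZeroMeansUnlimitedSketch {
    static long normalizeMillis(long configuredMillis) {
        // 0 is treated as "unlimited"; any other value is passed through unchanged
        return configuredMillis == 0 ? Long.MAX_VALUE : configuredMillis;
    }

    public static void main(String[] args) {
        System.out.println(normalizeMillis(0));     // 9223372036854775807 (unlimited)
        System.out.println(normalizeMillis(15000)); // 15000
    }
}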
|
|
/**
* Created by WONG,Kin Fat on 21st March 2017
* Copyright: For educational purpose
* Developer info: WONG,Kin Fat
*
 * Usage (MainApp.java): this class instantiates the other classes.
 * Modify only this class to add or remove Nodes or to change the
 * adjacency_matrix; the other classes implement the search algorithms.
* Last modified: 25 March, 2017
*/
package codepackage;
import java.util.ArrayList;
public class MainApp {
public static void clearVisitedCount(ArrayList<Node<String>> nodes)
{
for (int i = 0; i < nodes.size(); i++) {
			nodes.get(i).setVisitedCount(0);
}
}
public static void clearVisitedFlags(ArrayList<Node<String>> nodes)
{
for (int i = 0; i < nodes.size(); i++) {
nodes.get(i).setVisited(false);
}
}
@SuppressWarnings("unchecked")
public static void main(String[] args) {
// TODO Auto-generated method stub
//TODO: Generating Graph 1
//Node<T> NODE = new Node<T>( T name);
Node<String> G1_nodeS = new Node<String>("S");
Node<String> G1_nodeA = new Node<String>("A");
Node<String> G1_nodeB = new Node<String>("B");
Node<String> G1_nodeC = new Node<String>("C");
Node<String> G1_nodeG = new Node<String>("G");
ArrayList<Node<String>> G1_nodes=new ArrayList<Node<String>>();
G1_nodes.add(G1_nodeS);
G1_nodes.add(G1_nodeA);
G1_nodes.add(G1_nodeB);
G1_nodes.add(G1_nodeC);
G1_nodes.add(G1_nodeG);
int adjacency_matrix_G1[][] = {
{0,1,1,0,0}, // Node 1: S
{0,0,1,0,1}, // Node 2: A
{0,0,0,1,0}, // Node 3: B
{0,0,0,0,1}, // Node 4: C
{0,0,0,0,0}, // Node 5: G
};
// Graph 1 (BFS, DFS)
System.out.println("------------Results of Graph 1-------------");
System.out.println("\nThe BFS tree search of Graph 1 is ");
BreadthFirstSearch<String> Graph1_BFS = new BreadthFirstSearch<String>();
Graph1_BFS.bfs(adjacency_matrix_G1, G1_nodeS, G1_nodes);
clearVisitedCount(G1_nodes);
System.out.println("\nThe DFS tree search of Graph 1 is ");
DepthFirstSearch<String> Graph1_DFS = new DepthFirstSearch<String>();
Graph1_DFS.dfsUsingStack(adjacency_matrix_G1, G1_nodeS, G1_nodes);
//TODO: Generating Graph 2
//Node<T> NODE = new Node<T>( T name);
Node<String> G2_nodeS = new Node<String>("S");
Node<String> G2_nodeA = new Node<String>("A");
Node<String> G2_nodeB = new Node<String>("B");
Node<String> G2_nodeC = new Node<String>("C");
Node<String> G2_nodeG = new Node<String>("G");
ArrayList<Node<String>> G2_nodes=new ArrayList<Node<String>>();
G2_nodes.add(G2_nodeS);
G2_nodes.add(G2_nodeA);
G2_nodes.add(G2_nodeB);
G2_nodes.add(G2_nodeC);
G2_nodes.add(G2_nodeG);
int adjacency_matrix_G2[][] = {
{0,0,1,1,0}, // Node 1: S
{0,0,0,0,1}, // Node 2: A
{0,1,0,0,0}, // Node 3: B
{0,0,1,0,0}, // Node 4: C
{0,0,0,0,0}, // Node 5: G
};
// Graph 2 (BFS, DFS)
System.out.println("\n\n------------Results of Graph 2-------------");
System.out.println("\nThe BFS tree search of Graph 2 is ");
BreadthFirstSearch<String> Graph2_BFS = new BreadthFirstSearch<String>();
Graph2_BFS.bfs(adjacency_matrix_G2, G2_nodeS, G2_nodes);
clearVisitedCount(G2_nodes);
System.out.println("\n\nThe DFS tree search of Graph 2 is ");
DepthFirstSearch<String> Graph2_DFS = new DepthFirstSearch<String>();
Graph2_DFS.dfsUsingStack(adjacency_matrix_G2, G2_nodeS, G2_nodes);
//TODO: Generating Graph 3
//Node<T> NODE = new Node<T>( T name);
Node<String> G3_nodeS = new Node<String>("S");
Node<String> G3_nodeA = new Node<String>("A");
Node<String> G3_nodeB = new Node<String>("B");
Node<String> G3_nodeC = new Node<String>("C");
Node<String> G3_nodeG = new Node<String>("G");
ArrayList<Node<String>> G3_nodes=new ArrayList<Node<String>>();
G3_nodes.add(G3_nodeS);
G3_nodes.add(G3_nodeA);
G3_nodes.add(G3_nodeB);
G3_nodes.add(G3_nodeC);
G3_nodes.add(G3_nodeG);
int adjacency_matrix_G3[][] = {
{0,0,1,1,0}, // Node 1: S
{0,0,0,0,0}, // Node 2: A
{0,1,0,0,1}, // Node 3: B
{0,1,0,0,1}, // Node 4: C
{0,0,0,0,0}, // Node 5: G
};
// Graph 3 (BFS, DFS)
System.out.println("\n\n------------Results of Graph 3-------------");
System.out.println("\nThe BFS tree search of Graph 3 is ");
BreadthFirstSearch<String> Graph3_BFS = new BreadthFirstSearch<String>();
Graph3_BFS.bfs(adjacency_matrix_G3, G3_nodeS, G3_nodes);
clearVisitedCount(G3_nodes);
System.out.println("\nThe DFS tree search of Graph 3 is ");
DepthFirstSearch<String> Graph3_DFS = new DepthFirstSearch<String>();
Graph3_DFS.dfsUsingStack(adjacency_matrix_G3, G3_nodeS, G3_nodes);
//Generating Graph 4 (Part 1)
//TODO: (BFS, DFS, UCS-when h=0)
//Node<T> NODE = new Node<T>( T name,double h_scores);
Node<String> G4_nodeS = new Node<String>("S",0);
Node<String> G4_nodeA = new Node<String>("A",0);
Node<String> G4_nodeB = new Node<String>("B",0);
Node<String> G4_nodeC = new Node<String>("C",0);
Node<String> G4_nodeD = new Node<String>("D",0);
Node<String> G4_nodeE = new Node<String>("E",0);
Node<String> G4_nodeF = new Node<String>("F",0);
Node<String> G4_nodeG1 = new Node<String>("G1",0);
Node<String> G4_nodeG2 = new Node<String>("G2",0);
//TODO: Generating Graph 4 (Part 2)
//TODO: (A* tree, A* Graph)
Node<String> G4_2_nodeS = new Node<String>("S",7);
Node<String> G4_2_nodeA = new Node<String>("A",8);
Node<String> G4_2_nodeB = new Node<String>("B",6);
Node<String> G4_2_nodeC = new Node<String>("C",3);
Node<String> G4_2_nodeD = new Node<String>("D",1);
Node<String> G4_2_nodeE = new Node<String>("E",3);
Node<String> G4_2_nodeF = new Node<String>("F",2);
Node<String> G4_2_nodeG1 = new Node<String>("G1",0);
Node<String> G4_2_nodeG2 = new Node<String>("G2",0);
G4_2_nodeS.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeA, 1),
new Edge<String>(G4_2_nodeB, 3)
};
G4_2_nodeA.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeS, 1),
new Edge<String>(G4_2_nodeD, 2)
// new Edge<String>(G4_2_nodeC, 1)
};
G4_2_nodeB.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeS, 3),
new Edge<String>(G4_2_nodeD, 8),
new Edge<String>(G4_2_nodeE, 5)
};
G4_2_nodeC.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeA, 1),
new Edge<String>(G4_2_nodeD, 5),
new Edge<String>(G4_2_nodeG1, 4)
};
G4_2_nodeD.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeA, 2),
new Edge<String>(G4_2_nodeB, 8),
new Edge<String>(G4_2_nodeC, 5),
new Edge<String>(G4_2_nodeG1, 14),
new Edge<String>(G4_2_nodeG2, 6),
};
G4_2_nodeE.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeB, 5),
new Edge<String>(G4_2_nodeF, 1),
new Edge<String>(G4_2_nodeG2, 4)
};
G4_2_nodeF.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeE, 1),
new Edge<String>(G4_2_nodeG2, 2)
};
G4_2_nodeG1.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeC, 4),
new Edge<String>(G4_2_nodeD, 14),
new Edge<String>(G4_2_nodeG2, 0)
};
G4_2_nodeG2.adjacencies = new Edge[]{
new Edge<String>(G4_2_nodeD, 6),
new Edge<String>(G4_2_nodeE, 4),
new Edge<String>(G4_2_nodeF, 2),
new Edge<String>(G4_2_nodeG1, 0)
};
//For UCS
G4_nodeS.adjacencies = new Edge[]{
new Edge<String>(G4_nodeA, 1),
new Edge<String>(G4_nodeB, 3)
};
G4_nodeA.adjacencies = new Edge[]{
new Edge<String>(G4_nodeS, 1),
new Edge<String>(G4_nodeD, 2)
};
G4_nodeB.adjacencies = new Edge[]{
new Edge<String>(G4_nodeS, 3),
new Edge<String>(G4_nodeD, 8),
new Edge<String>(G4_nodeE, 5)
};
G4_nodeC.adjacencies = new Edge[]{
new Edge<String>(G4_nodeA, 1),
new Edge<String>(G4_nodeD, 5),
new Edge<String>(G4_nodeG1, 4)
};
G4_nodeD.adjacencies = new Edge[]{
new Edge<String>(G4_nodeA, 2),
new Edge<String>(G4_nodeB, 8),
new Edge<String>(G4_nodeC, 5),
new Edge<String>(G4_nodeG1, 14),
new Edge<String>(G4_nodeG2, 6),
};
G4_nodeE.adjacencies = new Edge[]{
new Edge<String>(G4_nodeB, 5),
new Edge<String>(G4_nodeF, 1),
new Edge<String>(G4_nodeG2, 4)
};
G4_nodeF.adjacencies = new Edge[]{
new Edge<String>(G4_nodeE, 1),
new Edge<String>(G4_nodeG2, 2)
};
G4_nodeG1.adjacencies = new Edge[]{
new Edge<String>(G4_nodeC, 4),
new Edge<String>(G4_nodeD, 14),
new Edge<String>(G4_nodeG2, 0)
};
G4_nodeG2.adjacencies = new Edge[]{
new Edge<String>(G4_nodeD, 6),
new Edge<String>(G4_nodeE, 4),
new Edge<String>(G4_nodeF, 2),
new Edge<String>(G4_nodeG1, 0)
};
ArrayList<Node<String>> G4_nodes=new ArrayList<Node<String>>();
G4_nodes.add(G4_nodeS);
G4_nodes.add(G4_nodeA);
G4_nodes.add(G4_nodeB);
G4_nodes.add(G4_nodeC);
G4_nodes.add(G4_nodeD);
G4_nodes.add(G4_nodeE);
G4_nodes.add(G4_nodeF);
G4_nodes.add(G4_nodeG1);
G4_nodes.add(G4_nodeG2);
int adjacency_matrix_G4[][] = {
{0,1,1,0,0,0,0,0,0}, // Node 1: S
{0,0,0,0,1,0,0,0,0}, // Node 2: A
{0,0,0,0,1,1,0,0,0}, // Node 3: B
{0,1,0,0,0,0,0,1,0}, // Node 4: C
{0,0,0,1,0,0,0,1,1}, // Node 5: D
{0,0,0,0,0,0,1,0,1}, // Node 6: E
{0,0,0,0,0,0,0,0,1}, // Node 7: F
{0,0,0,0,0,0,0,0,1}, // Node 8: G1
{0,0,0,0,0,0,0,0,0}, // Node 9: G2
};
// Graph 4 (BFS, DFS, UCS, A* tree, A* Graph)
System.out.println("\n\n------------Results of Graph 4-------------");
System.out.println("\nThe BFS tree search of Graph 4 is ");
BreadthFirstSearch<String> Graph4_BFS = new BreadthFirstSearch<String>();
Graph4_BFS.bfs(adjacency_matrix_G4, G4_nodeS, G4_nodes);
clearVisitedCount(G4_nodes);
System.out.println("\nThe DFS tree search of Graph 4 is ");
DepthFirstSearch<String> Graph4_DFS = new DepthFirstSearch<String>();
Graph4_DFS.dfsUsingStack(adjacency_matrix_G4, G4_nodeS, G4_nodes);
System.out.println("\nThe UCS tree search of Graph 4 is ");
AStarGraphSearch<String> Graph4_UCS = new AStarGraphSearch<String>();
Graph4_UCS.AstarSearch(G4_nodeS, G4_nodeG2);
System.out.println(Graph4_UCS.printPath(G4_nodeG2) + " is shortest path");
System.out.println("The A* tree search of Graph 4 is ");
AStarGraphSearch<String> Graph4_AStarTree = new AStarGraphSearch<String>();
Graph4_AStarTree.AstarSearch(G4_2_nodeS, G4_2_nodeG2);
System.out.println(Graph4_AStarTree.printPath(G4_2_nodeG2) + " is shortest path");
System.out.println("The A* Graph search of Graph 4 is ");
AStarGraphSearch<String> Graph4_AStarGraph = new AStarGraphSearch<String>();
Graph4_AStarGraph.AstarSearch(G4_2_nodeS, G4_2_nodeG2);
System.out.println(Graph4_AStarGraph.printPath(G4_2_nodeG2) + " is shortest path");
//TODO: Generating Graph 5 (Part 1)
//TODO: (BFS, DFS, UCS)
//Node<T> NODE = new Node<T>( T name,double h_scores);
Node<String> G5_nodeS = new Node<String>("S",0);
Node<String> G5_nodeA = new Node<String>("A",0);
Node<String> G5_nodeB = new Node<String>("B",0);
Node<String> G5_nodeC = new Node<String>("C",0);
Node<String> G5_nodeD = new Node<String>("D",0);
Node<String> G5_nodeG = new Node<String>("G",0);
//TODO: Generating Graph 5 (Part 2)
//TODO: (A* tree, A* Graph)
Node<String> G5_2_nodeS = new Node<String>("S",0);
Node<String> G5_2_nodeA = new Node<String>("A",3);
Node<String> G5_2_nodeB = new Node<String>("B",3);
Node<String> G5_2_nodeC = new Node<String>("C",1);
Node<String> G5_2_nodeD = new Node<String>("D",2);
Node<String> G5_2_nodeG = new Node<String>("G",0);
G5_2_nodeS.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeA, 2),
new Edge<String>(G5_2_nodeB, 1)
};
G5_2_nodeA.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeB, 1),
new Edge<String>(G5_2_nodeC, 3),
new Edge<String>(G5_2_nodeD, 1),
new Edge<String>(G5_2_nodeS, 2)
};
G5_2_nodeB.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeD, 5),
new Edge<String>(G5_2_nodeG, 10),
new Edge<String>(G5_2_nodeA, 1),
new Edge<String>(G5_2_nodeS, 1)
};
G5_2_nodeC.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeG, 7),
new Edge<String>(G5_2_nodeA, 3)
};
G5_2_nodeD.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeG, 4),
new Edge<String>(G5_2_nodeA, 1),
new Edge<String>(G5_2_nodeB, 5)
};
G5_2_nodeG.adjacencies = new Edge[]{
new Edge<String>(G5_2_nodeB, 10),
new Edge<String>(G5_2_nodeC, 7),
new Edge<String>(G5_2_nodeD, 4)
};
// For UCS
G5_nodeS.adjacencies = new Edge[]{
new Edge<String>(G5_nodeB, 1),
new Edge<String>(G5_nodeA, 2)
};
G5_nodeA.adjacencies = new Edge[]{
new Edge<String>(G5_nodeB, 1),
new Edge<String>(G5_nodeC, 3),
new Edge<String>(G5_nodeD, 1),
new Edge<String>(G5_nodeS, 2)
};
G5_nodeB.adjacencies = new Edge[]{
new Edge<String>(G5_nodeD, 5),
new Edge<String>(G5_nodeG, 10),
new Edge<String>(G5_nodeA, 1),
new Edge<String>(G5_nodeS, 1)
};
G5_nodeC.adjacencies = new Edge[]{
new Edge<String>(G5_nodeG, 7),
new Edge<String>(G5_nodeA, 3)
};
G5_nodeD.adjacencies = new Edge[]{
new Edge<String>(G5_nodeG, 4),
new Edge<String>(G5_nodeA, 1),
new Edge<String>(G5_nodeB, 5)
};
G5_nodeG.adjacencies = new Edge[]{
new Edge<String>(G5_nodeB, 10),
new Edge<String>(G5_nodeC, 7),
new Edge<String>(G5_nodeD, 4)
};
//For BFS and DFS
ArrayList<Node<String>> G5_nodes=new ArrayList<Node<String>>();
G5_nodes.add(G5_nodeS);
G5_nodes.add(G5_nodeA);
G5_nodes.add(G5_nodeB);
G5_nodes.add(G5_nodeC);
G5_nodes.add(G5_nodeD);
G5_nodes.add(G5_nodeG);
//For BFS and DFS
int adjacency_matrix_G5[][] = {
{0,1,1,0,0,0}, // Node 1: S
{0,0,1,1,1,0}, // Node 2: A
{0,0,0,0,1,1}, // Node 3: B
{0,0,0,0,0,1}, // Node 4: C
{0,0,0,0,0,1}, // Node 5: D
				{0,0,0,0,0,0}, // Node 6: G
};
// Graph 5 (BFS, DFS, UCS- when h=0, A* tree, A* Graph)
System.out.println("\n\n------------Results of Graph 5-------------");
System.out.println("\nThe BFS tree search of Graph 5 is ");
BreadthFirstSearch<String> Graph5_BFS = new BreadthFirstSearch<String>();
Graph5_BFS.bfs(adjacency_matrix_G5, G5_nodeS, G5_nodes);
clearVisitedCount(G5_nodes);
System.out.println("\nThe DFS tree search of Graph 5 is ");
DepthFirstSearch<String> Graph5_DFS = new DepthFirstSearch<String>();
Graph5_DFS.dfsUsingStack(adjacency_matrix_G5, G5_nodeS, G5_nodes);
clearVisitedCount(G5_nodes);
clearVisitedFlags(G5_nodes);
System.out.println("\nThe UCS tree search of Graph 5 is ");
AStarGraphSearch<String> Graph5_UCS = new AStarGraphSearch<String>();
Graph5_UCS.AstarSearch(G5_nodeS, G5_nodeG);
System.out.println(Graph5_UCS.printPath(G5_nodeG) + " is shortest path");
System.out.println("The A* tree search of Graph 5 is ");
AStarGraphSearch<String> Graph5_AStarTree = new AStarGraphSearch<String>();
Graph5_AStarTree.AstarSearch(G5_2_nodeS, G5_2_nodeG);
System.out.println(Graph5_AStarTree.printPath(G5_2_nodeG) + " is shortest path");
System.out.println("The A* Graph search of Graph 5 is ");
AStarGraphSearch<String> Graph5_AStarGraph = new AStarGraphSearch<String>();
Graph5_AStarGraph.AstarSearch(G5_2_nodeS, G5_2_nodeG);
System.out.println(Graph5_AStarGraph.printPath(G5_2_nodeG) + " is shortest path");
}
}
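// ---------------------------------------------------------------------------
// Hedged sketch (not part of the original file): the header comment says this
// class is the place to add or remove Nodes or change an adjacency_matrix. The
// snippet below shows the minimal wiring for a new three-node graph using the
// same Node/BreadthFirstSearch API as above; node names and edges are made up
// for illustration.
class GraphWiringSketch {
	public static void main(String[] args) {
		Node<String> nodeS = new Node<String>("S");
		Node<String> nodeA = new Node<String>("A");
		Node<String> nodeG = new Node<String>("G");
		java.util.ArrayList<Node<String>> nodes = new java.util.ArrayList<Node<String>>();
		nodes.add(nodeS);
		nodes.add(nodeA);
		nodes.add(nodeG);
		// Row i, column j == 1 means a directed edge from node i to node j.
		int adjacency_matrix[][] = {
				{0,1,0}, // Node 1: S -> A
				{0,0,1}, // Node 2: A -> G
				{0,0,0}, // Node 3: G (goal, no outgoing edges)
		};
		BreadthFirstSearch<String> bfs = new BreadthFirstSearch<String>();
		bfs.bfs(adjacency_matrix, nodeS, nodes);
	}
}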
|
|
package com.singhpk.cricketscore;
import android.app.Activity;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.http.SslError;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.webkit.SslErrorHandler;
import android.webkit.WebSettings;
import android.webkit.WebSettings.PluginState;
import android.webkit.WebSettings.RenderPriority;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Toast;
public class ScoreBoardActivity extends Activity {
	private WebView wView;
private boolean appLaunch = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_score_board);
wView = (WebView)findViewById(R.id.webView1);
wView.loadUrl(AppConstant.STARTUP_LINK);
wView.setBackgroundColor(getResources().getColor(android.R.color.background_light));
wView.getSettings().setJavaScriptEnabled(true);
wView.getSettings().setBuiltInZoomControls(true);
wView.getSettings().setAllowFileAccess(true);
wView.requestFocus(View.FOCUS_DOWN);
wView.getSettings().setDatabaseEnabled(true);
wView.getSettings().setDomStorageEnabled(true);
wView.getSettings().setRenderPriority(RenderPriority.HIGH);
wView.getSettings().setCacheMode(WebSettings.LOAD_NO_CACHE);
wView.setWebViewClient(new SSLTolerentWebViewClient());
}
private class SSLTolerentWebViewClient extends WebViewClient {
@Override
public void onReceivedSslError(WebView view, SslErrorHandler handler, SslError error) {
handler.proceed(); // Ignore SSL certificate errors
}
@Override
public void onPageFinished(WebView view, String url) {
Progresss.stop();
}
		@Override
		public void onPageStarted(WebView view, String url, android.graphics.Bitmap favicon) {
			if (appLaunch) {
				Progresss.start(ScoreBoardActivity.this);
				appLaunch = false;
			}
			if (!isNetworkAvailable(ScoreBoardActivity.this)) {
				// show() was missing in the original, so the toast never appeared
				Toast.makeText(getApplicationContext(), "Oops! Check your network connection", Toast.LENGTH_LONG).show();
			}
		}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
menu.add("LiveScores");//0
menu.add("Results");//1
menu.add("Point Table");//2
menu.add("Rankings");//3
menu.add("Records");//4
menu.add("Photos");//5
menu.add("Videos");//6
menu.add("Players");//7
menu.add("Countries");//8
menu.add("News");//9
//menu.add("LiveScores");//10
getMenuInflater().inflate(R.menu.score_board, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
Progresss.start(ScoreBoardActivity.this);
if(item.getTitle().equals("LiveScores"))
{
wView.loadUrl(AppConstant.STARTUP_LINK);
}else if(item.getTitle().equals("Results"))
{
wView.loadUrl(AppConstant.RESULTS_LINK);
}else if(item.getTitle().equals("Point Table"))
{
wView.loadUrl(AppConstant.POINT_TABLE_LINK);
}else if(item.getTitle().equals("Rankings"))
{
wView.loadUrl(AppConstant.RANKINGS_LINK);
}else if(item.getTitle().equals("Records"))
{
wView.loadUrl(AppConstant.RECORDS_LINK);
}else if(item.getTitle().equals("Photos"))
{
wView.loadUrl(AppConstant.PHOTOS_LINK);
}else if(item.getTitle().equals("Videos"))
{
wView.loadUrl(AppConstant.VIDEOS_LINK);
}else if(item.getTitle().equals("Players"))
{
wView.loadUrl(AppConstant.PLAYERS_LINK);
}else if(item.getTitle().equals("Countries"))
{
wView.loadUrl(AppConstant.COUNTRIES_LINK);
}else if(item.getTitle().equals("News"))
{
wView.loadUrl(AppConstant.NEWS_LINK);
}
else
{
Progresss.stop();
return super.onOptionsItemSelected(item);
}
return true;
/*
switch(item.getItemId())
{
case 0:
wView.loadUrl(AppConstant.STARTUP_LINK);
return true;
case 1:
wView.loadUrl(AppConstant.RESULTS_LINK);
return true;
case 2:
wView.loadUrl(AppConstant.POINT_TABLE_LINK);
return true;
case 3:
wView.loadUrl(AppConstant.RANKINGS_LINK);
return true;
case 4:
wView.loadUrl(AppConstant.RECORDS_LINK);
return true;
case 5:
wView.loadUrl(AppConstant.PHOTOS_LINK);
return true;
case 6:
wView.loadUrl(AppConstant.VIDEOS_LINK);
return true;
case 7:
wView.loadUrl(AppConstant.PLAYERS_LINK);
return true;
case 8:
wView.loadUrl(AppConstant.COUNTRIES_LINK);
return true;
case 9:
wView.loadUrl(AppConstant.NEWS_LINK);
return true;
default:
return super.onOptionsItemSelected(item);
}
*/
}
//checkNetwork Connection
public static boolean isNetworkAvailable(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetworkInfo = connectivityManager
.getActiveNetworkInfo();
return activeNetworkInfo != null && activeNetworkInfo.isConnected();
}
@Override
public void onBackPressed() {
if(wView.canGoBack()){
wView.goBack();
}else{
finish();
}
}
}
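// ---------------------------------------------------------------------------
// Hedged sketch (not part of the original file): onOptionsItemSelected() above
// maps each menu title to a URL through a long if/else chain. A table-driven
// lookup keeps that mapping in one place; the sketch assumes the same
// AppConstant link constants used by the activity.
class MenuLinkTableSketch {
	private static final java.util.Map<String, String> LINKS = new java.util.HashMap<String, String>();
	static {
		LINKS.put("LiveScores", AppConstant.STARTUP_LINK);
		LINKS.put("Results", AppConstant.RESULTS_LINK);
		LINKS.put("Point Table", AppConstant.POINT_TABLE_LINK);
		LINKS.put("Rankings", AppConstant.RANKINGS_LINK);
		LINKS.put("Records", AppConstant.RECORDS_LINK);
		LINKS.put("Photos", AppConstant.PHOTOS_LINK);
		LINKS.put("Videos", AppConstant.VIDEOS_LINK);
		LINKS.put("Players", AppConstant.PLAYERS_LINK);
		LINKS.put("Countries", AppConstant.COUNTRIES_LINK);
		LINKS.put("News", AppConstant.NEWS_LINK);
	}

	/** Returns the URL for a menu title, or null if the title is not handled. */
	static String linkFor(CharSequence menuTitle) {
		return LINKS.get(String.valueOf(menuTitle));
	}
}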
|
|
package com.fasterxml.jackson.jr.ob.impl;
import java.io.IOException;
import java.util.Map;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.jr.ob.JSON;
import com.fasterxml.jackson.jr.ob.JSONObjectException;
import com.fasterxml.jackson.jr.ob.TestBase;
import com.fasterxml.jackson.jr.ob.api.ReaderWriterProvider;
import com.fasterxml.jackson.jr.ob.api.ValueReader;
public class CustomValueReadersTest extends TestBase
{
static class CustomValue {
public int value;
// 2nd arg just to avoid discovery
public CustomValue(int v, boolean b) {
// and to ensure it goes through constructor, add 1
value = v + 1;
}
}
static class CustomValueBean {
public CustomValue custom;
protected CustomValueBean() { }
public CustomValueBean(int v) {
custom = new CustomValue(v, false);
}
}
enum ABC {
A, B, C, DEF;
}
static class CustomValueReader extends ValueReader {
private final int delta;
public CustomValueReader(int d) {
super(CustomValue.class);
delta = d;
}
@Override
public Object read(JSONReader reader, JsonParser p) throws IOException {
return new CustomValue(p.getIntValue() + delta, true);
}
// Base class impl should be fine, although we'd use this for optimal
/*
@Override
public Object readNext(JSONReader reader, JsonParser p) throws IOException {
return new CustomValue(p.nextIntValue(-1), true);
}
*/
}
static class ABCValueReader extends ValueReader {
public ABCValueReader() {
super(ABC.class);
}
@Override
public Object read(JSONReader reader, JsonParser p) throws IOException {
final String str = p.getText();
if ("n/a".equals(str)) {
return ABC.DEF;
}
return ABC.valueOf(str);
}
}
static class CapStringReader extends ValueReader {
public CapStringReader() {
super(String.class);
}
@Override
public Object read(JSONReader reader, JsonParser p) throws IOException {
return p.getText().toUpperCase();
}
}
static class OverrideStringReader extends ValueReader {
final String _value;
public OverrideStringReader(String str) {
super(String.class);
_value = str;
}
@Override
public Object read(JSONReader reader, JsonParser p) throws IOException {
p.skipChildren();
return _value;
}
}
static class CustomReaders extends ReaderWriterProvider {
final int delta;
public CustomReaders(int d) {
delta = d;
}
@Override
public ValueReader findValueReader(JSONReader readContext, Class<?> type) {
if (type.equals(CustomValue.class)) {
return new CustomValueReader(delta);
} else if (type.equals(ABC.class)) {
return new ABCValueReader();
}
return null;
}
}
static class CapStringReaderProvider extends ReaderWriterProvider {
@Override
public ValueReader findValueReader(JSONReader readContext, Class<?> type) {
if (type.equals(String.class)) {
return new CapStringReader();
}
return null;
}
}
static class OverrideStringReaderProvider extends ReaderWriterProvider {
final ValueReader vr;
public OverrideStringReaderProvider(String str) {
vr = new OverrideStringReader(str);
}
@Override
public ValueReader findValueReader(JSONReader readContext, Class<?> type) {
if (type.equals(String.class)) {
return vr;
}
return null;
}
}
static class Point {
public int _x, _y;
public Point(int x, int y) {
_x = x;
_y = y;
}
}
static class PointReader extends ValueReader {
public PointReader() { super(Point.class); }
@Override
public Object read(JSONReader reader, JsonParser p) throws IOException {
Map<String, Object> map = reader.readMap();
return new Point((Integer) map.get("x"), (Integer) map.get("y"));
}
}
static class PointReaderProvider extends ReaderWriterProvider {
@Override
public ValueReader findValueReader(JSONReader readContext, Class<?> type) {
if (type == Point.class) {
return new PointReader();
}
return null;
}
}
static class NoOpProvider extends ReaderWriterProvider {
}
/*
/**********************************************************************
    /* Test methods
/**********************************************************************
*/
public void testCustomBeanReader() throws Exception
{
// First: without handler, will fail to map
try {
JSON.std.beanFrom(CustomValue.class, "123");
fail("Should not pass");
} catch (JSONObjectException e) {
verifyException(e, ".CustomValue");
verifyException(e, "constructor to use");
}
// then with custom, should be fine
JSON json = jsonWithProvider(new CustomReaders(0));
CustomValue v = json.beanFrom(CustomValue.class, "123");
assertEquals(124, v.value);
// similarly with wrapper
CustomValueBean bean = json.beanFrom(CustomValueBean.class,
aposToQuotes("{ 'custom' : 137 }"));
assertEquals(138, bean.custom.value);
// but also ensure we can change registered handler(s)
JSON json2 = jsonWithProvider(new CustomReaders(100));
v = json2.beanFrom(CustomValue.class, "123");
assertEquals(224, v.value);
}
public void testChainedCustomBeanReaders() throws Exception
{
JSON json = jsonWithProviders(new CustomReaders(0),
new CustomReaders(100));
CustomValue v = json.beanFrom(CustomValue.class, "69");
assertEquals(70, v.value);
json = jsonWithProviders(new CustomReaders(100),
new CustomReaders(0));
v = json.beanFrom(CustomValue.class, "72");
assertEquals(173, v.value);
}
public void testCustomEnumReader() throws Exception
{
// First: without handler, will fail to map
try {
JSON.std.beanFrom(ABC.class, quote("n/a"));
fail("Should not pass");
} catch (JSONObjectException e) {
verifyException(e, "Failed to find Enum of type");
}
// then with custom, should be fine
JSON json = jsonWithProvider(new CustomReaders(0));
ABC v = json.beanFrom(ABC.class, quote("n/a"));
assertEquals(ABC.DEF, v);
// but if we remove, again error
JSON json2 = jsonWithProvider((ReaderWriterProvider) null);
try {
json2.beanFrom(ABC.class, quote("n/a"));
fail("Should not pass");
} catch (JSONObjectException e) {
verifyException(e, "Failed to find Enum of type");
}
}
// Even more fun, override default String deserializer!
public void testCustomStringReader() throws Exception
{
String allCaps = jsonWithProvider(new CapStringReaderProvider())
.beanFrom(String.class, quote("Some text"));
assertEquals("SOME TEXT", allCaps);
}
public void testChainedStringReaders() throws Exception {
String result = jsonWithProviders(new CapStringReaderProvider(),
new OverrideStringReaderProvider("foo"))
.beanFrom(String.class, quote("Some text"));
assertEquals("SOME TEXT", result);
result = jsonWithProviders(new NoOpProvider(), new OverrideStringReaderProvider("foo"))
.beanFrom(String.class, quote("Some text"));
assertEquals("foo", result);
// and ok not to have anything, too
result = jsonWithProviders(new NoOpProvider(), new NoOpProvider())
.beanFrom(String.class, quote("Some text"));
assertEquals("Some text", result);
// Plus nulls fine too
result = jsonWithProviders(null, new OverrideStringReaderProvider("foo"))
.beanFrom(String.class, quote("Some text"));
assertEquals("foo", result);
result = jsonWithProviders(new OverrideStringReaderProvider("foo"), null)
.beanFrom(String.class, quote("Some text"));
assertEquals("foo", result);
}
// But also can use methods from "JSONReader" for convenience
public void testCustomDelegatingReader() throws Exception
{
// First: without handler, will fail to map
final String doc = "{\"y\" : 3, \"x\": 2 }";
try {
JSON.std.beanFrom(Point.class, doc);
fail("Should not pass");
} catch (JSONObjectException e) {
verifyException(e, "$Point");
verifyException(e, "constructor to use");
}
// then with custom, should be fine
JSON json = jsonWithProvider(new PointReaderProvider());
Point v = json.beanFrom(Point.class, doc);
assertEquals(2, v._x);
assertEquals(3, v._y);
}
}
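// ---------------------------------------------------------------------------
// Hedged sketch (not part of the original test): the same ValueReader /
// ReaderWriterProvider pattern exercised above can be applied to other leaf
// types. The reader below parses an ISO-8601 date string into
// java.time.LocalDate; it would be attached the same way as the providers in
// the tests (for example via the jsonWithProvider() helper from TestBase).
class LocalDateReaderSketch extends ValueReader {
    public LocalDateReaderSketch() {
        super(java.time.LocalDate.class);
    }

    @Override
    public Object read(JSONReader reader, JsonParser p) throws IOException {
        // Expects a JSON string such as "2017-03-21"
        return java.time.LocalDate.parse(p.getText());
    }
}

class LocalDateReaderProviderSketch extends ReaderWriterProvider {
    @Override
    public ValueReader findValueReader(JSONReader readContext, Class<?> type) {
        return (type == java.time.LocalDate.class) ? new LocalDateReaderSketch() : null;
    }
}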
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan.misc;
import static org.junit.Assert.fail;
import java.util.Set;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.internal.cache.BucketRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.RegionQueue;
import org.apache.geode.internal.cache.wan.AbstractGatewaySender;
import org.apache.geode.internal.cache.wan.WANTestBase;
import org.apache.geode.internal.cache.wan.parallel.ConcurrentParallelGatewaySenderQueue;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.junit.categories.WanTest;
@Category({WanTest.class})
public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
@Test
public void testSameSenderWithNonColocatedRegions() throws Exception {
IgnoredException.addIgnoredException("cannot have the same parallel");
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
vm4.invoke(() -> WANTestBase.createCache(lnPort));
vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
try {
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1,
100, isOffHeap()));
fail("Expected IllegalStateException : cannot have the same parallel gateway sender");
} catch (Exception e) {
if (!(e.getCause() instanceof IllegalStateException) || !(e.getCause().getMessage()
.contains("cannot have the same parallel gateway sender id"))) {
Assert.fail("Expected IllegalStateException", e);
}
}
}
  /**
   * Simple scenario: two regions attach the same parallel gateway sender.
   *
   * This test is disabled intentionally:
   * 1. In release 8.0 the queue name was changed to the old style for rolling-upgrade support.
   * 2. A common parallel sender for different non-colocated regions is not supported in 8.0, so
   * ParallelGatewaySenderQueue#convertPathToName is not a concern.
   * 3. It has to be enabled again in the next release.
   * 4. Version-based rolling-upgrade support should be provided: based on the GemFire version,
   * the QSTRING should be used between 8.0 and versions prior to 8.0.
   */
@Test
@Ignore("TODO")
public void testParallelPropagation() throws Exception {
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
createCacheInVMs(nyPort, vm2, vm3);
createReceiverInVMs(vm2, vm3);
createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true));
vm5.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true));
vm6.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true));
vm7.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, false, null, true));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", null, 1, 100,
isOffHeap()));
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", null, 1, 100,
isOffHeap()));
// before doing any puts, let the senders be running in order to ensure that
// not a single event will be lost
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR1", 1000));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR2", 1000));
// verify all buckets drained on all sender nodes.
vm4.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm5.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm6.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm7.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR1", 1000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR2", 1000));
}
  /**
   * The parallel gateway sender is persistence enabled but the regions are not.
   *
   * This test is disabled intentionally:
   * 1. In release 8.0 the queue name was changed to the old style for rolling-upgrade support.
   * 2. A common parallel sender for different non-colocated regions is not supported in 8.0, so
   * ParallelGatewaySenderQueue#convertPathToName is not a concern.
   * 3. It has to be enabled again in the next release.
   * 4. Version-based rolling-upgrade support should be provided: based on the GemFire version,
   * the QSTRING should be used between 8.0 and versions prior to 8.0.
   */
@Test
@Ignore("TODO")
public void testParallelPropagationPersistenceEnabled() throws Exception {
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
createCacheInVMs(nyPort, vm2, vm3);
createReceiverInVMs(vm2, vm3);
createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
vm4.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, true, null, true));
vm5.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, true, null, true));
vm6.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, true, null, true));
vm7.invoke(() -> WANTestBase.createSender("ln", 2, true, 100, 10, false, true, null, true));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", "ln", 1, 100,
isOffHeap()));
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", "ln", 1, 100,
isOffHeap()));
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR1", null, 1, 100,
isOffHeap()));
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "_PR2", null, 1, 100,
isOffHeap()));
// before doing any puts, let the senders be running in order to ensure that
// not a single event will be lost
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR1", 1000));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_PR2", 1000));
// verify all buckets drained on all sender nodes.
vm4.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm5.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm6.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm7.invoke(() -> CommonParallelGatewaySenderDUnitTest
.validateParallelSenderQueueAllBucketsDrained("ln"));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR1", 1000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "_PR2", 1000));
}
  /**
   * Enable persistence for the GatewaySender. Pause the sender and do some puts in the local
   * region. Close the local site and rebuild the region and sender from the disk store. The
   * dispatcher should not start dispatching events recovered from the persistent sender. Check
   * that the remote site receives all the events.
   *
   * This test is disabled intentionally:
   * 1. In release 8.0 the queue name was changed to the old style for rolling-upgrade support.
   * 2. A common parallel sender for different non-colocated regions is not supported in 8.0, so
   * ParallelGatewaySenderQueue#convertPathToName is not a concern.
   * 3. It has to be enabled again in the next release.
   * 4. Version-based rolling-upgrade support should be provided: based on the GemFire version,
   * the QSTRING should be used between 8.0 and versions prior to 8.0.
   */
@Test
@Ignore("TODO")
public void testPRWithGatewaySenderPersistenceEnabled_Restart() {
// create locator on local site
Integer lnPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
// create locator on remote site
Integer nyPort = (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnPort));
// create receiver on remote site
createCacheInVMs(nyPort, vm2, vm3);
createReceiverInVMs(vm2, vm3);
// create cache in local site
createCacheInVMs(lnPort, vm4, vm5, vm6, vm7);
// create senders with disk store
String diskStore1 = (String) vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore2 = (String) vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore3 = (String) vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
String diskStore4 = (String) vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2,
true, 100, 10, false, true, null, null, true));
LogWriterUtils.getLogWriter()
.info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
// create PR on remote site
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", null, 1, 100,
isOffHeap()));
// create PR on remote site
vm2.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", null, 1, 100,
isOffHeap()));
vm3.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", null, 1, 100,
isOffHeap()));
// create PR on local site
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100,
isOffHeap()));
// create PR on local site
vm4.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", "ln", 1, 100,
isOffHeap()));
vm5.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", "ln", 1, 100,
isOffHeap()));
vm6.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", "ln", 1, 100,
isOffHeap()));
vm7.invoke(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2", "ln", 1, 100,
isOffHeap()));
// start the senders on local site
startSenderInVMs("ln", vm4, vm5, vm6, vm7);
// wait for senders to become running
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
// pause the senders
vm4.invoke(() -> WANTestBase.pauseSender("ln"));
vm5.invoke(() -> WANTestBase.pauseSender("ln"));
vm6.invoke(() -> WANTestBase.pauseSender("ln"));
vm7.invoke(() -> WANTestBase.pauseSender("ln"));
// start puts in region on local site
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "PR1", 3000));
vm4.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "PR2", 5000));
LogWriterUtils.getLogWriter().info("Completed puts in the region");
// --------------------close and rebuild local site
// -------------------------------------------------
// kill the senders
vm4.invoke(() -> WANTestBase.killSender());
vm5.invoke(() -> WANTestBase.killSender());
vm6.invoke(() -> WANTestBase.killSender());
vm7.invoke(() -> WANTestBase.killSender());
LogWriterUtils.getLogWriter().info("Killed all the senders.");
// restart the vm
vm4.invoke(() -> WANTestBase.createCache(lnPort));
vm5.invoke(() -> WANTestBase.createCache(lnPort));
vm6.invoke(() -> WANTestBase.createCache(lnPort));
vm7.invoke(() -> WANTestBase.createCache(lnPort));
LogWriterUtils.getLogWriter().info("Created back the cache");
// create senders with disk store
vm4.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore1, true));
vm5.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore2, true));
vm6.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore3, true));
vm7.invoke(() -> WANTestBase.createSenderWithDiskStore("ln", 2, true, 100, 10, false, true,
null, diskStore4, true));
LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
// create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(() -> WANTestBase
.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100, isOffHeap()));
AsyncInvocation inv2 = vm5.invokeAsync(() -> WANTestBase
.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100, isOffHeap()));
AsyncInvocation inv3 = vm6.invokeAsync(() -> WANTestBase
.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100, isOffHeap()));
AsyncInvocation inv4 = vm7.invokeAsync(() -> WANTestBase
.createPartitionedRegion(getTestMethodName() + "PR1", "ln", 1, 100, isOffHeap()));
try {
inv1.join();
inv2.join();
inv3.join();
inv4.join();
} catch (InterruptedException e) {
e.printStackTrace();
fail();
}
inv1 = vm4.invokeAsync(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2",
"ln", 1, 100, isOffHeap()));
inv2 = vm5.invokeAsync(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2",
"ln", 1, 100, isOffHeap()));
inv3 = vm6.invokeAsync(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2",
"ln", 1, 100, isOffHeap()));
inv4 = vm7.invokeAsync(() -> WANTestBase.createPartitionedRegion(getTestMethodName() + "PR2",
"ln", 1, 100, isOffHeap()));
try {
inv1.join();
inv2.join();
inv3.join();
inv4.join();
} catch (InterruptedException e) {
e.printStackTrace();
fail();
}
LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
// start the senders in async mode. This will ensure that the
// node of shadow PR that went down last will come up first
startSenderInVMsAsync("ln", vm4, vm5, vm6, vm7);
LogWriterUtils.getLogWriter().info("Waiting for senders running.");
// wait for senders running
vm4.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm5.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm6.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
vm7.invoke(() -> WANTestBase.waitForSenderRunningState("ln"));
LogWriterUtils.getLogWriter().info("All the senders are now running...");
// ----------------------------------------------------------------------------------------------------
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "PR1", 3000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "PR1", 3000));
vm2.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "PR2", 5000));
vm3.invoke(() -> WANTestBase.validateRegionSize(getTestMethodName() + "PR2", 5000));
}
public static void validateParallelSenderQueueAllBucketsDrained(final String senderId) {
Set<GatewaySender> senders = cache.getGatewaySenders();
GatewaySender sender = null;
for (GatewaySender s : senders) {
if (s.getId().equals(senderId)) {
sender = s;
break;
}
}
ConcurrentParallelGatewaySenderQueue regionQueue =
(ConcurrentParallelGatewaySenderQueue) ((AbstractGatewaySender) sender).getQueues()
.toArray(new RegionQueue[1])[0];
Set<PartitionedRegion> shadowPRs = (Set<PartitionedRegion>) regionQueue.getRegions();
for (PartitionedRegion shadowPR : shadowPRs) {
Set<BucketRegion> buckets = shadowPR.getDataStore().getAllLocalBucketRegions();
for (final BucketRegion bucket : buckets) {
WaitCriterion wc = new WaitCriterion() {
public boolean done() {
if (bucket.keySet().size() == 0) {
LogWriterUtils.getLogWriter().info("Bucket " + bucket.getId() + " is empty");
return true;
}
return false;
}
public String description() {
return "Expected bucket entries for bucket: " + bucket.getId()
+ " is: 0 but actual entries: " + bucket.keySet().size()
+ " This bucket isPrimary: " + bucket.getBucketAdvisor().isPrimary() + " KEYSET: "
+ bucket.keySet();
}
};
Wait.waitForCriterion(wc, 180000, 50, true);
} // for loop ends
}
}
}
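/*
 * A minimal, standalone sketch of the poll-until-timeout idiom that
 * Wait.waitForCriterion(wc, 180000, 50, true) applies above: re-evaluate a
 * condition every few milliseconds until it holds or a deadline passes.
 * JDK-only; the names below are illustrative and not Geode test-framework API.
 */
class PollUntilSketch {
  static void pollUntil(java.util.function.BooleanSupplier condition, long timeoutMs, long intervalMs,
      String description) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    // Keep re-checking the condition, sleeping between attempts, until it holds or we time out.
    while (!condition.getAsBoolean()) {
      if (System.currentTimeMillis() > deadline) {
        throw new AssertionError("Timed out waiting for: " + description);
      }
      Thread.sleep(intervalMs);
    }
  }
}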
|
|
/*
* 11/19/04 1.0 moved to LGPL.
*-----------------------------------------------------------------------
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*----------------------------------------------------------------------
*/
package javazoom.jl.player.advanced;
import java.io.InputStream;
import javazoom.jl.decoder.Bitstream;
import javazoom.jl.decoder.BitstreamException;
import javazoom.jl.decoder.Decoder;
import javazoom.jl.decoder.Header;
import javazoom.jl.decoder.JavaLayerException;
import javazoom.jl.decoder.SampleBuffer;
import javazoom.jl.player.AudioDevice;
import javazoom.jl.player.FactoryRegistry;
/**
 * A hybrid of javazoom.jl.player.Player, tweaked to include <code>play(startFrame, endFrame)</code>.
 * Hopefully this will be included in the API.
*/
public class AdvancedPlayer
{
/** The MPEG audio bitstream.*/
private Bitstream bitstream;
/** The MPEG audio decoder. */
private Decoder decoder;
/** The AudioDevice the audio samples are written to. */
private AudioDevice audio;
/** Has the player been closed? */
private boolean closed = false;
/** Has the player played back all frames from the stream? */
@SuppressWarnings("unused")
private boolean complete = false;
@SuppressWarnings("unused")
private int lastPosition = 0;
/** Listener for the playback process */
private PlaybackListener listener;
/**
* Creates a new <code>Player</code> instance.
*/
public AdvancedPlayer(InputStream stream) throws JavaLayerException
{
this(stream, null);
}
public AdvancedPlayer(InputStream stream, AudioDevice device) throws JavaLayerException
{
bitstream = new Bitstream(stream);
if (device!=null) audio = device;
else audio = FactoryRegistry.systemRegistry().createAudioDevice();
audio.open(decoder = new Decoder());
}
public void play() throws JavaLayerException
{
play(Integer.MAX_VALUE);
}
/**
* Plays a number of MPEG audio frames.
*
* @param frames The number of frames to play.
	 * @return true if all the requested frames were played and more frames may
	 *         remain, or false if the end of the stream was reached.
*/
public boolean play(int frames) throws JavaLayerException
{
boolean ret = true;
// report to listener
if(listener != null) listener.playbackStarted(createEvent(PlaybackEvent.STARTED));
while (frames-- > 0 && ret)
{
ret = decodeFrame();
}
// if (!ret)
{
// last frame, ensure all data flushed to the audio device.
AudioDevice out = audio;
if (out != null)
{
// System.out.println(audio.getPosition());
out.flush();
// System.out.println(audio.getPosition());
synchronized (this)
{
complete = (!closed);
close();
}
// report to listener
if(listener != null) listener.playbackFinished(createEvent(out, PlaybackEvent.STOPPED));
}
}
return ret;
}
/**
	 * Closes this player. Any audio currently playing is stopped
* immediately.
*/
public synchronized void close()
{
AudioDevice out = audio;
if (out != null)
{
closed = true;
audio = null;
// this may fail, so ensure object state is set up before
// calling this method.
out.close();
lastPosition = out.getPosition();
try
{
bitstream.close();
}
catch (BitstreamException ex)
{}
}
}
/**
* Decodes a single frame.
*
	 * @return true if a frame was decoded and written to the audio device, or
	 *         false if there are no more frames to decode (or the player was closed).
*/
protected boolean decodeFrame() throws JavaLayerException
{
try
{
AudioDevice out = audio;
if (out == null) return false;
Header h = bitstream.readFrame();
if (h == null) return false;
// sample buffer set when decoder constructed
SampleBuffer output = (SampleBuffer) decoder.decodeFrame(h, bitstream);
synchronized (this)
{
out = audio;
if(out != null)
{
out.write(output.getBuffer(), 0, output.getBufferLength());
}
}
bitstream.closeFrame();
}
catch (RuntimeException ex)
{
throw new JavaLayerException("Exception decoding audio frame", ex);
}
return true;
}
/**
* skips over a single frame
* @return false if there are no more frames to decode, true otherwise.
*/
protected boolean skipFrame() throws JavaLayerException
{
Header h = bitstream.readFrame();
if (h == null) return false;
bitstream.closeFrame();
return true;
}
/**
* Plays a range of MPEG audio frames
* @param start The first frame to play
* @param end The last frame to play
	 * @return true if the requested range was played and more frames may remain, or false if the end of the stream was reached.
*/
public boolean play(final int start, final int end) throws JavaLayerException
{
boolean ret = true;
int offset = start;
while (offset-- > 0 && ret) ret = skipFrame();
return play(end - start);
}
/**
* Constructs a <code>PlaybackEvent</code>
*/
private PlaybackEvent createEvent(int id)
{
return createEvent(audio, id);
}
/**
* Constructs a <code>PlaybackEvent</code>
*/
private PlaybackEvent createEvent(AudioDevice dev, int id)
{
return new PlaybackEvent(this, id, dev.getPosition());
}
/**
* sets the <code>PlaybackListener</code>
*/
public void setPlayBackListener(PlaybackListener listener)
{
this.listener = listener;
}
/**
* gets the <code>PlaybackListener</code>
*/
public PlaybackListener getPlayBackListener()
{
return listener;
}
/**
* closes the player and notifies <code>PlaybackListener</code>
*/
public void stop()
{
listener.playbackFinished(createEvent(PlaybackEvent.STOPPED));
close();
}
}
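/*
 * A minimal usage sketch, not part of the original JLayer source: stream an
 * MP3 from a file path given on the command line, receive start/stop
 * callbacks, and play only a frame range. The frame count below is a
 * placeholder (MPEG-1 Layer III frames at 44.1 kHz last roughly 26 ms each,
 * so about 1150 frames is roughly 30 seconds).
 */
class AdvancedPlayerUsageSketch
{
	public static void main(String[] args) throws Exception
	{
		InputStream in = new java.io.BufferedInputStream(new java.io.FileInputStream(args[0]));
		AdvancedPlayer player = new AdvancedPlayer(in);
		player.setPlayBackListener(new PlaybackListener()
		{
			public void playbackStarted(PlaybackEvent evt)
			{
				System.out.println("playback started");
			}
			public void playbackFinished(PlaybackEvent evt)
			{
				System.out.println("playback finished");
			}
		});
		// skip the first 0 frames and play the next ~1150
		player.play(0, 1150);
	}
}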
|
|
/**
* Copyright 2012 MARSEC-XL International Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.marssa.footprint.datatypes;
import org.marssa.footprint.datatypes.composite.APosition;
import org.marssa.footprint.datatypes.composite.Attitude;
import org.marssa.footprint.datatypes.composite.Coordinate;
import org.marssa.footprint.datatypes.composite.Latitude;
import org.marssa.footprint.datatypes.composite.Longitude;
import org.marssa.footprint.datatypes.composite.Pitch;
import org.marssa.footprint.datatypes.composite.Roll;
import org.marssa.footprint.datatypes.composite.Yaw;
import org.marssa.footprint.datatypes.decimal.DegreesDecimal;
import org.marssa.footprint.datatypes.decimal.MDecimal;
import org.marssa.footprint.datatypes.decimal.UnsignedDecimal;
import org.marssa.footprint.datatypes.decimal.distance.ADistance;
import org.marssa.footprint.datatypes.decimal.distance.Fathoms;
import org.marssa.footprint.datatypes.decimal.distance.KM;
import org.marssa.footprint.datatypes.decimal.distance.Metres;
import org.marssa.footprint.datatypes.decimal.distance.Miles;
import org.marssa.footprint.datatypes.decimal.distance.NM;
import org.marssa.footprint.datatypes.decimal.electrical.charge.ACharge;
import org.marssa.footprint.datatypes.decimal.electrical.charge.Ah;
import org.marssa.footprint.datatypes.decimal.electrical.charge.Coulombs;
import org.marssa.footprint.datatypes.decimal.electrical.charge.mAh;
import org.marssa.footprint.datatypes.decimal.electrical.current.ACurrent;
import org.marssa.footprint.datatypes.decimal.electrical.current.Amps;
import org.marssa.footprint.datatypes.decimal.electrical.current.MilliAmps;
import org.marssa.footprint.datatypes.decimal.electrical.energy.AEnergy;
import org.marssa.footprint.datatypes.decimal.electrical.energy.Joules;
import org.marssa.footprint.datatypes.decimal.electrical.energy.KJoules;
import org.marssa.footprint.datatypes.decimal.electrical.energy.MJoules;
import org.marssa.footprint.datatypes.decimal.electrical.impedance.AImpedance;
import org.marssa.footprint.datatypes.decimal.electrical.impedance.KOhms;
import org.marssa.footprint.datatypes.decimal.electrical.impedance.MOhms;
import org.marssa.footprint.datatypes.decimal.electrical.impedance.Ohms;
import org.marssa.footprint.datatypes.decimal.electrical.power.APower;
import org.marssa.footprint.datatypes.decimal.electrical.power.KWatts;
import org.marssa.footprint.datatypes.decimal.electrical.power.MWatts;
import org.marssa.footprint.datatypes.decimal.electrical.voltage.AVoltage;
import org.marssa.footprint.datatypes.decimal.electrical.voltage.MilliVolts;
import org.marssa.footprint.datatypes.decimal.electrical.voltage.Volts;
import org.marssa.footprint.datatypes.decimal.flow.AVolumeFlow;
import org.marssa.footprint.datatypes.decimal.flow.CFPH;
import org.marssa.footprint.datatypes.decimal.flow.CFPM;
import org.marssa.footprint.datatypes.decimal.flow.CFPS;
import org.marssa.footprint.datatypes.decimal.flow.LPH;
import org.marssa.footprint.datatypes.decimal.flow.LPM;
import org.marssa.footprint.datatypes.decimal.flow.LPS;
import org.marssa.footprint.datatypes.decimal.flow.MCPH;
import org.marssa.footprint.datatypes.decimal.flow.MCPM;
import org.marssa.footprint.datatypes.decimal.flow.MCPS;
import org.marssa.footprint.datatypes.decimal.frequency.AFrequency;
import org.marssa.footprint.datatypes.decimal.frequency.Hz;
import org.marssa.footprint.datatypes.decimal.frequency.KHz;
import org.marssa.footprint.datatypes.decimal.pressure.APressure;
import org.marssa.footprint.datatypes.decimal.pressure.Bar;
import org.marssa.footprint.datatypes.decimal.pressure.KPa;
import org.marssa.footprint.datatypes.decimal.pressure.MBars;
import org.marssa.footprint.datatypes.decimal.pressure.MMHg;
import org.marssa.footprint.datatypes.decimal.pressure.PSI;
import org.marssa.footprint.datatypes.decimal.pressure.Pascals;
import org.marssa.footprint.datatypes.decimal.speed.ASpeed;
import org.marssa.footprint.datatypes.decimal.speed.KPH;
import org.marssa.footprint.datatypes.decimal.speed.Knots;
import org.marssa.footprint.datatypes.decimal.speed.MPH;
import org.marssa.footprint.datatypes.decimal.speed.MPS;
import org.marssa.footprint.datatypes.decimal.temperature.ATemperature;
import org.marssa.footprint.datatypes.decimal.temperature.DegreesCelcius;
import org.marssa.footprint.datatypes.decimal.temperature.Fahrenheit;
import org.marssa.footprint.datatypes.decimal.temperature.Kelvin;
import org.marssa.footprint.datatypes.decimal.volume.AVolume;
import org.marssa.footprint.datatypes.decimal.volume.ImpGallons;
import org.marssa.footprint.datatypes.decimal.volume.Litres;
import org.marssa.footprint.datatypes.decimal.volume.USGallonsDry;
import org.marssa.footprint.datatypes.decimal.volume.USGallonsLiquid;
import org.marssa.footprint.datatypes.integer.DegreesInteger;
import org.marssa.footprint.datatypes.integer.MInteger;
import org.marssa.footprint.datatypes.integer.PercentageInteger;
import org.marssa.footprint.datatypes.time.ATime;
import org.marssa.footprint.datatypes.time.Hours;
import org.marssa.footprint.datatypes.time.Milliseconds;
import org.marssa.footprint.datatypes.time.Minutes;
import org.marssa.footprint.datatypes.time.Seconds;
import org.marssa.footprint.exceptions.OutOfRange;
/**
* @author Clayton Tabone
*
*/
public class TypeFactory {
public static Hours getHoursInstance() {
return new Hours(100l);
}
public static Minutes getMinutesInstance() {
return new Minutes(100l);
}
public static Seconds getSecondsInstance() {
return new Seconds(100l);
}
public static Milliseconds getMillisecondsInstance() {
return new Milliseconds(100l);
}
public static ATime getATimeInstance() {
return new Milliseconds(100l);
}
// Voltage
public static AVoltage getAVoltageInstance() {
return new Volts(100l);
}
public static MilliVolts getMilliVoltsInstance() {
return new MilliVolts(100l);
}
/**
* Frequency
*/
public static AFrequency getAFrequencyInstance() throws OutOfRange {
return new Hz(100l);
}
public static KHz getKHzInstance() throws OutOfRange {
return new KHz(100l);
}
/**
* Flow
*/
/*
* TODO mass flow datatypes public static AMassFlow getAMassFlowInstance()
* throws OutOfRange { return new (100l); }
*/
public static AVolumeFlow getAVolumeFlowInstance() throws OutOfRange {
return new CFPH(100l);
}
public static CFPH getCFPHInstance() throws OutOfRange {
return new CFPH(100l);
}
public static CFPM getCFPMInstance() throws OutOfRange {
return new CFPM(100l);
}
public static CFPS getCFPSInstance() throws OutOfRange {
return new CFPS(100l);
}
public static LPH getLPHInstance() throws OutOfRange {
return new LPH(100l);
}
public static LPM getLPMInstance() throws OutOfRange {
return new LPM(100l);
}
public static LPS getLPSInstance() throws OutOfRange {
return new LPS(100l);
}
public static MCPH getMCPHInstance() throws OutOfRange {
return new MCPH(100l);
}
public static MCPM getMCPMInstance() throws OutOfRange {
return new MCPM(100l);
}
public static MCPS getMCPSInstance() throws OutOfRange {
return new MCPS(100l);
}
/**
* Pressure
*/
public static APressure getAPressureInstance() throws OutOfRange {
return new Bar(100l);
}
public static Bar getBarInstance() throws OutOfRange {
return new Bar(100l);
}
public static KPa getKPaInstance() throws OutOfRange {
return new KPa(100l);
}
public static MBars getMBarsInstance() throws OutOfRange {
return new MBars(100l);
}
public static MMHg getMMHgInstance() throws OutOfRange {
return new MMHg(100l);
}
public static Pascals getPascalsInstance() throws OutOfRange {
return new Pascals(100l);
}
public static PSI getPSIInstance() throws OutOfRange {
return new PSI(100l);
}
/**
* Speed
*/
public static ASpeed getASpeedInstance() throws OutOfRange {
return new Knots(100l);
}
public static Knots getKnotsInstance() throws OutOfRange {
return new Knots(100l);
}
public static KPH getKPHInstance() throws OutOfRange {
return new KPH(100l);
}
public static MPH getMPHInstance() throws OutOfRange {
return new MPH(100l);
}
public static MPS getMPSInstance() throws OutOfRange {
return new MPS(100l);
}
/**
* Temperature
*/
public static ATemperature getATemperatureInstance() throws OutOfRange {
return new DegreesCelcius(100l);
}
public static Fahrenheit getFahrenheitInstance() throws OutOfRange {
return new Fahrenheit(100l);
}
public static Kelvin getKelvinInstance() throws OutOfRange {
return new Kelvin(100l);
}
/**
* Volume
*/
public static AVolume getAVolumeInstance() throws OutOfRange {
return new Litres(100l);
}
public static USGallonsLiquid getUSGallonsLiquidInstance()
throws OutOfRange {
return new USGallonsLiquid(100l);
}
public static USGallonsDry getUSGallonsDryInstance() throws OutOfRange {
return new USGallonsDry(100l);
}
public static ImpGallons getImpGallonsInstance() throws OutOfRange {
return new ImpGallons(100l);
}
public static Litres getLitresInstance() throws OutOfRange {
return new Litres(100l);
}
/**
* Native DataTypes
*/
public static MBoolean getMBooleanInstance() throws OutOfRange {
return new MBoolean(true);
}
public static MDate getMDateInstance() throws OutOfRange {
return new MDate(100l);
}
public static MString getMStringInstance() throws OutOfRange {
return new MString("");
}
/**
* Composite DataTypes
*/
public static APosition getAPositionInstance() throws OutOfRange {
return new Latitude(getDegreesDecimalInstance());
}
public static Attitude getAttitudeInstance() throws OutOfRange {
return new Attitude(getPitchInstance(), getRollInstance(),
getYawInstance());
}
public static Coordinate getCoordinateInstance() throws OutOfRange {
return new Coordinate(getLatitudeInstance(), getLongitudeInstance());
}
public static Latitude getLatitudeInstance() throws OutOfRange {
return new Latitude(getDegreesDecimalInstance());
}
public static Longitude getLongitudeInstance() throws OutOfRange {
return new Longitude(getDegreesDecimalInstance());
}
public static Pitch getPitchInstance() throws OutOfRange {
return new Pitch(getDegreesDecimalInstance());
}
public static Roll getRollInstance() throws OutOfRange {
return new Roll(getDegreesDecimalInstance());
}
public static Yaw getYawInstance() throws OutOfRange {
return new Yaw(getDegreesDecimalInstance());
}
/**
* DataTypes
*/
public static DegreesDecimal getDegreesDecimalInstance() throws OutOfRange {
return new DegreesDecimal(0);
}
public static MDecimal getMDecimalInstance() throws OutOfRange {
return new MDecimal(0);
}
public static UnsignedDecimal getUnsignedDecimalInstance()
throws OutOfRange {
return new UnsignedDecimal(0);
}
/**
* Distance
*/
public static ADistance getADistanceInstance() throws OutOfRange {
return new NM(0);
}
public static Fathoms getFathomsInstance() throws OutOfRange {
return new Fathoms(0);
}
public static KM getKMInstance() throws OutOfRange {
return new KM(0);
}
public static Metres getMetresInstance() throws OutOfRange {
return new Metres(0);
}
public static Miles getMilesInstance() throws OutOfRange {
return new Miles(0);
}
public static NM getNmInstance() throws OutOfRange {
return new NM(0);
}
/**
* Electrical Charge
*/
public static ACharge getAChargeInstance() throws OutOfRange {
return new Ah(0);
}
public static Coulombs getCoulombsInstance() throws OutOfRange {
return new Coulombs(0);
}
public static mAh getmAhInstance() throws OutOfRange {
return new mAh(0);
}
/**
* Electrical Current
*/
public static ACurrent getACurrentInstance() throws OutOfRange {
return new Amps(0);
}
public static MilliAmps getMilliAmpsInstance() throws OutOfRange {
return new MilliAmps(0);
}
/**
* Impedance
*/
public static AImpedance getAImpedanceInstance() throws OutOfRange {
return new Ohms(0);
}
public static KOhms getKOhmsInstance() throws OutOfRange {
return new KOhms(0);
}
public static MOhms getMOhmsInstance() throws OutOfRange {
return new MOhms(0);
}
/**
* Power
*/
public static APower getAPowerInstance() throws OutOfRange {
return new KWatts(0);
}
public static MWatts getMWattsInstance() throws OutOfRange {
return new MWatts(0);
}
/**
* Integer DataTypes
*/
public static DegreesInteger getDegreesIntegerInstance() throws OutOfRange {
return new DegreesInteger(0);
}
public static MInteger getMIntegerInstance() throws OutOfRange {
return new MInteger(0);
}
public static PercentageInteger getPercentageIntegerInstance()
throws OutOfRange {
return new PercentageInteger(0);
}
/**
* Electrical Energy DataTypes
*/
public static AEnergy getAEnergyInstance() throws OutOfRange {
return new Joules(0);
}
public static KJoules getKJoulesInstance() throws OutOfRange {
return new KJoules(0);
}
public static MJoules getMJoulesInstance() throws OutOfRange {
return new MJoules(0);
}
}
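/*
 * A minimal usage sketch, illustrative only and not part of the original
 * library: the factory hands back pre-populated sample instances, so callers
 * only need to handle OutOfRange where a factory method declares it.
 */
class TypeFactoryUsageSketch {
	public static void main(String[] args) {
		// getATimeInstance() declares no checked exception.
		ATime elapsed = TypeFactory.getATimeInstance();
		System.out.println("elapsed: " + elapsed);
		try {
			// getASpeedInstance() declares OutOfRange, so it must be handled.
			ASpeed speed = TypeFactory.getASpeedInstance();
			System.out.println("speed: " + speed);
		} catch (OutOfRange e) {
			System.err.println("value outside the permitted range: " + e.getMessage());
		}
	}
}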
|
|
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.test.perf;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import junit.framework.Test;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.classic.Session;
import org.hibernate.testing.junit.UnitTestCase;
/**
* Test of configuration, specifically "cacheable files".
*
* @author Max Andersen
* @author Steve Ebersole
*/
public class ConfigurationPerformanceTest extends UnitTestCase {
private final String workPackageName = "org.hibernate.test.cfg.work";
private File compilationBaseDir;
private File workPackageDir;
protected void setUp() throws Exception {
		compilationBaseDir = getTestCompileDirectory();
workPackageDir = new File( compilationBaseDir, workPackageName.replace( '.', '/' ) );
if ( workPackageDir.exists() ) {
//noinspection ResultOfMethodCallIgnored
workPackageDir.delete();
}
boolean created = workPackageDir.mkdirs();
if ( !created ) {
System.err.println( "Unable to create workPackageDir during setup" );
}
}
protected void tearDown() throws Exception {
super.tearDown();
}
public ConfigurationPerformanceTest(String string) {
super( string );
}
public static Test suite() {
return new TestSuite( ConfigurationPerformanceTest.class );
}
public static void main(String[] args) throws Exception {
TestRunner.run( suite() );
}
public void testSessionFactoryCreationTime() throws Throwable {
generateTestFiles();
if ( !workPackageDir.exists() ) {
System.err.println( workPackageDir.getAbsoluteFile() + " not found" );
return;
}
long start = System.currentTimeMillis();
Configuration configuration = buildConfigurationFromCacheableFiles(
workPackageDir,
workPackageDir.list(
new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.endsWith( ".hbm.xml" );
}
}
)
);
SessionFactory factory = configuration.buildSessionFactory();
long initial = System.currentTimeMillis() - start;
factory.close();
start = System.currentTimeMillis();
configuration = buildConfigurationFromCacheableFiles(
workPackageDir,
workPackageDir.list(
new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.endsWith( ".hbm.xml" );
}
}
)
);
factory = configuration.buildSessionFactory();
long subsequent = System.currentTimeMillis() - start;
		// Let's make sure the mappings were read in correctly (in terms of being operational).
Session session = factory.openSession();
session.beginTransaction();
session.createQuery( "from Test1" ).list();
session.getTransaction().commit();
session.close();
factory.close();
System.err.println( "Initial SessionFactory load time : " + initial );
System.err.println( "Subsequent SessionFactory load time : " + subsequent );
}
private Configuration buildConfigurationFromCacheableFiles(File mappingFileBase, String[] files) {
long start = System.currentTimeMillis();
Configuration cfg = new Configuration();
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
System.err.println(
"Created configuration: " + ( System.currentTimeMillis() - start ) / 1000.0 + " sec."
);
start = System.currentTimeMillis();
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < files.length; i++ ) {
cfg.addCacheableFile( new File( mappingFileBase, files[i] ) );
}
System.err.println(
"Added " + ( files.length ) + " resources: " +
( System.currentTimeMillis() - start ) / 1000.0 + " sec."
);
return cfg;
}
public void generateTestFiles() throws Throwable {
String filesToCompile = "";
for ( int count = 0; count < 100; count++ ) {
String name = "Test" + count;
File javaFile = new File( workPackageDir, name + ".java" );
File hbmFile = new File( workPackageDir, name + ".hbm.xml" );
filesToCompile += ( javaFile.getAbsolutePath() + " " );
System.out.println( "Generating " + javaFile.getAbsolutePath() );
PrintWriter javaWriter = null;
PrintWriter hbmWriter = null;
try {
javaWriter = new PrintWriter( new FileWriter( javaFile ) );
hbmWriter = new PrintWriter( new FileWriter( hbmFile ) );
javaWriter.println( "package " + workPackageName + ";" );
hbmWriter.println(
"<?xml version=\"1.0\"?>\r\n" +
"<!DOCTYPE hibernate-mapping PUBLIC \r\n" +
" \"-//Hibernate/Hibernate Mapping DTD 3.0//EN\"\r\n" +
" \"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd\">\r\n"
);
hbmWriter.println( "<hibernate-mapping package=\"" + workPackageName + "\">" );
javaWriter.println( "public class " + name + " {" );
javaWriter.println( " static { System.out.println(\"" + name + " initialized!\"); }" );
hbmWriter.println( "<class name=\"" + name + "\">" );
hbmWriter.println( "<id type=\"long\"><generator class=\"assigned\"/></id>" );
for ( int propCount = 0; propCount < 100; propCount++ ) {
String propName = "Prop" + propCount;
writeJavaProperty( javaWriter, propName );
hbmWriter.println( "<property name=\"" + propName + "\" type=\"string\"/>" );
}
hbmWriter.println( "</class>" );
javaWriter.println( "}" );
hbmWriter.println( "</hibernate-mapping>" );
}
finally {
if ( javaWriter != null ) {
javaWriter.flush();
javaWriter.close();
}
if ( hbmWriter != null ) {
hbmWriter.flush();
hbmWriter.close();
}
}
}
String javac = "javac -version -d " + compilationBaseDir + " " + filesToCompile;
System.err.println( "JAVAC : " + javac );
Process process = Runtime.getRuntime().exec( javac );
process.waitFor();
System.err.println( "********************* JAVAC OUTPUT **********************" );
pullStream( process.getInputStream() );
System.err.println( "---------------------------------------------------------" );
pullStream( process.getErrorStream() );
System.err.println( "*********************************************************" );
}
private void pullStream(InputStream stream) throws IOException {
if ( stream == null || stream.available() <= 0 ) {
return;
}
byte[] buffer = new byte[256];
while ( true ) {
int read = stream.read( buffer );
if ( read == -1 ) {
break;
}
System.err.write( buffer, 0, read );
}
// System.err.println( "" );
}
private void writeJavaProperty(PrintWriter javaWriter, String propName) {
javaWriter.println( " String " + propName + ";" );
javaWriter.println( " String get" + propName + "() { return " + propName + "; }" );
javaWriter.println( " void set" + propName + "(String newVal) { " + propName + "=newVal; }" );
}
	private File getTestCompileDirectory() {
String resourceName = "org/hibernate/test/legacy/ABC.hbm.xml";
String prefix = getClass().getClassLoader().getResource( resourceName ).getFile();
prefix = prefix.substring( 0, prefix.lastIndexOf( '/' ) ); // ABC.hbm.xml
prefix = prefix.substring( 0, prefix.lastIndexOf( '/' ) ); // legacy/
prefix = prefix.substring( 0, prefix.lastIndexOf( '/' ) ); // test/
prefix = prefix.substring( 0, prefix.lastIndexOf( '/' ) ); // hibernate/
prefix = prefix.substring( 0, prefix.lastIndexOf( '/' ) ); // org/
return new File( prefix + '/' );
}
}
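/*
 * A minimal sketch of the "cacheable file" feature the test above measures.
 * Configuration.addCacheableFile() parses foo.hbm.xml once, writes a serialized
 * copy next to it (typically foo.hbm.xml.bin) and reuses that binary form on
 * later runs while the XML is unchanged, which is what makes the second
 * SessionFactory build faster. Paths and names below are placeholders.
 */
class CacheableFileSketch {
	static SessionFactory buildFrom(File mappingDir, String... mappingFiles) {
		Configuration cfg = new Configuration();
		for ( String name : mappingFiles ) {
			// Each call prefers the serialized representation when it is up to date.
			cfg.addCacheableFile( new File( mappingDir, name ) );
		}
		return cfg.buildSessionFactory();
	}
}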
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.facet.impl;
import com.intellij.facet.*;
import com.intellij.facet.impl.ui.FacetEditorImpl;
import com.intellij.facet.impl.ui.FacetTreeModel;
import com.intellij.facet.impl.ui.ProjectConfigurableContext;
import com.intellij.facet.ui.FacetEditorContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.roots.ui.configuration.FacetsProvider;
import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationState;
import com.intellij.openapi.roots.ui.configuration.ModuleEditor;
import com.intellij.openapi.roots.ui.configuration.ModulesConfigurator;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainer;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainerFactory;
import com.intellij.openapi.roots.ui.configuration.projectRoot.StructureConfigurableContext;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.UserDataHolderBase;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class ProjectFacetsConfigurator implements FacetsProvider {
private static final Logger LOG = Logger.getInstance(ProjectFacetsConfigurator.class);
private final Map<Module, ModifiableFacetModel> myModifiableModels = new HashMap<>();
private final Map<Facet, FacetEditorImpl> myEditors = new LinkedHashMap<>();
private final Map<Module, FacetTreeModel> myTreeModels = new HashMap<>();
private final Map<FacetInfo, Facet> myInfo2Facet = new HashMap<>();
private final Map<Facet, FacetInfo> myFacet2Info = new HashMap<>();
private final Map<Module, UserDataHolder> mySharedModuleData = new HashMap<>();
private final Set<Facet> myFacetsToDispose = new HashSet<>();
private final Set<Facet> myChangedFacets = new HashSet<>();
private final Set<Facet> myCreatedFacets = new HashSet<>();
private final StructureConfigurableContext myContext;
private UserDataHolderBase myProjectData = new UserDataHolderBase();
public ProjectFacetsConfigurator(final StructureConfigurableContext context, ProjectFacetsConfigurator facetsConfigurator) {
myContext = context;
if (facetsConfigurator != null) {
initFrom(facetsConfigurator);
}
}
private void initFrom(ProjectFacetsConfigurator facetsConfigurator) {
myFacet2Info.putAll(facetsConfigurator.myFacet2Info);
myInfo2Facet.putAll(facetsConfigurator.myInfo2Facet);
myTreeModels.putAll(facetsConfigurator.myTreeModels);
myEditors.putAll(facetsConfigurator.myEditors);
}
public List<Facet> removeFacet(Facet facet) {
FacetTreeModel treeModel = getTreeModel(facet.getModule());
FacetInfo facetInfo = myFacet2Info.get(facet);
if (facetInfo == null) return Collections.emptyList();
final List<Facet> removed = new ArrayList<>();
List<FacetInfo> childrenList = treeModel.getChildren(facetInfo);
FacetInfo[] children = childrenList.toArray(FacetInfo.EMPTY_ARRAY);
for (FacetInfo child : children) {
Facet childInfo = myInfo2Facet.get(child);
if (childInfo != null) {
removed.addAll(removeFacet(childInfo));
}
}
treeModel.removeFacetInfo(facetInfo);
getOrCreateModifiableModel(facet.getModule()).removeFacet(facet);
myChangedFacets.remove(facet);
if (myCreatedFacets.contains(facet)) {
Disposer.dispose(facet);
}
final FacetEditorImpl facetEditor = myEditors.remove(facet);
if (facetEditor != null) {
facetEditor.disposeUIResources();
}
myFacet2Info.remove(facet);
myInfo2Facet.remove(facetInfo);
removed.add(facet);
return removed;
}
public Facet createAndAddFacet(Module module, FacetType<?, ?> type, final @Nullable Facet underlying) {
final Collection<? extends Facet> facets = getFacetsByType(module, type.getId());
String facetName = type.getDefaultFacetName();
int i = 2;
while (facetExists(facetName, facets)) {
facetName = type.getDefaultFacetName() + i;
i++;
}
final Facet facet = FacetManager.getInstance(module).createFacet(type, facetName, underlying);
myCreatedFacets.add(facet);
addFacetInfo(facet);
getOrCreateModifiableModel(module).addFacet(facet);
return facet;
}
private boolean facetExists(final String facetName, final Collection<? extends Facet> facets) {
for (Facet facet : facets) {
if (getFacetName(facet).equals(facetName)) {
return true;
}
}
return false;
}
public void addFacetInfo(final Facet facet) {
    final FacetInfo existing = myFacet2Info.get(facet);
    if (existing != null) {
      LOG.assertTrue(existing.getName().equals(facet.getName()));
      LOG.assertTrue(existing.getFacetType().equals(facet.getType()));
      LOG.assertTrue(existing.getConfiguration().equals(facet.getConfiguration()));
return;
}
FacetInfo info = new FacetInfo(facet.getType(), facet.getName(), facet.getConfiguration(), myFacet2Info.get(facet.getUnderlyingFacet()));
myFacet2Info.put(facet, info);
myInfo2Facet.put(info, facet);
getTreeModel(facet.getModule()).addFacetInfo(info);
}
public void addFacetInfos(final Module module) {
final Facet[] facets = getFacetModel(module).getSortedFacets();
for (Facet facet : facets) {
addFacetInfo(facet);
}
}
public void clearMaps() {
myModifiableModels.clear();
myEditors.clear();
myTreeModels.clear();
myInfo2Facet.clear();
myFacet2Info.clear();
myChangedFacets.clear();
mySharedModuleData.clear();
}
private boolean isNewFacet(Facet facet) {
final ModifiableFacetModel model = myModifiableModels.get(facet.getModule());
return model != null && model.isNewFacet(facet);
}
@NotNull
public ModifiableFacetModel getOrCreateModifiableModel(final Module module) {
ModifiableFacetModel model = myModifiableModels.get(module);
if (model == null) {
model = FacetManager.getInstance(module).createModifiableModel();
myModifiableModels.put(module, model);
}
return model;
}
@Nullable
public FacetEditorImpl getEditor(Facet facet) {
return myEditors.get(facet);
}
@NotNull
public FacetEditorImpl getOrCreateEditor(Facet facet) {
FacetEditorImpl editor = myEditors.get(facet);
if (editor == null) {
final Facet underlyingFacet = facet.getUnderlyingFacet();
final FacetEditorContext parentContext = underlyingFacet != null ? getOrCreateEditor(underlyingFacet).getContext() : null;
final FacetEditorContext context = createContext(facet, parentContext);
editor = new FacetEditorImpl(context, facet.getConfiguration());
editor.getComponent();
editor.reset();
myEditors.put(facet, editor);
}
return editor;
}
protected FacetEditorContext createContext(final @NotNull Facet facet, final @Nullable FacetEditorContext parentContext) {
Module module = facet.getModule();
ModulesConfigurator modulesConfigurator = myContext.getModulesConfigurator();
ModuleEditor moduleEditor = modulesConfigurator.getModuleEditor(module);
if (moduleEditor == null) {
LOG.error("ModuleEditor[" + module.getName() + "]==null: disposed = " + module.isDisposed() + ", is in model = "
+ Arrays.asList(modulesConfigurator.getModules()).contains(module));
}
final ModuleConfigurationState state = moduleEditor.createModuleConfigurationState();
return new MyProjectConfigurableContext(facet, parentContext, state);
}
private UserDataHolder getSharedModuleData(final Module module) {
UserDataHolder dataHolder = mySharedModuleData.get(module);
if (dataHolder == null) {
dataHolder = new UserDataHolderBase();
mySharedModuleData.put(module, dataHolder);
}
return dataHolder;
}
@NotNull
public FacetModel getFacetModel(Module module) {
final ModifiableFacetModel model = myModifiableModels.get(module);
if (model != null) {
return model;
}
return FacetManager.getInstance(module);
}
public void commitFacets() {
for (ModifiableFacetModel model : myModifiableModels.values()) {
model.commit();
}
for (Map.Entry<Facet, FacetEditorImpl> entry : myEditors.entrySet()) {
entry.getValue().onFacetAdded(entry.getKey());
}
myModifiableModels.clear();
for (Facet facet : myChangedFacets) {
Module module = facet.getModule();
if (!module.isDisposed()) {
FacetManager.getInstance(module).facetConfigurationChanged(facet);
}
}
myChangedFacets.clear();
}
public void resetEditors() {
for (FacetEditorImpl editor : myEditors.values()) {
editor.reset();
}
}
public void applyEditors() throws ConfigurationException {
for (Map.Entry<Facet, FacetEditorImpl> entry : myEditors.entrySet()) {
final FacetEditorImpl editor = entry.getValue();
if (editor.isModified()) {
myChangedFacets.add(entry.getKey());
}
editor.apply();
}
}
public boolean isModified() {
for (ModifiableFacetModel model : myModifiableModels.values()) {
if (model.isModified()) {
return true;
}
}
for (FacetEditorImpl editor : myEditors.values()) {
if (editor.isModified()) {
return true;
}
}
return false;
}
public FacetTreeModel getTreeModel(Module module) {
FacetTreeModel treeModel = myTreeModels.get(module);
if (treeModel == null) {
treeModel = new FacetTreeModel();
myTreeModels.put(module, treeModel);
}
return treeModel;
}
public FacetInfo getFacetInfo(final Facet facet) {
return myFacet2Info.get(facet);
}
public Facet getFacet(final FacetInfo facetInfo) {
return myInfo2Facet.get(facetInfo);
}
public void disposeEditors() {
for (Facet facet : myFacetsToDispose) {
Disposer.dispose(facet);
}
myFacetsToDispose.clear();
myCreatedFacets.clear();
for (FacetEditorImpl editor : myEditors.values()) {
editor.disposeUIResources();
}
myProjectData = null;
}
@Override
public Facet @NotNull [] getAllFacets(final Module module) {
return getFacetModel(module).getAllFacets();
}
@Override
@NotNull
public <F extends Facet> Collection<F> getFacetsByType(final Module module, final FacetTypeId<F> type) {
return getFacetModel(module).getFacetsByType(type);
}
@Override
@Nullable
public <F extends Facet> F findFacet(final Module module, final FacetTypeId<F> type, final String name) {
return getFacetModel(module).findFacet(type, name);
}
private UserDataHolder getProjectData() {
if (myProjectData == null) {
myProjectData = new UserDataHolderBase();
}
return myProjectData;
}
public String getFacetName(Facet facet) {
final ModifiableFacetModel model = myModifiableModels.get(facet.getModule());
if (model != null) {
final String newName = model.getNewName(facet);
if (newName != null) {
return newName;
}
}
return facet.getName();
}
public List<Facet> removeAllFacets(final Module module) {
List<Facet> facets = new ArrayList<>();
FacetModel facetModel = getOrCreateModifiableModel(module);
for (Facet facet : facetModel.getAllFacets()) {
if (!myCreatedFacets.contains(facet)) {
myFacetsToDispose.add(facet);
}
LOG.assertTrue(facet.getModule().equals(module), module + " expected but " + facet.getModule() + " found");
facets.addAll(removeFacet(facet));
}
mySharedModuleData.remove(module);
myModifiableModels.remove(module);
return facets;
}
public boolean hasFacetOfType(Module module, @Nullable Facet parent, FacetTypeId<?> typeId) {
final FacetTreeModel treeModel = getTreeModel(module);
final FacetInfo parentInfo = getFacetInfo(parent);
return treeModel.hasFacetOfType(parentInfo, typeId);
}
private class MyProjectConfigurableContext extends ProjectConfigurableContext {
private final LibrariesContainer myContainer;
MyProjectConfigurableContext(final Facet facet, final FacetEditorContext parentContext, final ModuleConfigurationState state) {
super(facet, ProjectFacetsConfigurator.this.isNewFacet(facet), parentContext, state,
ProjectFacetsConfigurator.this.getSharedModuleData(facet.getModule()), getProjectData());
myContainer = LibrariesContainerFactory.createContainer(myContext);
}
@Override
public LibrariesContainer getContainer() {
return myContainer;
}
}
}
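/*
 * A small JDK-only sketch, not IntelliJ Platform API, of the lazy get-or-create
 * idiom used by getOrCreateModifiableModel, getSharedModuleData and
 * getTreeModel above: Map.computeIfAbsent expresses the same
 * check-for-null-then-put sequence in a single call.
 */
class LazyMapSketch {
  private final Map<String, List<String>> cache = new HashMap<>();

  List<String> getOrCreate(String key) {
    // Creates and stores the value only on the first request for this key.
    return cache.computeIfAbsent(key, k -> new ArrayList<>());
  }
}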
|
|
/*
* Copyright 2014 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mist.plot;
import android.content.Context;
import android.graphics.*;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import java.util.Date;
import java.util.ArrayList;
/**
* Plots a graph of data in split screen view.
* Created by smus on 4/30/14.
*/
public class VRPlotView extends View {
String TAG = "VRPlotView";
ArrayList<float[]> mAxisData;
Paint COLORS[] = {new Paint(), new Paint(), new Paint(), new Paint()};
Paint mDividerPaint = new Paint();
Paint mLabelPaint = new Paint();
int PADDING_X = 100;
int PADDING_Y = 200;
// Number of samples (depending on the size of the canvas).
int mMaxSamples = 0;
// Temporary bitmap and canvas.
Bitmap mBitmap;
Canvas mCanvas;
// Min and max values of the plot.
float mMin = 0;
float mMax = 0;
// Magneto.
private Bitmap mMagnetoIcon;
// An optional label to persistently show front-and-center.
private String mLabel;
// Whether or not to flash the screen.
private Date mFlashStart = null;
int mIteration = 0;
public VRPlotView(Context context, AttributeSet attrs) {
super(context, attrs);
mAxisData = new ArrayList<float[]>();
COLORS[0].setColor(Color.RED);
COLORS[1].setColor(Color.GREEN);
COLORS[2].setColor(Color.BLUE);
COLORS[3].setColor(Color.BLACK);
mDividerPaint.setColor(Color.DKGRAY);
mDividerPaint.setStrokeWidth(2);
mLabelPaint.setColor(Color.LTGRAY);
mLabelPaint.setStrokeWidth(2);
mLabelPaint.setTextSize(34);
mLabelPaint.setTextAlign(Paint.Align.CENTER);
for (Paint p : COLORS) {
p.setStrokeWidth(3);
}
mMagnetoIcon = BitmapFactory.decodeResource(getResources(), R.drawable.magneto);
}
public void flashScreen() {
mFlashStart = new Date();
}
public void showLabel(String label) {
mLabel = label;
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh){
super.onSizeChanged(w, h, oldw, oldh);
Log.i(TAG, String.format("Size changed to %d x %d px.", w, h));
mBitmap = Bitmap.createBitmap(w/2, h, Bitmap.Config.ARGB_8888);
mCanvas = new Canvas(mBitmap);
mMaxSamples = mBitmap.getWidth() - PADDING_X *2;
}
@Override
protected void onDraw(Canvas canvas) {
// Clear the temporary canvas.
mCanvas.drawColor(Color.WHITE);
if (mIteration % 10 == 0) {
reCalculateBounds();
}
// Draw each line
for (int i = 0; i < mAxisData.size() - 1; i++) {
float[] point = mAxisData.get(i);
float[] nextPoint = mAxisData.get(i+1);
for (int j = 0; j < point.length; j++) {
drawLine(point, nextPoint, j, i);
}
}
// Draw magneto in the lower middle part.
int magnetoX = mCanvas.getWidth()/2 - mMagnetoIcon.getWidth()/2;
mCanvas.drawBitmap(mMagnetoIcon, magnetoX, mCanvas.getHeight() - PADDING_X, null);
// Draw the left and right canvases (they are identical).
canvas.drawBitmap(mBitmap, 0, 0, null);
canvas.drawBitmap(mBitmap, mCanvas.getWidth(), 0, null);
// Draw a line for the middle divider.
canvas.drawLine(mCanvas.getWidth()-1, 0, mCanvas.getWidth()-1, canvas.getHeight(), mDividerPaint);
// Draw the label if there is one.
if (mLabel != null) {
canvas.drawText(mLabel, mCanvas.getWidth(), 60, mLabelPaint);
}
setBackgroundColor(Color.BLACK);
setAlpha(0.5f);
// Draw an overlay.
mCanvas.drawPaint(getMagnetOverlay());
mIteration++;
}
private Paint getMagnetOverlay() {
Paint out = new Paint();
out.setAlpha(0);
if (mFlashStart == null) {
return out;
}
int maxAlpha = 200;
int fadeInTime = 100;
int fadeOutTime = 100;
// How far we are into the animation
long duration = new Date().getTime() - mFlashStart.getTime();
int color = Color.BLACK;
int alpha = 0;
if (duration < fadeInTime) {
// Fading in. Calculate the alpha.
float percent = (float)duration / (float)fadeInTime;
alpha = (int) (maxAlpha * percent);
    } else if (duration < fadeInTime + fadeOutTime) {
// Fading out. Calculate the alpha.
float percent = (float)(duration - fadeInTime) / (float)fadeOutTime;
alpha = (int) (maxAlpha * (1 - percent));
}
Log.d(TAG, String.format("Alpha: %d", alpha));
out.setColor(color);
out.setAlpha(alpha);
return out;
}
private void reCalculateBounds() {
mMin = Float.POSITIVE_INFINITY;
mMax = Float.NEGATIVE_INFINITY;
// Go through all data points computing min and max.
for (float[] point : mAxisData) {
for (float datum : point) {
if (datum < mMin) {
mMin = datum;
}
if (datum > mMax) {
mMax = datum;
}
}
}
}
private void drawLine(float[] point, float[] nextPoint, int axis, int time) {
float range = mMax - mMin;
// Calculate the percentages of the available space to render.
float p1 = ((point[axis] - mMin) / range);
float p2 = ((nextPoint[axis] - mMin) / range);
// Convert percent into coordinates.
float y1 = PADDING_Y + p1 * (mCanvas.getHeight() - PADDING_Y *2);
float y2 = PADDING_Y + p2 * (mCanvas.getHeight() - PADDING_Y *2);
float x = time + PADDING_X;
// Draw the line.
mCanvas.drawLine(x, y1, x + 1, y2, getPaint(axis));
}
public void addData(float[] data) {
mAxisData.add(data);
if (mAxisData.size() >= mMaxSamples) {
mAxisData.remove(0);
}
postInvalidate();
}
private Paint getPaint(int index) {
if (index < COLORS.length) {
return COLORS[index];
}
return null;
}
}
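/*
 * A standalone, JDK-only sketch (illustrative names, not Android API) of the
 * mapping drawLine() performs above: normalize a sample against the current
 * [min, max] window and scale it into the padded vertical pixel range.
 * It assumes max > min; the original code divides by the same range without a guard.
 */
class PlotScalingSketch {
  static float toPixelY(float value, float min, float max, int canvasHeight, int paddingY) {
    // Fraction of the value within the observed range, then scaled into the drawable band.
    float percent = (value - min) / (max - min);
    return paddingY + percent * (canvasHeight - paddingY * 2);
  }
}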
|
|
package org.limeprotocol.network;
import org.junit.Test;
import org.limeprotocol.*;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
import static org.limeprotocol.testHelpers.Dummy.*;
import static org.mockito.Mockito.*;
public class ChannelBaseTest {
private TestTransport transport;
private SessionChannel.SessionChannelListener sessionChannelListener;
private ChannelBase getTarget(Session.SessionState state) {
return getTarget(state, false);
}
private ChannelBase getTarget(Session.SessionState state, boolean fillEnvelopeRecipients) {
return getTarget(state, fillEnvelopeRecipients, null, null);
}
private ChannelBase getTarget(Session.SessionState state, boolean fillEnvelopeRecipients, Node remoteNode, Node localNode) {
return getTarget(state, fillEnvelopeRecipients, remoteNode, localNode, null);
}
private ChannelBase getTarget(Session.SessionState state, boolean fillEnvelopeRecipients, Node remoteNode, Node localNode, String sessionId) {
return getTarget(state, fillEnvelopeRecipients, false, remoteNode, localNode, sessionId);
}
private ChannelBase getTarget(Session.SessionState state, boolean fillEnvelopeRecipients, boolean autoReplyPings, Node remoteNode, Node localNode, String sessionId) {
return getTarget(state, fillEnvelopeRecipients, autoReplyPings, 0, 0, remoteNode, localNode, sessionId);
}
private ChannelBase getTarget(Session.SessionState state, boolean fillEnvelopeRecipients, boolean autoReplyPings, long pingInterval, long pingDisconnectionInterval, Node remoteNode, Node localNode, String sessionId) {
transport = new TestTransport();
sessionChannelListener = mock(SessionChannel.SessionChannelListener.class);
ChannelBase channelBase = new TestChannel(transport, state, fillEnvelopeRecipients, autoReplyPings, pingInterval, pingDisconnectionInterval, remoteNode, localNode, sessionId);
channelBase.enqueueSessionListener(sessionChannelListener);
return channelBase;
}
@Test
public void sendCommand_establishedState_callsTransport() throws IOException {
// Arrange
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendCommand(command);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(command, transport.sentEnvelopes.remove());
}
@Test(expected = IllegalArgumentException.class)
public void sendCommand_nullCommand_throwsIllegalArgumentException() throws IOException {
// Arrange
Command command = null;
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendCommand(command);
}
@Test(expected = IllegalStateException.class)
public void sendCommand_newCommand_throwsIllegalStateException() throws IOException {
// Arrange
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.NEW);
// Act
target.sendCommand(command);
}
@Test
public void sendCommand_moduleReturnsCommand_sendsModuleCommand() throws IOException {
// Arrange
Command command = createCommand(createPlainDocument());
Command moduleCommand = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onSending(command)).thenReturn(moduleCommand);
target.getCommandModules().add(module);
// Act
target.sendCommand(command);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(moduleCommand, transport.sentEnvelopes.remove());
verify(module, times(1)).onSending(command);
}
@Test
public void sendCommand_moduleReturnsNull_doNotCallTransport() throws IOException {
// Arrange
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onSending(command)).thenReturn(null);
target.getCommandModules().add(module);
// Act
target.sendCommand(command);
// Assert
assertEquals(0, transport.sentEnvelopes.size());
}
@Test
public void sendCommand_multipleRegisteredModules_callsEachModuleOnce() throws IOException {
// Arrange
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onSending(command)).thenReturn(command);
target.getCommandModules().add(module);
}
// Act
target.sendCommand(command);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(command, transport.sentEnvelopes.remove());
for (ChannelModule<Command> module : target.getCommandModules()) {
verify(module, times(1)).onSending(command);
}
}
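    /*
     * A JDK-only sketch, illustrative and not part of the Lime protocol API, of
     * the module pipeline the surrounding tests exercise: each registered module
     * may transform an outgoing or incoming envelope, and a null result drops the
     * envelope before it reaches the transport or the listeners.
     */
    private static <T> T runThroughModules(T envelope, List<java.util.function.UnaryOperator<T>> modules) {
        for (java.util.function.UnaryOperator<T> module : modules) {
            envelope = module.apply(envelope);
            if (envelope == null) {
                return null; // this module vetoed the envelope, so stop the pipeline
            }
        }
        return envelope;
    }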
@Test
public void addCommandListener_callsTwiceForSameInstance_registerOnce() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.addCommandListener(listener, true);
target.addCommandListener(listener, true);
transport.raiseOnReceive(command);
// Assert
verify(listener, times(1)).onReceiveCommand(command);
}
@Test
public void onReceiveCommand_registeredListenerTwoReceives_callsListenerAndUnregister() throws InterruptedException {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, true);
// Act
transport.raiseOnReceive(command);
transport.raiseOnReceive(command);
// Assert
verify(listener, times(1)).onReceiveCommand(command);
}
@Test
public void onReceiveCommand_registeredListenersMultipleReceives_callsListenersMultipleTimes() throws InterruptedException {
// Arrange
int commandCount = createRandomInt(100) + 1;
int listenersCount = createRandomInt(10) + 1;
// Arrange
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
List<CommandChannel.CommandChannelListener> listeners = new ArrayList<>();
for (int i = 0; i < listenersCount; i++) {
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
target.addCommandListener(listener, false);
listeners.add(listener);
}
// Act
for (int i = 0; i < commandCount; i++) {
transport.raiseOnReceive(command);
}
// Assert
for (int i = 0; i < listenersCount; i++) {
verify(listeners.get(i), times(commandCount)).onReceiveCommand(command);
}
}
@Test
public void onReceiveCommand_autoReplyPings_callsSendCommandWithPingResponse() throws InterruptedException {
// Arrange
Command command = new Command(EnvelopeId.newId());
command.setFrom(createNode());
command.setMethod(Command.CommandMethod.GET);
command.setUri(new LimeUri("/ping"));
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, false, true, null, null, null);
// Act
transport.raiseOnReceive(command);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
Envelope sentEnvelope = transport.sentEnvelopes.remove();
assertTrue(sentEnvelope instanceof Command);
Command sentCommand = (Command)sentEnvelope;
assertEquals(command.getId(), sentCommand.getId());
assertEquals(command.getFrom(), sentCommand.getTo());
assertEquals(Command.CommandStatus.SUCCESS, sentCommand.getStatus());
assertNotNull(sentCommand.getType());
assertEquals("application/vnd.lime.ping+json", sentCommand.getType().toString());
}
@Test
public void onReceiveCommand_moduleReturnsCommand_receivesModuleCommand() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
Command moduleCommand = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, true);
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onReceiving(command)).thenReturn(moduleCommand);
target.getCommandModules().add(module);
// Act
transport.raiseOnReceive(command);
// Assert
verify(module, times(1)).onReceiving(command);
verify(listener, times(1)).onReceiveCommand(moduleCommand);
}
@Test
public void onReceiveCommand_moduleReturnsNull_ignoresCommand() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command1 = createCommand(createPlainDocument());
Command command2 = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, true);
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onReceiving(command1)).thenReturn(null);
when(module.onReceiving(command2)).thenReturn(command2);
target.getCommandModules().add(module);
// Act
transport.raiseOnReceive(command1);
transport.raiseOnReceive(command2);
// Assert
verify(module, times(1)).onReceiving(command1);
verify(module, times(1)).onReceiving(command2);
verify(listener, never()).onReceiveCommand(command1);
verify(listener, times(1)).onReceiveCommand(command2);
}
@Test
public void onReceiveCommand_multipleRegisteredModules_callsEachModuleOnce() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, true);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Command> module = mock(ChannelModule.class);
when(module.onReceiving(command)).thenReturn(command);
target.getCommandModules().add(module);
}
// Act
transport.raiseOnReceive(command);
// Assert
verify(listener, times(1)).onReceiveCommand(command);
for (ChannelModule<Command> module : target.getCommandModules()) {
verify(module, times(1)).onReceiving(command);
}
}
@Test
public void sendMessage_establishedState_callsTransport() throws IOException {
// Arrange
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendMessage(message);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(message, transport.sentEnvelopes.remove());
}
@Test(expected = IllegalArgumentException.class)
public void sendMessage_nullMessage_throwsIllegalArgumentException() throws IOException {
// Arrange
Message message = null;
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendMessage(message);
}
@Test(expected = IllegalStateException.class)
public void sendMessage_newMessage_throwsIllegalStateException() throws IOException {
// Arrange
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.NEW);
// Act
target.sendMessage(message);
}
@Test
public void sendMessage_moduleReturnsMessage_sendsModuleMessage() throws IOException {
// Arrange
Message message = createMessage(createPlainDocument());
Message moduleMessage = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onSending(message)).thenReturn(moduleMessage);
target.getMessageModules().add(module);
// Act
target.sendMessage(message);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(moduleMessage, transport.sentEnvelopes.remove());
}
@Test
public void sendMessage_moduleReturnsNull_doNotCallTransport() throws IOException {
// Arrange
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onSending(message)).thenReturn(null);
target.getMessageModules().add(module);
// Act
target.sendMessage(message);
// Assert
assertEquals(0, transport.sentEnvelopes.size());
}
@Test
public void sendMessage_multipleRegisteredModules_callsEachModuleOnce() throws IOException {
// Arrange
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onSending(message)).thenReturn(message);
target.getMessageModules().add(module);
}
// Act
target.sendMessage(message);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(message, transport.sentEnvelopes.remove());
for (ChannelModule<Message> module : target.getMessageModules()) {
verify(module, times(1)).onSending(message);
}
}
@Test
public void onReceiveMessage_registeredListenerTwoReceives_callsListenerAndUnregister() throws InterruptedException {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, true);
// Act
transport.raiseOnReceive(message);
transport.raiseOnReceive(message);
// Assert
verify(listener, times(1)).onReceiveMessage(message);
}
@Test
public void onReceiveMessage_registeredListenersMultipleReceives_callsListenersMultipleTimes() throws InterruptedException {
// Arrange
int messageCount = createRandomInt(100) + 1;
int listenersCount = createRandomInt(10) + 1;
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
List<MessageChannel.MessageChannelListener> listeners = new ArrayList<>();
for (int i = 0; i < listenersCount; i++) {
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
target.addMessageListener(listener, false);
listeners.add(listener);
}
// Act
for (int i = 0; i < messageCount; i++) {
transport.raiseOnReceive(message);
}
// Assert
for (int i = 0; i < listenersCount; i++) {
verify(listeners.get(i), times(messageCount)).onReceiveMessage(message);
}
}
@Test
public void onReceiveMessage_noRecipients_fillsFromTheSession() throws InterruptedException {
// Arrange
Node remoteNode = createNode();
Node localNode = createNode();
Message message = createMessage(createPlainDocument());
message.setFrom(null);
message.setTo(null);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, true, remoteNode, localNode);
final Semaphore semaphore = new Semaphore(1);
semaphore.acquire();
final List<Message> actual = new ArrayList<>();
target.addMessageListener(new MessageChannel.MessageChannelListener() {
@Override
public void onReceiveMessage(Message message) {
actual.add(message);
synchronized (semaphore) {
semaphore.release();
}
}
}, true);
// Act
transport.raiseOnReceive(message);
synchronized (semaphore) {
semaphore.tryAcquire(1, 1000, TimeUnit.MILLISECONDS);
}
// Assert
assertEquals(1, actual.size());
assertEquals(message, actual.get(0));
assertEquals(localNode, actual.get(0).getTo());
assertEquals(remoteNode, actual.get(0).getFrom());
assertNull(actual.get(0).getPp());
}
@Test
public void onReceiveMessage_incompleteRecipients_fillsFromTheSession() throws InterruptedException {
// Arrange
Node remoteNode = createNode();
Node localNode = createNode();
Message message = createMessage(createPlainDocument());
message.setFrom(remoteNode.copy());
message.setTo(localNode.copy());
message.getFrom().setDomain(null);
message.getTo().setDomain(null);
message.getFrom().setInstance(null);
message.getTo().setInstance(null);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, true, remoteNode, localNode);
final Semaphore semaphore = new Semaphore(1);
semaphore.acquire();
final List<Message> actual = new ArrayList<>();
target.addMessageListener(new MessageChannel.MessageChannelListener() {
@Override
public void onReceiveMessage(Message message) {
actual.add(message);
synchronized (semaphore) {
semaphore.release();
}
}
}, true);
// Act
transport.raiseOnReceive(message);
synchronized (semaphore) {
semaphore.tryAcquire(1, 1000, TimeUnit.MILLISECONDS);
}
// Assert
assertEquals(1, actual.size());
assertEquals(message, actual.get(0));
assertEquals(localNode.toIdentity(), actual.get(0).getTo().toIdentity());
assertEquals(remoteNode.toIdentity(), actual.get(0).getFrom().toIdentity());
assertNull(actual.get(0).getPp());
}
@Test
public void onReceiveMessage_moduleReturnsMessage_receivesModuleMessage() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
Message moduleMessage = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, true);
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onReceiving(message)).thenReturn(moduleMessage);
target.getMessageModules().add(module);
// Act
transport.raiseOnReceive(message);
// Assert
verify(module, times(1)).onReceiving(message);
verify(listener, times(1)).onReceiveMessage(moduleMessage);
}
@Test
public void onReceiveMessage_moduleReturnsNull_ignoresMessage() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message1 = createMessage(createPlainDocument());
Message message2 = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, true);
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onReceiving(message1)).thenReturn(null);
when(module.onReceiving(message2)).thenReturn(message2);
target.getMessageModules().add(module);
// Act
transport.raiseOnReceive(message1);
transport.raiseOnReceive(message2);
// Assert
verify(module, times(1)).onReceiving(message1);
verify(module, times(1)).onReceiving(message2);
verify(listener, never()).onReceiveMessage(message1);
verify(listener, times(1)).onReceiveMessage(message2);
}
@Test
public void onReceiveMessage_multipleRegisteredModules_callsEachModuleOnce() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, true);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Message> module = mock(ChannelModule.class);
when(module.onReceiving(message)).thenReturn(message);
target.getMessageModules().add(module);
}
// Act
transport.raiseOnReceive(message);
// Assert
verify(listener, times(1)).onReceiveMessage(message);
for (ChannelModule<Message> module : target.getMessageModules()) {
verify(module, times(1)).onReceiving(message);
}
}
@Test
public void sendNotification_establishedState_callsTransport() throws IOException {
// Arrange
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendNotification(notification);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(notification, transport.sentEnvelopes.remove());
}
@Test(expected = IllegalArgumentException.class)
public void sendNotification_nullNotification_throwsIllegalArgumentException() throws IOException {
// Arrange
Notification notification = null;
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendNotification(notification);
}
@Test(expected = IllegalStateException.class)
public void sendNotification_newSessionState_throwsIllegalStateException() throws IOException {
// Arrange
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.NEW);
// Act
target.sendNotification(notification);
}
@Test
public void sendNotification_moduleReturnsNotification_sendsModuleNotification() throws IOException {
// Arrange
Notification notification = createNotification(Notification.Event.AUTHORIZED);
Notification moduleNotification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onSending(notification)).thenReturn(moduleNotification);
target.getNotificationModules().add(module);
// Act
target.sendNotification(notification);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(moduleNotification, transport.sentEnvelopes.remove());
}
@Test
public void sendNotification_moduleReturnsNull_doNotCallTransport() throws IOException {
// Arrange
Notification notification = createNotification(Notification.Event.AUTHORIZED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onSending(notification)).thenReturn(null);
target.getNotificationModules().add(module);
// Act
target.sendNotification(notification);
// Assert
assertEquals(0, transport.sentEnvelopes.size());
}
@Test
public void sendNotification_multipleRegisteredModules_callsEachModuleOnce() throws IOException {
// Arrange
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onSending(notification)).thenReturn(notification);
target.getNotificationModules().add(module);
}
// Act
target.sendNotification(notification);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(notification, transport.sentEnvelopes.remove());
for (ChannelModule<Notification> module : target.getNotificationModules()) {
verify(module, times(1)).onSending(notification);
}
}
@Test
public void onReceiveNotification_registeredListenerTwoReceives_callsListenerAndUnregister() throws InterruptedException {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, true);
// Act
transport.raiseOnReceive(notification);
transport.raiseOnReceive(notification);
// Assert
verify(listener, times(1)).onReceiveNotification(notification);
}
@Test
public void onReceiveNotification_registeredListenersMultipleReceives_callsListenersMultipleTimes() throws InterruptedException {
// Arrange
int notificationCount = createRandomInt(100) + 1;
int listenersCount = createRandomInt(10) + 1;
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
List<NotificationChannel.NotificationChannelListener> listeners = new ArrayList<>();
for (int i = 0; i < listenersCount; i++) {
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
target.addNotificationListener(listener, false);
listeners.add(listener);
}
// Act
for (int i = 0; i < notificationCount; i++) {
transport.raiseOnReceive(notification);
}
// Assert
for (int i = 0; i < listenersCount; i++) {
verify(listeners.get(i), times(notificationCount)).onReceiveNotification(notification);
}
}
@Test
public void onReceiveNotification_moduleReturnsNotification_receivesModuleNotification() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.AUTHORIZED);
Notification moduleNotification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, true);
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onReceiving(notification)).thenReturn(moduleNotification);
target.getNotificationModules().add(module);
// Act
transport.raiseOnReceive(notification);
// Assert
verify(module, times(1)).onReceiving(notification);
verify(listener, times(1)).onReceiveNotification(moduleNotification);
}
@Test
public void onReceiveNotification_moduleReturnsNull_ignoresNotification() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification1 = createNotification(Notification.Event.AUTHORIZED);
Notification notification2 = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, true);
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onReceiving(notification1)).thenReturn(null);
when(module.onReceiving(notification2)).thenReturn(notification2);
target.getNotificationModules().add(module);
// Act
transport.raiseOnReceive(notification1);
transport.raiseOnReceive(notification2);
// Assert
verify(module, times(1)).onReceiving(notification1);
verify(module, times(1)).onReceiving(notification2);
verify(listener, never()).onReceiveNotification(notification1);
verify(listener, times(1)).onReceiveNotification(notification2);
}
@Test
public void onReceiveNotification_multipleRegisteredModules_callsEachModuleOnce() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.AUTHORIZED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, true);
int modulesCount = createRandomInt(10) + 1;
for (int i = 0; i < modulesCount; i++) {
ChannelModule<Notification> module = mock(ChannelModule.class);
when(module.onReceiving(notification)).thenReturn(notification);
target.getNotificationModules().add(module);
}
// Act
transport.raiseOnReceive(notification);
// Assert
verify(listener, times(1)).onReceiveNotification(notification);
for (ChannelModule<Notification> module : target.getNotificationModules()) {
verify(module, times(1)).onReceiving(notification);
}
}
@Test
public void sendSession_establishedState_callsTransport() throws IOException {
// Arrange
Session session = createSession(Session.SessionState.ESTABLISHED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendSession(session);
// Assert
assertEquals(1, transport.sentEnvelopes.size());
assertEquals(session, transport.sentEnvelopes.remove());
}
@Test(expected = IllegalArgumentException.class)
public void sendSession_nullSession_throwsIllegalArgumentException() throws IOException {
// Arrange
Session session = null;
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
target.sendSession(session);
}
@Test
public void onReceiveSession_registeredListenerTwoReceives_callsListenerAndUnregister() throws InterruptedException {
// Arrange
SessionChannel.SessionChannelListener listener = mock(SessionChannel.SessionChannelListener.class);
Session session = createSession(Session.SessionState.ESTABLISHED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.enqueueSessionListener(listener);
// Act
transport.raiseOnReceive(session);
transport.raiseOnReceive(session);
// Assert
verify(listener, times(1)).onReceiveSession(session);
}
@Test
public void getTransport_anyInstance_returnsInstance() {
// Arrange
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
Transport actual = target.getTransport();
// Assert
assertEquals(transport, actual);
}
@Test
public void getRemoteNode_nullInstance_returnsNull() {
// Arrange
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
Node actual = target.getRemoteNode();
// Assert
assertNull(actual);
}
@Test
public void getRemoteNode_anyInstance_returnsInstance() {
// Arrange
Node remoteNode = createNode();
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, false, remoteNode, null);
// Act
Node actual = target.getRemoteNode();
// Assert
assertEquals(remoteNode, actual);
}
@Test
public void getLocalNode_nullInstance_returnsNull() {
// Arrange
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
Node actual = target.getLocalNode();
// Assert
assertNull(actual);
}
@Test
public void getLocalNode_anyInstance_returnsInstance() {
// Arrange
Node localNode = createNode();
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, false, null, localNode);
// Act
Node actual = target.getLocalNode();
// Assert
assertEquals(localNode, actual);
}
@Test
public void getSessionId_nullInstance_returnsNull() {
// Arrange
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
// Act
String actual = target.getSessionId();
// Assert
assertNull(actual);
}
@Test
public void getSessionId_anyInstance_returnsInstance() {
// Arrange
String sessionId = EnvelopeId.newId();
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, false, null, null, sessionId);
// Act
String actual = target.getSessionId();
// Assert
assertEquals(sessionId, actual);
}
@Test
public void getState_new_returnsInstance() {
// Arrange
Session.SessionState sessionState = Session.SessionState.NEW;
ChannelBase target = getTarget(sessionState);
// Act
Session.SessionState actual = target.getState();
// Assert
assertEquals(sessionState, actual);
}
/*
@Test
public void setState_negotiation_setsValueAndStartsRemovableListener() {
// Arrange
Session.SessionState state = Session.SessionState.NEGOTIATING;
ChannelBase target = getTarget(Session.SessionState.NEW);
// Act
((TestChannel)target).setState(state);
// Assert
assertEquals(state, target.getState());
assertEquals(1, transport.addedListeners.size());
TransportListenerRemoveAfterReceive listener = transport.addedListeners.remove();
assertNotNull(listener.transportListener);
assertEquals(true, listener.removeAfterReceive);
}
*/
@Test(expected = IllegalArgumentException.class)
public void setState_null_throwsIllegalArgumentException() {
// Arrange
Session.SessionState state = null;
ChannelBase target = getTarget(Session.SessionState.NEW);
// Act
((TestChannel)target).setState(state);
}
@Test
public void raiseOnReceiveMessage_registeredRemovableListener_callsListenerOnceAndRemove() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveMessage(message);
((TestChannel)target).raiseOnReceiveMessage(message);
// Assert
verify(listener, times(1)).onReceiveMessage(message);
}
@Test
public void raiseOnReceiveMessage_registeredListener_callsListenerTwice() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener, false);
// Act
((TestChannel)target).raiseOnReceiveMessage(message);
((TestChannel)target).raiseOnReceiveMessage(message);
// Assert
verify(listener, times(2)).onReceiveMessage(message);
}
@Test
public void raiseOnReceiveMessage_twoRegisteredListeners_callsFirstOnceAndRemoveAndSecondTwice() {
// Arrange
MessageChannel.MessageChannelListener listener1 = mock(MessageChannel.MessageChannelListener.class);
MessageChannel.MessageChannelListener listener2 = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addMessageListener(listener1, true);
target.addMessageListener(listener2, false);
// Act
((TestChannel)target).raiseOnReceiveMessage(message);
((TestChannel)target).raiseOnReceiveMessage(message);
// Assert
verify(listener1, times(1)).onReceiveMessage(message);
verify(listener2, times(2)).onReceiveMessage(message);
}
@Test(expected = IllegalStateException.class)
public void raiseOnReceiveMessage_finishedSessionState_throwsIllegalStateException() {
// Arrange
MessageChannel.MessageChannelListener listener = mock(MessageChannel.MessageChannelListener.class);
Message message = createMessage(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.FINISHED);
target.addMessageListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveMessage(message);
}
@Test
public void raiseOnReceiveCommand_registeredRemovableListener_callsListenerOnceAndRemove() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveCommand(command);
((TestChannel)target).raiseOnReceiveCommand(command);
// Assert
verify(listener, times(1)).onReceiveCommand(command);
}
@Test
public void raiseOnReceiveCommand_registeredListener_callsListenerTwice() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener, false);
// Act
((TestChannel)target).raiseOnReceiveCommand(command);
((TestChannel)target).raiseOnReceiveCommand(command);
// Assert
verify(listener, times(2)).onReceiveCommand(command);
}
@Test
public void raiseOnReceiveCommand_twoRegisteredListeners_callsFirstOnceAndRemoveAndSecondTwice() {
// Arrange
CommandChannel.CommandChannelListener listener1 = mock(CommandChannel.CommandChannelListener.class);
CommandChannel.CommandChannelListener listener2 = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addCommandListener(listener1, true);
target.addCommandListener(listener2, false);
// Act
((TestChannel)target).raiseOnReceiveCommand(command);
((TestChannel)target).raiseOnReceiveCommand(command);
// Assert
verify(listener1, times(1)).onReceiveCommand(command);
verify(listener2, times(2)).onReceiveCommand(command);
}
@Test(expected = IllegalStateException.class)
public void raiseOnReceiveCommand_finishedSessionState_throwsIllegalStateException() {
// Arrange
CommandChannel.CommandChannelListener listener = mock(CommandChannel.CommandChannelListener.class);
Command command = createCommand(createPlainDocument());
ChannelBase target = getTarget(Session.SessionState.FINISHED);
target.addCommandListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveCommand(command);
}
@Test
public void raiseOnReceiveNotification_registeredRemovableListener_callsListenerOnceAndRemove() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveNotification(notification);
((TestChannel)target).raiseOnReceiveNotification(notification);
// Assert
verify(listener, times(1)).onReceiveNotification(notification);
}
@Test
public void raiseOnReceiveNotification_registeredListener_callsListenerTwice() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener, false);
// Act
((TestChannel)target).raiseOnReceiveNotification(notification);
((TestChannel)target).raiseOnReceiveNotification(notification);
// Assert
verify(listener, times(2)).onReceiveNotification(notification);
}
@Test
public void raiseOnReceiveNotification_twoRegisteredListeners_callsFirstOnceAndRemoveAndSecondTwice() {
// Arrange
NotificationChannel.NotificationChannelListener listener1 = mock(NotificationChannel.NotificationChannelListener.class);
NotificationChannel.NotificationChannelListener listener2 = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED);
target.addNotificationListener(listener1, true);
target.addNotificationListener(listener2, false);
// Act
((TestChannel)target).raiseOnReceiveNotification(notification);
((TestChannel)target).raiseOnReceiveNotification(notification);
// Assert
verify(listener1, times(1)).onReceiveNotification(notification);
verify(listener2, times(2)).onReceiveNotification(notification);
}
@Test(expected = IllegalStateException.class)
public void raiseOnReceiveNotification_finishedSessionState_throwsIllegalStateException() {
// Arrange
NotificationChannel.NotificationChannelListener listener = mock(NotificationChannel.NotificationChannelListener.class);
Notification notification = createNotification(Notification.Event.RECEIVED);
ChannelBase target = getTarget(Session.SessionState.FINISHED);
target.addNotificationListener(listener, true);
// Act
((TestChannel)target).raiseOnReceiveNotification(notification);
}
@Test
public void schedulePing_inactiveEstablishedChannel_sendPings() throws InterruptedException {
// Arrange
ChannelBase target = getTarget(Session.SessionState.ESTABLISHED, false, true, 100, 600, null, null, EnvelopeId.newId());
// Act
Thread.sleep(350);
// Assert
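// With a 100 ms ping interval, three ping envelopes should have been sent during the 350 ms wait (timing-sensitive).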
assertEquals(3, ((TestTransport) target.getTransport()).sentEnvelopes.size());
}
private class TestChannel extends ChannelBase {
protected TestChannel(Transport transport, Session.SessionState state, boolean fillEnvelopeRecipients, boolean autoReplyPings, long pingInterval, long pingDisconnectionInterval, Node remoteNode, Node localNode, String sessionId) {
super(transport, fillEnvelopeRecipients, autoReplyPings, pingInterval, pingDisconnectionInterval);
setRemoteNode(remoteNode);
setLocalNode(localNode);
setState(state);
setSessionId(sessionId);
}
public Session lastReceivedSession;
@Override
protected synchronized void raiseOnReceiveSession(Session session) {
lastReceivedSession = session;
super.raiseOnReceiveSession(session);
}
}
private class TestTransport extends TransportBase implements Transport {
public URI openUri;
public Queue<Envelope> sentEnvelopes;
public boolean closeInvoked;
public TestTransport() {
sentEnvelopes = new LinkedBlockingQueue<>();
}
/**
* Closes the transport.
*/
@Override
protected void performClose() throws IOException {
closeInvoked = true;
}
@Override
protected void performOpen(URI uri) throws IOException {
}
/**
* Sends an envelope to the remote node.
*
* @param envelope
*/
@Override
public void send(Envelope envelope) throws IOException {
sentEnvelopes.add(envelope);
}
/**
* Opens the transport connection with the specified Uri.
*
* @param uri
*/
@Override
public void open(URI uri) throws IOException {
openUri = uri;
}
@Override
public boolean isConnected() {
return true;
}
@Override
public void setStateListener(TransportStateListener listener) {
super.setStateListener(listener);
}
}
}
|
|
package com.fasterxml.jackson.core.json.async;
import java.io.IOException;
import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.async.AsyncTestBase;
import com.fasterxml.jackson.core.testsupport.AsyncReaderWrapper;
public class AsyncNonStdParsingTest extends AsyncTestBase
{
public void testLargeUnquotedNames() throws Exception
{
JsonFactory f = new JsonFactory();
f.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
StringBuilder sb = new StringBuilder(5000);
sb.append("[\n");
final int REPS = 1050;
for (int i = 0; i < REPS; ++i) {
if (i > 0) {
sb.append(',');
if ((i & 7) == 0) {
sb.append('\n');
}
}
sb.append("{");
sb.append("abc").append(i&127).append(':');
sb.append((i & 1) != 0);
sb.append("}\n");
}
sb.append("]");
String doc = sb.toString();
_testLargeUnquoted(f, REPS, doc, 0, 99);
_testLargeUnquoted(f, REPS, doc, 0, 5);
_testLargeUnquoted(f, REPS, doc, 0, 3);
_testLargeUnquoted(f, REPS, doc, 0, 2);
_testLargeUnquoted(f, REPS, doc, 0, 1);
_testLargeUnquoted(f, REPS, doc, 1, 99);
_testLargeUnquoted(f, REPS, doc, 1, 1);
}
private void _testLargeUnquoted(JsonFactory f, int reps, String doc,
int offset, int readSize) throws Exception
{
AsyncReaderWrapper p = createParser(f, doc, offset, readSize);
assertToken(JsonToken.START_ARRAY, p.nextToken());
for (int i = 0; i < reps; ++i) {
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("abc"+(i&127), p.currentName());
assertToken(((i&1) != 0) ? JsonToken.VALUE_TRUE : JsonToken.VALUE_FALSE, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
}
assertToken(JsonToken.END_ARRAY, p.nextToken());
p.close();
}
public void testSimpleUnquotedNames() throws Exception
{
final JsonFactory f = new JsonFactory();
f.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
_testSimpleUnquoted(f, 0, 99);
_testSimpleUnquoted(f, 0, 5);
_testSimpleUnquoted(f, 0, 3);
_testSimpleUnquoted(f, 0, 2);
_testSimpleUnquoted(f, 0, 1);
_testSimpleUnquoted(f, 1, 99);
_testSimpleUnquoted(f, 1, 3);
_testSimpleUnquoted(f, 1, 1);
}
private void _testSimpleUnquoted(JsonFactory f,
int offset, int readSize) throws Exception
{
String doc = "{ a : 1, _foo:true, $:\"money!\", \" \":null }";
AsyncReaderWrapper p = createParser(f, doc, offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("a", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("_foo", p.currentName());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("$", p.currentName());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("money!", p.currentText());
// and then regular quoted one should still work too:
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals(" ", p.currentName());
assertToken(JsonToken.VALUE_NULL, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
// Another thing, as per [jackson-core#102]: numbers
p = createParser(f, "{ 123:true,4:false }", offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("123", p.currentName());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("4", p.currentName());
assertToken(JsonToken.VALUE_FALSE, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
}
/**
* Test to verify that the default parser settings do not
* accept single-quotes for String values (field names,
* textual values)
*/
public void testAposQuotingDisabled() throws Exception
{
JsonFactory f = new JsonFactory();
_testSingleQuotesDefault(f, 0, 99);
_testSingleQuotesDefault(f, 0, 5);
_testSingleQuotesDefault(f, 0, 3);
_testSingleQuotesDefault(f, 0, 1);
_testSingleQuotesDefault(f, 1, 99);
_testSingleQuotesDefault(f, 1, 1);
}
private void _testSingleQuotesDefault(JsonFactory f,
int offset, int readSize) throws Exception
{
// First, let's see that by default they are not allowed
String JSON = "[ 'text' ]";
AsyncReaderWrapper p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_ARRAY, p.nextToken());
try {
p.nextToken();
fail("Expected exception");
} catch (JsonParseException e) {
verifyException(e, "Unexpected character ('''");
} finally {
p.close();
}
JSON = "{ 'a':1 }";
p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
try {
p.nextToken();
fail("Expected exception");
} catch (JsonParseException e) {
verifyException(e, "Unexpected character ('''");
} finally {
p.close();
}
}
/**
* Test to verify optional handling of
* single quotes, to allow handling invalid (but, alas, common)
* JSON.
*/
public void testAposQuotingEnabled() throws Exception
{
JsonFactory f = new JsonFactory();
f.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
_testAposQuotingEnabled(f, 0, 99);
_testAposQuotingEnabled(f, 0, 5);
_testAposQuotingEnabled(f, 0, 3);
_testAposQuotingEnabled(f, 0, 2);
_testAposQuotingEnabled(f, 0, 1);
_testAposQuotingEnabled(f, 1, 99);
_testAposQuotingEnabled(f, 2, 1);
_testAposQuotingEnabled(f, 1, 1);
}
private void _testAposQuotingEnabled(JsonFactory f,
int offset, int readSize) throws Exception
{
String UNINAME = String.format("Uni%c-key-%c", UNICODE_2BYTES, UNICODE_3BYTES);
String UNIVALUE = String.format("Uni%c-value-%c", UNICODE_3BYTES, UNICODE_2BYTES);
String JSON = String.format(
"{ 'a' : 1, \"foobar\": 'b', '_abcde1234':'d', '\"' : '\"\"', '':'', '%s':'%s'}",
UNINAME, UNIVALUE);
AsyncReaderWrapper p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("a", p.currentText());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals("1", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("foobar", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("b", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("_abcde1234", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("d", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("\"", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("\"\"", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals(UNINAME, p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals(UNIVALUE, p.currentText());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
JSON = "{'b':1,'array':[{'b':3}],'ob':{'b':4,'x':0,'y':'"+UNICODE_SEGMENT+"','a':false }}";
p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("b", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("b", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals(3, p.getIntValue());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("b", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals(4, p.getIntValue());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("x", p.currentName());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals(0, p.getIntValue());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("y", p.currentName());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals(UNICODE_SEGMENT, p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("a", p.currentName());
assertToken(JsonToken.VALUE_FALSE, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
}
// test to verify that we implicitly allow escaping of apostrophe
public void testSingleQuotesEscaped() throws Exception
{
JsonFactory f = new JsonFactory();
f.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
_testSingleQuotesEscaped(f, 0, 99);
_testSingleQuotesEscaped(f, 0, 5);
_testSingleQuotesEscaped(f, 0, 3);
_testSingleQuotesEscaped(f, 0, 1);
_testSingleQuotesEscaped(f, 1, 99);
_testSingleQuotesEscaped(f, 1, 1);
}
private void _testSingleQuotesEscaped(JsonFactory f,
int offset, int readSize) throws Exception
{
String JSON = "[ '16\\'' ]";
AsyncReaderWrapper p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("16'", p.currentText());
assertToken(JsonToken.END_ARRAY, p.nextToken());
p.close();
}
public void testNonStandardNameChars() throws Exception
{
JsonFactory f = new JsonFactory();
f.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
_testNonStandardNameChars(f, 0, 99);
_testNonStandardNameChars(f, 0, 6);
_testNonStandardNameChars(f, 0, 3);
_testNonStandardNameChars(f, 0, 1);
_testNonStandardNameChars(f, 1, 99);
_testNonStandardNameChars(f, 2, 1);
}
private void _testNonStandardNameChars(JsonFactory f,
int offset, int readSize) throws Exception
{
String JSON = "{ @type : \"mytype\", #color : 123, *error* : true, "
+" hyphen-ated : \"yes\", me+my : null"
+"}";
AsyncReaderWrapper p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("@type", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("mytype", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("#color", p.currentText());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertEquals(123, p.getIntValue());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("*error*", p.currentText());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("hyphen-ated", p.currentText());
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("yes", p.currentText());
assertToken(JsonToken.FIELD_NAME, p.nextToken());
assertEquals("me+my", p.currentText());
assertToken(JsonToken.VALUE_NULL, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
p.close();
}
public void testNonStandardBackslashQuotingForValues() throws Exception
{
_testNonStandardBackslashQuoting(0, 99);
_testNonStandardBackslashQuoting(0, 6);
_testNonStandardBackslashQuoting(0, 3);
_testNonStandardBackslashQuoting(0, 1);
_testNonStandardBackslashQuoting(2, 99);
_testNonStandardBackslashQuoting(1, 1);
}
private void _testNonStandardBackslashQuoting(
int offset, int readSize) throws Exception
{
// first: verify that we get an exception
JsonFactory f = new JsonFactory();
assertFalse(f.isEnabled(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER));
final String JSON = quote("\\'");
AsyncReaderWrapper p = createParser(f, JSON, offset, readSize);
try {
p.nextToken();
p.currentText();
fail("Should have thrown an exception for doc <"+JSON+">");
} catch (JsonParseException e) {
verifyException(e, "unrecognized character escape");
} finally {
p.close();
}
// and then verify it's ok...
f.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true);
assertTrue(f.isEnabled(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER));
p = createParser(f, JSON, offset, readSize);
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("'", p.currentText());
p.close();
}
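// Helper: feeds the JSON document to the non-blocking parser readSize bytes at a time;
// the offset argument is forwarded to the async test wrapper.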
private AsyncReaderWrapper createParser(JsonFactory f, String doc,
int offset, int readSize) throws IOException
{
return asyncForBytes(f, readSize, _jsonDoc(doc), offset);
}
}
|
|
// ----------------------------------------------------------------------------
// Copyright 2006-2010, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2009/06/01 Martin D. Flynn
// -Extracted from SendMail
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.lang.reflect.*;
import java.io.*;
import java.util.*;
import java.net.*;
import java.awt.*;
import java.awt.event.*;
import javax.activation.*; // since Java 6
import javax.mail.*;
import javax.mail.internet.*;
public class SendMailArgs
{
// ------------------------------------------------------------------------
public static final boolean USE_AUTHENTICATOR = true;
public static final String SSL_FACTORY = "javax.net.ssl.SSLSocketFactory";
// ------------------------------------------------------------------------
/**
*** Filters and returns the base email address from the specified String.<br>
*** For example, if the String "Jones<jones@example.com>" is passed to this
*** method, then the value "jones@example.com" will be returned.
*** @param addr The email address to filter.
*** @return The filtered email address, or null if the specified email address is invalid.
**/
public static String parseEMailAddress(String addr)
{
if (addr != null) {
try {
InternetAddress ia = new InternetAddress(addr, true);
return ia.getAddress();
} catch (Throwable ae) { // AddressException
// drop through
}
}
return null;
}
// ------------------------------------------------------------------------
/**
*** Internal method to send email
*** @param args The email arguments
*** @return True if the email was sent, false otherwise
**/
public static boolean send(SendMail.Args args)
{
String from = args.getFrom();
String to[] = args.getTo();
String cc[] = args.getCc();
String bcc[] = args.getBcc();
String subject = args.getSubject();
String msgBody = args.getBody();
Properties headers = args.getHeaders();
SendMail.Attachment attach = args.getAttachment();
/* SMTP properties */
// http://www.j2ee.me/products/javamail/javadocs/com/sun/mail/smtp/package-summary.html
// mail.smtp.host (String)
// mail.smtp.port (int)
// mail.smtp.user (String)
// mail.smtp.auth (boolean)
// mail.smtp.connectiontimeout (int) [milliseconds]
// mail.smtp.timeout (int) [milliseconds]
// mail.smtp.socketFactory.class (String)
// mail.smtp.socketFactory.port (int)
// mail.smtp.socketFactory.fallback (boolean)
// mail.smtp.starttls.enable (boolean)
// mail.smtp.sendpartial (boolean)
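// Illustrative values only (assumptions, not taken from this configuration):
//   mail.smtp.host=smtp.example.com, mail.smtp.port=587,
//   mail.smtp.starttls.enable=true, mail.smtp.auth=true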
Properties props = new Properties();
// Debug
if (RTConfig.getBoolean(RTKey.SMTP_DEBUG)) {
props.put("mail.debug", "true");
Print.logDebug("SendMail debug mode");
}
// SMTP host:port
final String smtpHost = RTConfig.getString(RTKey.SMTP_SERVER_HOST);
final int smtpPort = RTConfig.getInt(RTKey.SMTP_SERVER_PORT,25);
if (StringTools.isBlank(smtpHost) || smtpHost.endsWith("example.com")) {
Print.logError("Null/Invalid SMTP host, not sending email");
return false;
} else
if (smtpPort <= 0) {
Print.logError("Invalid SMTP port, not sending email");
return false;
}
props.put("mail.smtp.host" , smtpHost);
props.put("mail.smtp.port" , String.valueOf(smtpPort));
props.put("mail.smtp.connectiontimeout" , "60000");
props.put("mail.smtp.timeout" , "60000");
//props.put("mail.smtp.auth" , "true");
//props.put("mail.smtp.auth.mechanisms" , "LOGIN PLAIN DIGEST-MD5 NTLM");
// SSL
String enableSSL = RTConfig.getString(RTKey.SMTP_ENABLE_SSL,"false").toLowerCase();
if (enableSSL.equals("only") || enableSSL.equals("true")) {
props.put("mail.smtp.socketFactory.port" , String.valueOf(smtpPort));
props.put("mail.smtp.socketFactory.class" , SSL_FACTORY);
props.put("mail.smtp.socketFactory.fallback" , "false");
//props.put("mail.smtp.socketFactory.fallback" , "true");
if (enableSSL.equals("only")) {
props.put("mail.smtp.ssl.enable" , "true");
props.put("mail.smtp.ssl.socketFactory.port", String.valueOf(smtpPort));
}
}
// TLS
String enableTLS = RTConfig.getString(RTKey.SMTP_ENABLE_TLS,"false").toLowerCase();
if (enableTLS.equals("only") || enableTLS.equals("true")) {
props.put("mail.smtp.starttls.required" , "true");
props.put("mail.smtp.starttls.enable" , "true");
}
/* SMTP Authenticator */
javax.mail.Authenticator auth = null;
final String smtpUser = StringTools.trim(RTConfig.getString(RTKey.SMTP_SERVER_USER));
final String smtpPass = RTConfig.getString(RTKey.SMTP_SERVER_PASSWORD);
if (USE_AUTHENTICATOR && !StringTools.isBlank(smtpUser)) {
auth = new javax.mail.Authenticator() {
public javax.mail.PasswordAuthentication getPasswordAuthentication() {
return new javax.mail.PasswordAuthentication(smtpUser, smtpPass);
}
};
props.put("mail.smtp.user", smtpUser);
props.put("mail.smtp.auth", "true"); // SSL
}
/* SMTP Session */
//props.list(System.out);
Session session = Session.getInstance(props, auth);
try {
Message msg = new MimeMessage(session);
msg.setFrom(new InternetAddress(from));
InternetAddress toAddr[] = _convertRecipients(to);
InternetAddress ccAddr[] = _convertRecipients(cc);
InternetAddress bccAddr[] = _convertRecipients(bcc);
if ((toAddr != null) && (toAddr.length > 0)) {
for (Iterator i = headers.keySet().iterator(); i.hasNext();) {
String k = (String)i.next();
String v = headers.getProperty(k);
if (v != null) {
msg.setHeader(k, v);
}
}
msg.setRecipients(Message.RecipientType.TO , toAddr);
msg.setRecipients(Message.RecipientType.CC , ccAddr);
msg.setRecipients(Message.RecipientType.BCC, bccAddr);
msg.setSubject(subject);
msg.setSentDate(new Date());
if ((attach != null) && (attach.getSize() > 0)) {
Multipart multipart = new MimeMultipart();
if ((msgBody != null) && !msgBody.equals("")) {
BodyPart textBodyPart = new MimeBodyPart();
textBodyPart.setText(msgBody);
multipart.addBodyPart(textBodyPart);
}
// add attachment
BodyPart attachBodyPart = new MimeBodyPart();
DataSource source = new ByteArrayDataSource(attach.getName(), attach.getType(), attach.getBytes());
attachBodyPart.setDataHandler(new DataHandler(source));
attachBodyPart.setFileName(source.getName());
multipart.addBodyPart(attachBodyPart);
// set content
msg.setContent(multipart);
} else {
msg.setText(msgBody); // setContent(msgBody, CONTENT_TYPE_PLAIN);
}
/* send email */
msg.saveChanges(); // implicit with send()
if (!USE_AUTHENTICATOR && !StringTools.isBlank(smtpUser)) {
Transport transport = session.getTransport("smtp");
transport.connect(smtpHost, smtpUser, (smtpPass!=null?smtpPass:""));
transport.sendMessage(msg, msg.getAllRecipients());
transport.close();
} else {
Transport.send(msg);
}
Print.logDebug("Email sent ...");
return true;
} else {
return false;
}
} catch (MessagingException me) {
Print.logStackTrace("Unable to send email [host="+smtpHost+"; port="+smtpPort+"]", me);
for (Exception ex = me; ex != null;) {
if (ex instanceof SendFailedException) {
SendFailedException sfex = (SendFailedException)ex;
_printAddresses("Invalid:" , sfex.getInvalidAddresses());
_printAddresses("Valid Unsent:", sfex.getValidUnsentAddresses());
_printAddresses("Valid Sent:" , sfex.getValidSentAddresses());
}
ex = (ex instanceof MessagingException)? ((MessagingException)ex).getNextException() : null;
}
return false;
}
}
// ------------------------------------------------------------------------
/**
*** Converts the array of String email addresses to instances of 'InternetAddress'
*** @param to The array of email addresses
*** @return An array of InternetAddress instances (invalid addresses are logged and skipped)
*** @throws AddressException declared for signature compatibility; invalid addresses are skipped rather than thrown
**/
private static InternetAddress[] _convertRecipients(String to[])
throws AddressException
{
java.util.List<InternetAddress> inetAddr = new Vector<InternetAddress>();
for (int i = 0; i < to.length; i++) {
String t = (to[i] != null)? to[i].trim() : "";
if (!t.equals("")) {
try {
inetAddr.add(new InternetAddress(t));
} catch (AddressException ae) {
Print.logStackTrace("Address: " + t + " (skipped)", ae);
}
}
}
return inetAddr.toArray(new InternetAddress[inetAddr.size()]);
}
// ------------------------------------------------------------------------
/**
*** Prints the list of email addresses (debug purposes only)
**/
private static void _printAddresses(String msg, Address addr[])
{
if (addr != null) {
Print.logInfo(msg);
for (int i = 0; i < addr.length; i++) {
Print.logInfo(" " + addr[i]);
}
}
}
// ------------------------------------------------------------------------
/**
*** ByteArrayDataSource class
**/
private static class ByteArrayDataSource
implements DataSource
{
private String name = null;
private String type = null;
private Object source = null;
private ByteArrayDataSource(String name, String type, Object src) {
this.name = name;
this.type = type;
this.source = src;
}
public ByteArrayDataSource(String name, byte src[]) {
this(name, null, src);
}
public ByteArrayDataSource(String name, String type, byte src[]) {
this(name, type, (Object)src);
}
public ByteArrayDataSource(String name, String src) {
this(name, null, src);
}
public ByteArrayDataSource(String name, String type, String src) {
this(name, type, (Object)src);
}
public String getName() {
return (this.name != null)? this.name : "";
}
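// Resolves the MIME type: an explicit type wins, then the file extension (.csv/.gif/.png),
// then SendMail.DefaultContentType for raw bytes, falling back to text/plain.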
public String getContentType() {
if (this.type != null) {
return this.type;
} else
if (this.getName().toLowerCase().endsWith(".csv")) {
return HTMLTools.MIME_CSV();
} else
if (this.getName().toLowerCase().endsWith(".gif")) {
return HTMLTools.MIME_GIF();
} else
if (this.getName().toLowerCase().endsWith(".png")) {
return HTMLTools.MIME_PNG();
} else
if (this.source instanceof byte[]) {
return SendMail.DefaultContentType((byte[])this.source);
} else
if (this.source instanceof ByteArrayOutputStream) {
return SendMail.DefaultContentType(((ByteArrayOutputStream)this.source).toByteArray());
} else {
return HTMLTools.MIME_PLAIN();
}
}
public InputStream getInputStream() {
return new ByteArrayInputStream(this.toByteArray());
}
public OutputStream getOutputStream() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte b[] = this.toByteArray();
if ((b != null) && (b.length > 0)) {
out.write(b, 0, b.length);
}
this.source = out;
return (ByteArrayOutputStream)this.source;
}
private byte[] toByteArray() {
if (this.source == null) {
return new byte[0];
} else
if (this.source instanceof byte[]) {
return (byte[])this.source;
} else
if (this.source instanceof ByteArrayOutputStream) {
return ((ByteArrayOutputStream)this.source).toByteArray();
} else {
return StringTools.getBytes(this.source.toString());
}
}
}
}
|
|
package com.koushikdutta.async.http;
import com.koushikdutta.async.AsyncServer;
import com.koushikdutta.async.AsyncSocket;
import com.koushikdutta.async.ByteBufferList;
import com.koushikdutta.async.DataEmitter;
import com.koushikdutta.async.DataSink;
import com.koushikdutta.async.FilteredDataEmitter;
import com.koushikdutta.async.LineEmitter;
import com.koushikdutta.async.LineEmitter.StringCallback;
import com.koushikdutta.async.NullDataCallback;
import com.koushikdutta.async.callback.CompletedCallback;
import com.koushikdutta.async.callback.WritableCallback;
import com.koushikdutta.async.http.body.AsyncHttpRequestBody;
import com.koushikdutta.async.http.filter.ChunkedOutputFilter;
import com.koushikdutta.async.http.libcore.RawHeaders;
import com.koushikdutta.async.http.libcore.ResponseHeaders;
import java.nio.ByteBuffer;
abstract class AsyncHttpResponseImpl extends FilteredDataEmitter implements AsyncHttpResponse {
private AsyncHttpRequestBody mWriter;
public AsyncSocket getSocket() {
return mSocket;
}
@Override
public AsyncHttpRequest getRequest() {
return mRequest;
}
void setSocket(AsyncSocket exchange) {
mSocket = exchange;
if (mSocket == null)
return;
mWriter = mRequest.getBody();
if (mWriter != null) {
if (mRequest.getHeaders().getContentType() == null)
mRequest.getHeaders().setContentType(mWriter.getContentType());
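// If the body reports a positive length it is sent with Content-Length;
// otherwise the request falls back to chunked transfer encoding.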
if (mWriter.length() > 0) {
mRequest.getHeaders().setContentLength(mWriter.length());
mSink = mSocket;
}
else {
mRequest.getHeaders().getHeaders().set("Transfer-Encoding", "Chunked");
mSink = new ChunkedOutputFilter(mSocket);
}
}
else {
mSink = mSocket;
}
mSocket.setEndCallback(mReporter);
mSocket.setClosedCallback(new CompletedCallback() {
@Override
public void onCompleted(Exception ex) {
// TODO: do we care? throw if socket is still writing or something?
}
});
String rs = mRequest.getRequestString();
mRequest.logv("\n" + rs);
com.koushikdutta.async.Util.writeAll(exchange, rs.getBytes(), new CompletedCallback() {
@Override
public void onCompleted(Exception ex) {
if (mWriter != null) {
mWriter.write(mRequest, AsyncHttpResponseImpl.this, new CompletedCallback() {
@Override
public void onCompleted(Exception ex) {
onRequestCompleted(ex);
}
});
}
else {
onRequestCompleted(null);
}
}
});
LineEmitter liner = new LineEmitter();
exchange.setDataCallback(liner);
liner.setLineCallback(mHeaderCallback);
}
protected void onRequestCompleted(Exception ex) {
}
private CompletedCallback mReporter = new CompletedCallback() {
@Override
public void onCompleted(Exception error) {
if (error != null && !mCompleted) {
report(new Exception("connection closed before response completed."));
}
else {
report(error);
}
}
};
protected abstract void onHeadersReceived();
StringCallback mHeaderCallback = new StringCallback() {
private RawHeaders mRawHeaders = new RawHeaders();
@Override
public void onStringAvailable(String s) {
try {
if (mRawHeaders.getStatusLine() == null) {
mRawHeaders.setStatusLine(s);
}
else if (!"\r".equals(s)) {
mRawHeaders.addLine(s);
}
else {
mHeaders = new ResponseHeaders(mRequest.getUri(), mRawHeaders);
onHeadersReceived();
// socket may get detached after headers (websocket)
if (mSocket == null)
return;
DataEmitter emitter;
// HEAD requests must not return any data. They still may
// return content length, etc, which will confuse the body decoder
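// e.g. (illustrative) a HEAD response may carry "Content-Length: 1234" with no body bytes
// following, so the regular body decoder would wait for data that never arrives.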
if (AsyncHttpHead.METHOD.equalsIgnoreCase(mRequest.getMethod())) {
emitter = HttpUtil.EndEmitter.create(getServer(), null);
}
else {
emitter = HttpUtil.getBodyDecoder(mSocket, mRawHeaders, false);
}
setDataEmitter(emitter);
}
}
catch (Exception ex) {
report(ex);
}
}
};
@Override
protected void report(Exception e) {
super.report(e);
// DISCONNECT. EVERYTHING.
// should not get any data after this point...
// if so, eat it and disconnect.
mSocket.setDataCallback(new NullDataCallback() {
@Override
public void onDataAvailable(DataEmitter emitter, ByteBufferList bb) {
super.onDataAvailable(emitter, bb);
mSocket.close();
}
});
mSocket.setWriteableCallback(null);
mSocket.setClosedCallback(null);
mSocket.setEndCallback(null);
mCompleted = true;
}
private AsyncHttpRequest mRequest;
private AsyncSocket mSocket;
ResponseHeaders mHeaders;
public AsyncHttpResponseImpl(AsyncHttpRequest request) {
mRequest = request;
}
boolean mCompleted = false;
@Override
public ResponseHeaders getHeaders() {
return mHeaders;
}
private boolean mFirstWrite = true;
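// Sanity check on the first write: the request must already carry a Content-Type and either
// a Transfer-Encoding header or an explicit Content-Length.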
private void assertContent() {
if (!mFirstWrite)
return;
mFirstWrite = false;
assert null != mRequest.getHeaders().getHeaders().get("Content-Type");
assert mRequest.getHeaders().getHeaders().get("Transfer-Encoding") != null || mRequest.getHeaders().getContentLength() != -1;
}
DataSink mSink;
@Override
public void write(ByteBuffer bb) {
assertContent();
mSink.write(bb);
}
@Override
public void write(ByteBufferList bb) {
assertContent();
mSink.write(bb);
}
@Override
public void end() {
write(ByteBuffer.wrap(new byte[0]));
}
@Override
public void setWriteableCallback(WritableCallback handler) {
mSink.setWriteableCallback(handler);
}
@Override
public WritableCallback getWriteableCallback() {
return mSink.getWriteableCallback();
}
@Override
public boolean isOpen() {
return mSink.isOpen();
}
@Override
public void close() {
mSink.close();
}
@Override
public void setClosedCallback(CompletedCallback handler) {
mSink.setClosedCallback(handler);
}
@Override
public CompletedCallback getClosedCallback() {
return mSink.getClosedCallback();
}
@Override
public AsyncServer getServer() {
return mSocket.getServer();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.accumulo.fate.util.UtilWaitThread.sleepUninterruptibly;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.util.OpTimer;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
public class TabletLocatorImpl extends TabletLocator {
private static final Logger log = LoggerFactory.getLogger(TabletLocatorImpl.class);
// MAX_TEXT represents a TEXT object that is greater than all others. Attempted to use null for
// this purpose, but there seems to be a bug in TreeMap.tailMap with null. Therefore instead of
// using null, created MAX_TEXT.
static final Text MAX_TEXT = new Text();
static final Comparator<Text> END_ROW_COMPARATOR = (o1, o2) -> {
if (o1 == o2)
return 0;
if (o1 == MAX_TEXT)
return 1;
if (o2 == MAX_TEXT)
return -1;
return o1.compareTo(o2);
};
protected TableId tableId;
protected TabletLocator parent;
protected TreeMap<Text,TabletLocation> metaCache = new TreeMap<>(END_ROW_COMPARATOR);
protected TabletLocationObtainer locationObtainer;
private TabletServerLockChecker lockChecker;
protected Text lastTabletRow;
private TreeSet<KeyExtent> badExtents = new TreeSet<>();
private ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
private final Lock rLock = rwLock.readLock();
private final Lock wLock = rwLock.writeLock();
public interface TabletLocationObtainer {
/**
* @return null when unable to read information successfully
*/
TabletLocations lookupTablet(ClientContext context, TabletLocation src, Text row, Text stopRow,
TabletLocator parent) throws AccumuloSecurityException, AccumuloException;
List<TabletLocation> lookupTablets(ClientContext context, String tserver,
Map<KeyExtent,List<Range>> map, TabletLocator parent)
throws AccumuloSecurityException, AccumuloException;
}
public interface TabletServerLockChecker {
boolean isLockHeld(String tserver, String session);
void invalidateCache(String server);
}
private class LockCheckerSession {
private HashSet<Pair<String,String>> okLocks = new HashSet<>();
private HashSet<Pair<String,String>> invalidLocks = new HashSet<>();
private TabletLocation checkLock(TabletLocation tl) {
// the goal of this class is to minimize calls out to lockChecker, under the assumption that it
// is a resource synchronized among many threads... want to avoid fine grained synchronization
// when binning lots of mutations or ranges... remember decisions from the lockChecker in
// thread local unsynchronized memory
if (tl == null)
return null;
Pair<String,String> lock = new Pair<>(tl.tablet_location, tl.tablet_session);
if (okLocks.contains(lock))
return tl;
if (invalidLocks.contains(lock))
return null;
if (lockChecker.isLockHeld(tl.tablet_location, tl.tablet_session)) {
okLocks.add(lock);
return tl;
}
if (log.isTraceEnabled())
log.trace("Tablet server {} {} no longer holds its lock", tl.tablet_location,
tl.tablet_session);
invalidLocks.add(lock);
return null;
}
}
public TabletLocatorImpl(TableId tableId, TabletLocator parent, TabletLocationObtainer tlo,
TabletServerLockChecker tslc) {
this.tableId = tableId;
this.parent = parent;
this.locationObtainer = tlo;
this.lockChecker = tslc;
this.lastTabletRow = new Text(tableId.canonical());
lastTabletRow.append(new byte[] {'<'}, 0, 1);
}
@Override
public <T extends Mutation> void binMutations(ClientContext context, List<T> mutations,
Map<String,TabletServerMutations<T>> binnedMutations, List<T> failures)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
OpTimer timer = null;
if (log.isTraceEnabled()) {
log.trace("tid={} Binning {} mutations for table {}", Thread.currentThread().getId(),
mutations.size(), tableId);
timer = new OpTimer().start();
}
ArrayList<T> notInCache = new ArrayList<>();
Text row = new Text();
LockCheckerSession lcSession = new LockCheckerSession();
rLock.lock();
try {
processInvalidated(context, lcSession);
// For this to be efficient, rows need to be in sorted order, but always sorting is slow...
// therefore only sort the rows that are not in the cache; it is most efficient to pass
// _locateTablet rows in sorted order.
// Also need to avoid fine grained synchronization and fine grained logging, so the methods
// called from here are not synchronized and should not log.
for (T mutation : mutations) {
row.set(mutation.getRow());
TabletLocation tl = locateTabletInCache(row);
if (tl == null || !addMutation(binnedMutations, mutation, tl, lcSession))
notInCache.add(mutation);
}
} finally {
rLock.unlock();
}
if (!notInCache.isEmpty()) {
notInCache.sort((o1, o2) -> WritableComparator.compareBytes(o1.getRow(), 0,
o1.getRow().length, o2.getRow(), 0, o2.getRow().length));
wLock.lock();
try {
boolean failed = false;
for (T mutation : notInCache) {
if (failed) {
// when one tablet does not return a location, something is probably
// screwy, go ahead and fail everything.
failures.add(mutation);
continue;
}
row.set(mutation.getRow());
TabletLocation tl = _locateTablet(context, row, false, false, false, lcSession);
if (tl == null || !addMutation(binnedMutations, mutation, tl, lcSession)) {
failures.add(mutation);
failed = true;
}
}
} finally {
wLock.unlock();
}
}
if (timer != null) {
timer.stop();
log.trace("tid={} Binned {} mutations for table {} to {} tservers in {}",
Thread.currentThread().getId(), mutations.size(), tableId, binnedMutations.size(),
String.format("%.3f secs", timer.scale(SECONDS)));
}
}
private <T extends Mutation> boolean addMutation(
Map<String,TabletServerMutations<T>> binnedMutations, T mutation, TabletLocation tl,
LockCheckerSession lcSession) {
TabletServerMutations<T> tsm = binnedMutations.get(tl.tablet_location);
if (tsm == null) {
// do lock check once per tserver here to make binning faster
boolean lockHeld = lcSession.checkLock(tl) != null;
if (lockHeld) {
tsm = new TabletServerMutations<>(tl.tablet_session);
binnedMutations.put(tl.tablet_location, tsm);
} else {
return false;
}
}
// it is possible the same tserver could be listed with different sessions
if (tsm.getSession().equals(tl.tablet_session)) {
tsm.addMutation(tl.tablet_extent, mutation);
return true;
}
return false;
}
private List<Range> binRanges(ClientContext context, List<Range> ranges,
Map<String,Map<KeyExtent,List<Range>>> binnedRanges, boolean useCache,
LockCheckerSession lcSession)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
List<Range> failures = new ArrayList<>();
List<TabletLocation> tabletLocations = new ArrayList<>();
boolean lookupFailed = false;
l1: for (Range range : ranges) {
tabletLocations.clear();
Text startRow;
if (range.getStartKey() != null) {
startRow = range.getStartKey().getRow();
} else
startRow = new Text();
TabletLocation tl = null;
if (useCache)
tl = lcSession.checkLock(locateTabletInCache(startRow));
else if (!lookupFailed)
tl = _locateTablet(context, startRow, false, false, false, lcSession);
if (tl == null) {
failures.add(range);
if (!useCache)
lookupFailed = true;
continue;
}
tabletLocations.add(tl);
while (tl.tablet_extent.endRow() != null
&& !range.afterEndKey(new Key(tl.tablet_extent.endRow()).followingKey(PartialKey.ROW))) {
if (useCache) {
Text row = new Text(tl.tablet_extent.endRow());
row.append(new byte[] {0}, 0, 1);
tl = lcSession.checkLock(locateTabletInCache(row));
} else {
tl = _locateTablet(context, tl.tablet_extent.endRow(), true, false, false, lcSession);
}
if (tl == null) {
failures.add(range);
if (!useCache)
lookupFailed = true;
continue l1;
}
tabletLocations.add(tl);
}
for (TabletLocation tl2 : tabletLocations) {
TabletLocatorImpl.addRange(binnedRanges, tl2.tablet_location, tl2.tablet_extent, range);
}
}
return failures;
}
@Override
public List<Range> binRanges(ClientContext context, List<Range> ranges,
Map<String,Map<KeyExtent,List<Range>>> binnedRanges)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
/*
* For this to be efficient, need to avoid fine grained synchronization and fine grained
* logging. Therefore methods called by this are not synchronized and should not log.
*/
OpTimer timer = null;
if (log.isTraceEnabled()) {
log.trace("tid={} Binning {} ranges for table {}", Thread.currentThread().getId(),
ranges.size(), tableId);
timer = new OpTimer().start();
}
LockCheckerSession lcSession = new LockCheckerSession();
List<Range> failures;
rLock.lock();
try {
processInvalidated(context, lcSession);
// for this to be optimal, need to look ranges up in sorted order when
// ranges are not present in cache... however do not want to always
// sort ranges... therefore try binning ranges using only the cache
// and sort whatever fails and retry
failures = binRanges(context, ranges, binnedRanges, true, lcSession);
} finally {
rLock.unlock();
}
if (!failures.isEmpty()) {
// sort failures by range start key
Collections.sort(failures);
// try lookups again
wLock.lock();
try {
failures = binRanges(context, failures, binnedRanges, false, lcSession);
} finally {
wLock.unlock();
}
}
if (timer != null) {
timer.stop();
log.trace("tid={} Binned {} ranges for table {} to {} tservers in {}",
Thread.currentThread().getId(), ranges.size(), tableId, binnedRanges.size(),
String.format("%.3f secs", timer.scale(SECONDS)));
}
return failures;
}
@Override
public void invalidateCache(KeyExtent failedExtent) {
wLock.lock();
try {
badExtents.add(failedExtent);
} finally {
wLock.unlock();
}
if (log.isTraceEnabled())
log.trace("Invalidated extent={}", failedExtent);
}
@Override
public void invalidateCache(Collection<KeyExtent> keySet) {
wLock.lock();
try {
badExtents.addAll(keySet);
} finally {
wLock.unlock();
}
if (log.isTraceEnabled())
log.trace("Invalidated {} cache entries for table {}", keySet.size(), tableId);
}
@Override
public void invalidateCache(ClientContext context, String server) {
int invalidatedCount = 0;
wLock.lock();
try {
for (TabletLocation cacheEntry : metaCache.values())
if (cacheEntry.tablet_location.equals(server)) {
badExtents.add(cacheEntry.tablet_extent);
invalidatedCount++;
}
} finally {
wLock.unlock();
}
lockChecker.invalidateCache(server);
if (log.isTraceEnabled())
log.trace("invalidated {} cache entries table={} server={}", invalidatedCount, tableId,
server);
}
@Override
public void invalidateCache() {
int invalidatedCount;
wLock.lock();
try {
invalidatedCount = metaCache.size();
metaCache.clear();
} finally {
wLock.unlock();
}
if (log.isTraceEnabled())
log.trace("invalidated all {} cache entries for table={}", invalidatedCount, tableId);
}
@Override
public TabletLocation locateTablet(ClientContext context, Text row, boolean skipRow,
boolean retry) throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
OpTimer timer = null;
if (log.isTraceEnabled()) {
log.trace("tid={} Locating tablet table={} row={} skipRow={} retry={}",
Thread.currentThread().getId(), tableId, TextUtil.truncate(row), skipRow, retry);
timer = new OpTimer().start();
}
while (true) {
LockCheckerSession lcSession = new LockCheckerSession();
TabletLocation tl = _locateTablet(context, row, skipRow, retry, true, lcSession);
if (retry && tl == null) {
sleepUninterruptibly(100, MILLISECONDS);
if (log.isTraceEnabled())
log.trace("Failed to locate tablet containing row {} in table {}, will retry...",
TextUtil.truncate(row), tableId);
continue;
}
if (timer != null) {
timer.stop();
log.trace("tid={} Located tablet {} at {} in {}", Thread.currentThread().getId(),
(tl == null ? "null" : tl.tablet_extent), (tl == null ? "null" : tl.tablet_location),
String.format("%.3f secs", timer.scale(SECONDS)));
}
return tl;
}
}
private void lookupTabletLocation(ClientContext context, Text row, boolean retry,
LockCheckerSession lcSession)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
Text metadataRow = new Text(tableId.canonical());
metadataRow.append(new byte[] {';'}, 0, 1);
metadataRow.append(row.getBytes(), 0, row.getLength());
TabletLocation ptl = parent.locateTablet(context, metadataRow, false, retry);
if (ptl != null) {
TabletLocations locations =
locationObtainer.lookupTablet(context, ptl, metadataRow, lastTabletRow, parent);
while (locations != null && locations.getLocations().isEmpty()
&& locations.getLocationless().isEmpty()) {
// try the next metadata tablet; the current one does not reference any tablets that overlap the row
Text er = ptl.tablet_extent.endRow();
if (er != null && er.compareTo(lastTabletRow) < 0) {
ptl = parent.locateTablet(context, er, true, retry);
if (ptl != null)
locations =
locationObtainer.lookupTablet(context, ptl, metadataRow, lastTabletRow, parent);
else
break;
} else {
break;
}
}
if (locations == null)
return;
// cannot assume the list contains contiguous key extents... so it is probably
// best to deal with each extent individually
Text lastEndRow = null;
for (TabletLocation tabletLocation : locations.getLocations()) {
KeyExtent ke = tabletLocation.tablet_extent;
TabletLocation locToCache;
// if this tablet's prevEndRow equals the previous tablet's endRow, create a new location that
// reuses that endRow as the prevEndRow
if ((lastEndRow != null) && (ke.prevEndRow() != null)
&& ke.prevEndRow().equals(lastEndRow)) {
locToCache = new TabletLocation(new KeyExtent(ke.tableId(), ke.endRow(), lastEndRow),
tabletLocation.tablet_location, tabletLocation.tablet_session);
} else {
locToCache = tabletLocation;
}
// save endRow for next iteration
lastEndRow = locToCache.tablet_extent.endRow();
updateCache(locToCache, lcSession);
}
}
}
private void updateCache(TabletLocation tabletLocation, LockCheckerSession lcSession) {
if (!tabletLocation.tablet_extent.tableId().equals(tableId)) {
// sanity check
throw new IllegalStateException(
"Unexpected extent returned " + tableId + " " + tabletLocation.tablet_extent);
}
if (tabletLocation.tablet_location == null) {
// sanity check
throw new IllegalStateException(
"Cannot add null locations to cache " + tableId + " " + tabletLocation.tablet_extent);
}
// clear out any overlapping extents in cache
removeOverlapping(metaCache, tabletLocation.tablet_extent);
// do not add to cache unless lock is held
if (lcSession.checkLock(tabletLocation) == null)
return;
// add it to cache
Text er = tabletLocation.tablet_extent.endRow();
if (er == null)
er = MAX_TEXT;
metaCache.put(er, tabletLocation);
if (!badExtents.isEmpty())
removeOverlapping(badExtents, tabletLocation.tablet_extent);
}
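// Removes every cached tablet location whose extent overlaps nke. The cache is keyed by endRow,
// so iteration starts at the first entry that could overlap and stops once an entry begins at or
// after nke's endRow.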
static void removeOverlapping(TreeMap<Text,TabletLocation> metaCache, KeyExtent nke) {
Iterator<Entry<Text,TabletLocation>> iter = null;
if (nke.prevEndRow() == null) {
iter = metaCache.entrySet().iterator();
} else {
Text row = rowAfterPrevRow(nke);
SortedMap<Text,TabletLocation> tailMap = metaCache.tailMap(row);
iter = tailMap.entrySet().iterator();
}
while (iter.hasNext()) {
Entry<Text,TabletLocation> entry = iter.next();
KeyExtent ke = entry.getValue().tablet_extent;
if (stopRemoving(nke, ke)) {
break;
}
iter.remove();
}
}
private static boolean stopRemoving(KeyExtent nke, KeyExtent ke) {
return ke.prevEndRow() != null && nke.endRow() != null
&& ke.prevEndRow().compareTo(nke.endRow()) >= 0;
}
private static Text rowAfterPrevRow(KeyExtent nke) {
Text row = new Text(nke.prevEndRow());
row.append(new byte[] {0}, 0, 1);
return row;
}
static void removeOverlapping(TreeSet<KeyExtent> extents, KeyExtent nke) {
for (KeyExtent overlapping : KeyExtent.findOverlapping(nke, extents)) {
extents.remove(overlapping);
}
}
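// Returns the cached location whose endRow is the smallest value >= row (MAX_TEXT sorts last),
// but only if that extent actually contains the row, i.e. its prevEndRow is before the row.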
private TabletLocation locateTabletInCache(Text row) {
Entry<Text,TabletLocation> entry = metaCache.ceilingEntry(row);
if (entry != null) {
KeyExtent ke = entry.getValue().tablet_extent;
if (ke.prevEndRow() == null || ke.prevEndRow().compareTo(row) < 0) {
return entry.getValue();
}
}
return null;
}
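// If skipRow is set, a zero byte is appended so the lookup targets the tablet containing the row
// immediately after the given row. When lock is true the cache is consulted under the read lock
// and, on a miss, repopulated under the write lock; callers that already hold a lock pass
// lock=false.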
protected TabletLocation _locateTablet(ClientContext context, Text row, boolean skipRow,
boolean retry, boolean lock, LockCheckerSession lcSession)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
if (skipRow) {
row = new Text(row);
row.append(new byte[] {0}, 0, 1);
}
TabletLocation tl;
if (lock) {
rLock.lock();
try {
tl = processInvalidatedAndCheckLock(context, lcSession, row);
} finally {
rLock.unlock();
}
} else {
tl = processInvalidatedAndCheckLock(context, lcSession, row);
}
if (tl == null) {
// not in cache, so obtain info
if (lock) {
wLock.lock();
try {
tl = lookupTabletLocationAndCheckLock(context, row, retry, lcSession);
} finally {
wLock.unlock();
}
} else {
tl = lookupTabletLocationAndCheckLock(context, row, retry, lcSession);
}
}
return tl;
}
private TabletLocation lookupTabletLocationAndCheckLock(ClientContext context, Text row,
boolean retry, LockCheckerSession lcSession)
throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
lookupTabletLocation(context, row, retry, lcSession);
return lcSession.checkLock(locateTabletInCache(row));
}
private TabletLocation processInvalidatedAndCheckLock(ClientContext context,
LockCheckerSession lcSession, Text row)
throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
processInvalidated(context, lcSession);
return lcSession.checkLock(locateTabletInCache(row));
}
@SuppressFBWarnings(value = {"UL_UNRELEASED_LOCK", "UL_UNRELEASED_LOCK_EXCEPTION_PATH"},
justification = "locking is confusing, but probably correct")
private void processInvalidated(ClientContext context, LockCheckerSession lcSession)
throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
if (badExtents.isEmpty())
return;
final boolean writeLockHeld = rwLock.isWriteLockedByCurrentThread();
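// callers hold either the read lock or the write lock; if only the read lock is held, trade it
// for the write lock before mutating the cache (it is traded back in the finally block)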
try {
if (!writeLockHeld) {
rLock.unlock();
wLock.lock();
if (badExtents.isEmpty())
return;
}
List<Range> lookups = new ArrayList<>(badExtents.size());
for (KeyExtent be : badExtents) {
lookups.add(be.toMetaRange());
removeOverlapping(metaCache, be);
}
lookups = Range.mergeOverlapping(lookups);
Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<>();
parent.binRanges(context, lookups, binnedRanges);
// randomize server order
ArrayList<String> tabletServers = new ArrayList<>(binnedRanges.keySet());
Collections.shuffle(tabletServers);
for (String tserver : tabletServers) {
List<TabletLocation> locations =
locationObtainer.lookupTablets(context, tserver, binnedRanges.get(tserver), parent);
for (TabletLocation tabletLocation : locations) {
updateCache(tabletLocation, lcSession);
}
}
} finally {
if (!writeLockHeld) {
rLock.lock();
wLock.unlock();
}
}
}
protected static void addRange(Map<String,Map<KeyExtent,List<Range>>> binnedRanges,
String location, KeyExtent ke, Range range) {
binnedRanges.computeIfAbsent(location, k -> new HashMap<>())
.computeIfAbsent(ke, k -> new ArrayList<>()).add(range);
}
}
|
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_07_01.implementation;
import java.util.List;
import com.microsoft.azure.management.network.v2019_07_01.AzureFirewallApplicationRuleCollection;
import com.microsoft.azure.management.network.v2019_07_01.AzureFirewallNatRuleCollection;
import com.microsoft.azure.management.network.v2019_07_01.AzureFirewallNetworkRuleCollection;
import com.microsoft.azure.management.network.v2019_07_01.AzureFirewallIPConfiguration;
import com.microsoft.azure.management.network.v2019_07_01.ProvisioningState;
import com.microsoft.azure.management.network.v2019_07_01.AzureFirewallThreatIntelMode;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2019_07_01.HubIPAddresses;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.rest.SkipParentValidation;
import com.microsoft.azure.Resource;
/**
* Azure Firewall resource.
*/
@JsonFlatten
@SkipParentValidation
public class AzureFirewallInner extends Resource {
/**
* Collection of application rule collections used by Azure Firewall.
*/
@JsonProperty(value = "properties.applicationRuleCollections")
private List<AzureFirewallApplicationRuleCollection> applicationRuleCollections;
/**
* Collection of NAT rule collections used by Azure Firewall.
*/
@JsonProperty(value = "properties.natRuleCollections")
private List<AzureFirewallNatRuleCollection> natRuleCollections;
/**
* Collection of network rule collections used by Azure Firewall.
*/
@JsonProperty(value = "properties.networkRuleCollections")
private List<AzureFirewallNetworkRuleCollection> networkRuleCollections;
/**
* IP configuration of the Azure Firewall resource.
*/
@JsonProperty(value = "properties.ipConfigurations")
private List<AzureFirewallIPConfiguration> ipConfigurations;
/**
* The provisioning state of the Azure firewall resource. Possible values
* include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
*/
@JsonProperty(value = "properties.provisioningState")
private ProvisioningState provisioningState;
/**
* The operation mode for Threat Intelligence. Possible values include:
* 'Alert', 'Deny', 'Off'.
*/
@JsonProperty(value = "properties.threatIntelMode")
private AzureFirewallThreatIntelMode threatIntelMode;
/**
* The virtualHub to which the firewall belongs.
*/
@JsonProperty(value = "properties.virtualHub")
private SubResource virtualHub;
/**
* The firewallPolicy associated with this azure firewall.
*/
@JsonProperty(value = "properties.firewallPolicy")
private SubResource firewallPolicy;
/**
* IP addresses associated with AzureFirewall.
*/
@JsonProperty(value = "properties.hubIpAddresses", access = JsonProperty.Access.WRITE_ONLY)
private HubIPAddresses hubIpAddresses;
/**
* A list of availability zones denoting where the resource needs to come
* from.
*/
@JsonProperty(value = "zones")
private List<String> zones;
/**
* A unique read-only string that changes whenever the resource is updated.
*/
@JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY)
private String etag;
/**
* Resource ID.
*/
@JsonProperty(value = "id")
private String id;
/**
* Get collection of application rule collections used by Azure Firewall.
*
* @return the applicationRuleCollections value
*/
public List<AzureFirewallApplicationRuleCollection> applicationRuleCollections() {
return this.applicationRuleCollections;
}
/**
* Set collection of application rule collections used by Azure Firewall.
*
* @param applicationRuleCollections the applicationRuleCollections value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withApplicationRuleCollections(List<AzureFirewallApplicationRuleCollection> applicationRuleCollections) {
this.applicationRuleCollections = applicationRuleCollections;
return this;
}
/**
* Get collection of NAT rule collections used by Azure Firewall.
*
* @return the natRuleCollections value
*/
public List<AzureFirewallNatRuleCollection> natRuleCollections() {
return this.natRuleCollections;
}
/**
* Set collection of NAT rule collections used by Azure Firewall.
*
* @param natRuleCollections the natRuleCollections value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withNatRuleCollections(List<AzureFirewallNatRuleCollection> natRuleCollections) {
this.natRuleCollections = natRuleCollections;
return this;
}
/**
* Get collection of network rule collections used by Azure Firewall.
*
* @return the networkRuleCollections value
*/
public List<AzureFirewallNetworkRuleCollection> networkRuleCollections() {
return this.networkRuleCollections;
}
/**
* Set collection of network rule collections used by Azure Firewall.
*
* @param networkRuleCollections the networkRuleCollections value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withNetworkRuleCollections(List<AzureFirewallNetworkRuleCollection> networkRuleCollections) {
this.networkRuleCollections = networkRuleCollections;
return this;
}
/**
* Get IP configuration of the Azure Firewall resource.
*
* @return the ipConfigurations value
*/
public List<AzureFirewallIPConfiguration> ipConfigurations() {
return this.ipConfigurations;
}
/**
* Set IP configuration of the Azure Firewall resource.
*
* @param ipConfigurations the ipConfigurations value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withIpConfigurations(List<AzureFirewallIPConfiguration> ipConfigurations) {
this.ipConfigurations = ipConfigurations;
return this;
}
/**
* Get the provisioning state of the Azure firewall resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
*
* @return the provisioningState value
*/
public ProvisioningState provisioningState() {
return this.provisioningState;
}
/**
* Set the provisioning state of the Azure firewall resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
*
* @param provisioningState the provisioningState value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withProvisioningState(ProvisioningState provisioningState) {
this.provisioningState = provisioningState;
return this;
}
/**
* Get the operation mode for Threat Intelligence. Possible values include: 'Alert', 'Deny', 'Off'.
*
* @return the threatIntelMode value
*/
public AzureFirewallThreatIntelMode threatIntelMode() {
return this.threatIntelMode;
}
/**
* Set the operation mode for Threat Intelligence. Possible values include: 'Alert', 'Deny', 'Off'.
*
* @param threatIntelMode the threatIntelMode value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withThreatIntelMode(AzureFirewallThreatIntelMode threatIntelMode) {
this.threatIntelMode = threatIntelMode;
return this;
}
/**
* Get the virtualHub to which the firewall belongs.
*
* @return the virtualHub value
*/
public SubResource virtualHub() {
return this.virtualHub;
}
/**
* Set the virtualHub to which the firewall belongs.
*
* @param virtualHub the virtualHub value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withVirtualHub(SubResource virtualHub) {
this.virtualHub = virtualHub;
return this;
}
/**
* Get the firewallPolicy associated with this azure firewall.
*
* @return the firewallPolicy value
*/
public SubResource firewallPolicy() {
return this.firewallPolicy;
}
/**
* Set the firewallPolicy associated with this azure firewall.
*
* @param firewallPolicy the firewallPolicy value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withFirewallPolicy(SubResource firewallPolicy) {
this.firewallPolicy = firewallPolicy;
return this;
}
/**
* Get IP addresses associated with AzureFirewall.
*
* @return the hubIpAddresses value
*/
public HubIPAddresses hubIpAddresses() {
return this.hubIpAddresses;
}
/**
* Get a list of availability zones denoting where the resource needs to come from.
*
* @return the zones value
*/
public List<String> zones() {
return this.zones;
}
/**
* Set a list of availability zones denoting where the resource needs to come from.
*
* @param zones the zones value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withZones(List<String> zones) {
this.zones = zones;
return this;
}
/**
* Get a unique read-only string that changes whenever the resource is updated.
*
* @return the etag value
*/
public String etag() {
return this.etag;
}
/**
* Get resource ID.
*
* @return the id value
*/
public String id() {
return this.id;
}
/**
* Set resource ID.
*
* @param id the id value to set
* @return the AzureFirewallInner object itself.
*/
public AzureFirewallInner withId(String id) {
this.id = id;
return this;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.pubsub;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;
import com.google.api.client.util.DateTime;
import java.io.Closeable;
import java.io.IOException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import javax.annotation.Nullable;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Objects;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Splitter;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
/** An (abstract) helper class for talking to Pubsub via an underlying transport. */
public abstract class PubsubClient implements Closeable {
/** Factory for creating clients. */
public interface PubsubClientFactory extends Serializable {
/**
* Construct a new Pubsub client. It should be closed via {@link #close} in order to ensure tidy
* cleanup of underlying netty resources (or use the try-with-resources construct). Uses {@code
* options} to derive pubsub endpoints and application credentials. If non-{@literal null}, use
* {@code timestampAttribute} and {@code idAttribute} to store custom timestamps/ids within
* message metadata.
*/
PubsubClient newClient(
@Nullable String timestampAttribute, @Nullable String idAttribute, PubsubOptions options)
throws IOException;
/** Return the display name for this factory. Eg "Json", "gRPC". */
String getKind();
}
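// Illustrative use of a factory (the concrete factory and variable names here are hypothetical):
//
//   try (PubsubClient client = factory.newClient(null, null, options)) {
//     client.publish(topic, outgoingMessages);
//   }
//
// Closing the client, e.g. via try-with-resources as above, releases the underlying transport
// resources as described in the javadoc.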
/**
* Return timestamp as ms-since-unix-epoch corresponding to {@code timestamp}. Return {@literal
* null} if no timestamp could be found. Throw {@link IllegalArgumentException} if timestamp
* cannot be recognized.
*/
@Nullable
private static Long asMsSinceEpoch(@Nullable String timestamp) {
if (Strings.isNullOrEmpty(timestamp)) {
return null;
}
try {
// Try parsing as milliseconds since epoch. Note there is no way to parse a
// string in RFC 3339 format here.
// Expected IllegalArgumentException if parsing fails; we use that to fall back
// to RFC 3339.
return Long.parseLong(timestamp);
} catch (IllegalArgumentException e1) {
// Try parsing as RFC3339 string. DateTime.parseRfc3339 will throw an
// IllegalArgumentException if parsing fails, and the caller should handle.
return DateTime.parseRfc3339(timestamp).getValue();
}
}
/**
* Return the timestamp (in ms since unix epoch) to use for a Pubsub message with {@code
* attributes} and {@code pubsubTimestamp}.
*
* <p>If {@code timestampAttribute} is non-{@literal null} then the message attributes must
* contain that attribute, and the value of that attribute will be taken as the timestamp.
* Otherwise the timestamp will be taken from the Pubsub publish timestamp {@code
* pubsubTimestamp}.
*
* @throws IllegalArgumentException if the timestamp cannot be recognized as a ms-since-unix-epoch
* or RFC3339 time.
*/
protected static long extractTimestamp(
@Nullable String timestampAttribute,
@Nullable String pubsubTimestamp,
@Nullable Map<String, String> attributes) {
Long timestampMsSinceEpoch;
if (Strings.isNullOrEmpty(timestampAttribute)) {
timestampMsSinceEpoch = asMsSinceEpoch(pubsubTimestamp);
checkArgument(
timestampMsSinceEpoch != null,
"Cannot interpret PubSub publish timestamp: %s",
pubsubTimestamp);
} else {
String value = attributes == null ? null : attributes.get(timestampAttribute);
checkArgument(
value != null,
"PubSub message is missing a value for timestamp attribute %s",
timestampAttribute);
timestampMsSinceEpoch = asMsSinceEpoch(value);
checkArgument(
timestampMsSinceEpoch != null,
"Cannot interpret value of attribute %s as timestamp: %s",
timestampAttribute,
value);
}
return timestampMsSinceEpoch;
}
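// Illustrative behaviour (examples, not from the original source):
// - extractTimestamp(null, "1445412480000", null) returns 1445412480000L, the numeric publish
//   timestamp taken as ms since epoch.
// - extractTimestamp("ts", null, attributes) with attributes mapping "ts" to an RFC 3339 string
//   such as "2015-10-21T07:28:00Z" parses that value and returns the corresponding ms since
//   epoch; a missing or unparseable attribute raises IllegalArgumentException.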
/** Path representing a cloud project id. */
public static class ProjectPath implements Serializable {
private final String projectId;
/**
* Creates a {@link ProjectPath} from a {@link String} representation, which must be of the form
* {@code "projects/" + projectId}.
*/
ProjectPath(String path) {
List<String> splits = Splitter.on('/').splitToList(path);
checkArgument(
splits.size() == 2 && "projects".equals(splits.get(0)),
"Malformed project path \"%s\": must be of the form \"projects/\" + <project id>",
path);
this.projectId = splits.get(1);
}
public String getPath() {
return String.format("projects/%s", projectId);
}
public String getId() {
return projectId;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ProjectPath that = (ProjectPath) o;
return projectId.equals(that.projectId);
}
@Override
public int hashCode() {
return projectId.hashCode();
}
@Override
public String toString() {
return getPath();
}
}
public static ProjectPath projectPathFromPath(String path) {
return new ProjectPath(path);
}
public static ProjectPath projectPathFromId(String projectId) {
return new ProjectPath(String.format("projects/%s", projectId));
}
/** Path representing a Pubsub subscription. */
public static class SubscriptionPath implements Serializable {
private final String projectId;
private final String subscriptionName;
SubscriptionPath(String path) {
List<String> splits = Splitter.on('/').splitToList(path);
checkState(
splits.size() == 4
&& "projects".equals(splits.get(0))
&& "subscriptions".equals(splits.get(2)),
"Malformed subscription path %s: "
+ "must be of the form \"projects/\" + <project id> + \"subscriptions\"",
path);
this.projectId = splits.get(1);
this.subscriptionName = splits.get(3);
}
public String getPath() {
return String.format("projects/%s/subscriptions/%s", projectId, subscriptionName);
}
public String getName() {
return subscriptionName;
}
public String getV1Beta1Path() {
return String.format("/subscriptions/%s/%s", projectId, subscriptionName);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SubscriptionPath that = (SubscriptionPath) o;
return this.subscriptionName.equals(that.subscriptionName)
&& this.projectId.equals(that.projectId);
}
@Override
public int hashCode() {
return Objects.hashCode(projectId, subscriptionName);
}
@Override
public String toString() {
return getPath();
}
}
public static SubscriptionPath subscriptionPathFromPath(String path) {
return new SubscriptionPath(path);
}
public static SubscriptionPath subscriptionPathFromName(
String projectId, String subscriptionName) {
return new SubscriptionPath(
String.format("projects/%s/subscriptions/%s", projectId, subscriptionName));
}
/** Path representing a Pubsub topic. */
public static class TopicPath implements Serializable {
private final String path;
TopicPath(String path) {
this.path = path;
}
public String getPath() {
return path;
}
public String getName() {
List<String> splits = Splitter.on('/').splitToList(path);
checkState(splits.size() == 4, "Malformed topic path %s", path);
return splits.get(3);
}
public String getV1Beta1Path() {
List<String> splits = Splitter.on('/').splitToList(path);
checkState(splits.size() == 4, "Malformed topic path %s", path);
return String.format("/topics/%s/%s", splits.get(1), splits.get(3));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TopicPath topicPath = (TopicPath) o;
return path.equals(topicPath.path);
}
@Override
public int hashCode() {
return path.hashCode();
}
@Override
public String toString() {
return path;
}
}
public static TopicPath topicPathFromPath(String path) {
return new TopicPath(path);
}
public static TopicPath topicPathFromName(String projectId, String topicName) {
return new TopicPath(String.format("projects/%s/topics/%s", projectId, topicName));
}
/**
* A message to be sent to Pubsub.
*
* <p>NOTE: This class is {@link Serializable} only to support the {@link PubsubTestClient}. Java
* serialization is never used for non-test clients.
*/
public static class OutgoingMessage implements Serializable {
/** Underlying (encoded) element. */
public final byte[] elementBytes;
public final Map<String, String> attributes;
/** Timestamp for element (ms since epoch). */
public final long timestampMsSinceEpoch;
/**
* If using an id attribute, the record id to associate with this record's metadata so the
* receiver can reject duplicates. Otherwise {@literal null}.
*/
@Nullable public final String recordId;
public OutgoingMessage(
byte[] elementBytes,
Map<String, String> attributes,
long timestampMsSinceEpoch,
@Nullable String recordId) {
this.elementBytes = elementBytes;
this.attributes = attributes;
this.timestampMsSinceEpoch = timestampMsSinceEpoch;
this.recordId = recordId;
}
@Override
public String toString() {
return String.format(
"OutgoingMessage(%db, %dms)", elementBytes.length, timestampMsSinceEpoch);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OutgoingMessage that = (OutgoingMessage) o;
return timestampMsSinceEpoch == that.timestampMsSinceEpoch
&& Arrays.equals(elementBytes, that.elementBytes)
&& Objects.equal(attributes, that.attributes)
&& Objects.equal(recordId, that.recordId);
}
@Override
public int hashCode() {
return Objects.hashCode(
Arrays.hashCode(elementBytes), attributes, timestampMsSinceEpoch, recordId);
}
}
/**
* A message received from Pubsub.
*
* <p>NOTE: This class is {@link Serializable} only to support the {@link PubsubTestClient}. Java
* serialization is never used for non-test clients.
*/
static class IncomingMessage implements Serializable {
/** Underlying (encoded) element. */
public final byte[] elementBytes;
public Map<String, String> attributes;
/**
* Timestamp for element (ms since epoch). Either Pubsub's processing time, or the custom
* timestamp associated with the message.
*/
public final long timestampMsSinceEpoch;
/** Timestamp (in system time) at which we requested the message (ms since epoch). */
public final long requestTimeMsSinceEpoch;
/** Id to pass back to Pubsub to acknowledge receipt of this message. */
public final String ackId;
/** Id to pass to the runner to distinguish this message from all others. */
public final String recordId;
public IncomingMessage(
byte[] elementBytes,
Map<String, String> attributes,
long timestampMsSinceEpoch,
long requestTimeMsSinceEpoch,
String ackId,
String recordId) {
this.elementBytes = elementBytes;
this.attributes = attributes;
this.timestampMsSinceEpoch = timestampMsSinceEpoch;
this.requestTimeMsSinceEpoch = requestTimeMsSinceEpoch;
this.ackId = ackId;
this.recordId = recordId;
}
public IncomingMessage withRequestTime(long requestTimeMsSinceEpoch) {
return new IncomingMessage(
elementBytes,
attributes,
timestampMsSinceEpoch,
requestTimeMsSinceEpoch,
ackId,
recordId);
}
@Override
public String toString() {
return String.format(
"IncomingMessage(%db, %dms)", elementBytes.length, timestampMsSinceEpoch);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
IncomingMessage that = (IncomingMessage) o;
return timestampMsSinceEpoch == that.timestampMsSinceEpoch
&& requestTimeMsSinceEpoch == that.requestTimeMsSinceEpoch
&& ackId.equals(that.ackId)
&& recordId.equals(that.recordId)
&& Arrays.equals(elementBytes, that.elementBytes)
&& Objects.equal(attributes, that.attributes);
}
@Override
public int hashCode() {
return Objects.hashCode(
Arrays.hashCode(elementBytes),
attributes,
timestampMsSinceEpoch,
requestTimeMsSinceEpoch,
ackId,
recordId);
}
}
/**
* Publish {@code outgoingMessages} to Pubsub {@code topic}. Return number of messages published.
*/
public abstract int publish(TopicPath topic, List<OutgoingMessage> outgoingMessages)
throws IOException;
/**
* Request the next batch of up to {@code batchSize} messages from {@code subscription}. Return
* the received messages, or empty collection if none were available. Does not wait for messages
* to arrive if {@code returnImmediately} is {@literal true}. Returned messages will record their
* request time as {@code requestTimeMsSinceEpoch}.
*/
public abstract List<IncomingMessage> pull(
long requestTimeMsSinceEpoch,
SubscriptionPath subscription,
int batchSize,
boolean returnImmediately)
throws IOException;
/** Acknowledge messages from {@code subscription} with {@code ackIds}. */
public abstract void acknowledge(SubscriptionPath subscription, List<String> ackIds)
throws IOException;
/**
* Modify the ack deadline for messages from {@code subscription} with {@code ackIds} to be {@code
* deadlineSeconds} from now.
*/
public abstract void modifyAckDeadline(
SubscriptionPath subscription, List<String> ackIds, int deadlineSeconds) throws IOException;
/** Create {@code topic}. */
public abstract void createTopic(TopicPath topic) throws IOException;
/** Delete {@code topic}. */
public abstract void deleteTopic(TopicPath topic) throws IOException;
/** Return a list of topics for {@code project}. */
public abstract List<TopicPath> listTopics(ProjectPath project) throws IOException;
/** Create {@code subscription} to {@code topic}. */
public abstract void createSubscription(
TopicPath topic, SubscriptionPath subscription, int ackDeadlineSeconds) throws IOException;
/**
* Create a random subscription for {@code topic}. Return the {@link SubscriptionPath}. It is the
* responsibility of the caller to later delete the subscription.
*/
public SubscriptionPath createRandomSubscription(
ProjectPath project, TopicPath topic, int ackDeadlineSeconds) throws IOException {
// Create a randomized subscription derived from the topic name.
String subscriptionName = topic.getName() + "_beam_" + ThreadLocalRandom.current().nextLong();
SubscriptionPath subscription =
PubsubClient.subscriptionPathFromName(project.getId(), subscriptionName);
createSubscription(topic, subscription, ackDeadlineSeconds);
return subscription;
}
/** Delete {@code subscription}. */
public abstract void deleteSubscription(SubscriptionPath subscription) throws IOException;
/** Return a list of subscriptions for {@code topic} in {@code project}. */
public abstract List<SubscriptionPath> listSubscriptions(ProjectPath project, TopicPath topic)
throws IOException;
/** Return the ack deadline, in seconds, for {@code subscription}. */
public abstract int ackDeadlineSeconds(SubscriptionPath subscription) throws IOException;
/**
* Return {@literal true} if {@link #pull} will always return empty list. Actual clients will
* return {@literal false}. Test clients may return {@literal true} to signal that all expected
* messages have been pulled and the test may complete.
*/
public abstract boolean isEOF();
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.mongodb;
import static com.google.common.base.Charsets.UTF_8;
import static org.junit.Assert.assertEquals;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.MockProcessContext;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.bson.Document;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
@Ignore("Integration tests that cause failures in some environments. Require that they be run from Maven to run the embedded mongo maven plugin. Maven Plugin also fails in my CentOS 7 environment.")
public class PutMongoTest {
private static final String MONGO_URI = "mongodb://localhost";
private static final String DATABASE_NAME = PutMongoTest.class.getSimpleName().toLowerCase();
private static final String COLLECTION_NAME = "test";
private static final List<Document> DOCUMENTS = Lists.newArrayList(
new Document("_id", "doc_1").append("a", 1).append("b", 2).append("c", 3),
new Document("_id", "doc_2").append("a", 1).append("b", 2).append("c", 4),
new Document("_id", "doc_3").append("a", 1).append("b", 3)
);
private TestRunner runner;
private MongoClient mongoClient;
private MongoCollection<Document> collection;
@Before
public void setup() {
runner = TestRunners.newTestRunner(PutMongo.class);
runner.setProperty(AbstractMongoProcessor.URI, MONGO_URI);
runner.setProperty(AbstractMongoProcessor.DATABASE_NAME, DATABASE_NAME);
runner.setProperty(AbstractMongoProcessor.COLLECTION_NAME, COLLECTION_NAME);
mongoClient = new MongoClient(new MongoClientURI(MONGO_URI));
collection = mongoClient.getDatabase(DATABASE_NAME).getCollection(COLLECTION_NAME);
}
@After
public void teardown() {
runner = null;
mongoClient.getDatabase(DATABASE_NAME).drop();
}
private byte[] documentToByteArray(Document doc) {
return doc.toJson().getBytes(UTF_8);
}
@Test
public void testValidators() {
TestRunner runner = TestRunners.newTestRunner(PutMongo.class);
Collection<ValidationResult> results;
ProcessContext pc;
// missing uri, db, collection
runner.enqueue(new byte[0]);
pc = runner.getProcessContext();
results = new HashSet<>();
if (pc instanceof MockProcessContext) {
results = ((MockProcessContext) pc).validate();
}
Assert.assertEquals(3, results.size());
Iterator<ValidationResult> it = results.iterator();
Assert.assertTrue(it.next().toString().contains("is invalid because Mongo URI is required"));
Assert.assertTrue(it.next().toString().contains("is invalid because Mongo Database Name is required"));
Assert.assertTrue(it.next().toString().contains("is invalid because Mongo Collection Name is required"));
// invalid write concern
runner.setProperty(AbstractMongoProcessor.URI, MONGO_URI);
runner.setProperty(AbstractMongoProcessor.DATABASE_NAME, DATABASE_NAME);
runner.setProperty(AbstractMongoProcessor.COLLECTION_NAME, COLLECTION_NAME);
runner.setProperty(PutMongo.WRITE_CONCERN, "xyz");
runner.enqueue(new byte[0]);
pc = runner.getProcessContext();
results = new HashSet<>();
if (pc instanceof MockProcessContext) {
results = ((MockProcessContext) pc).validate();
}
Assert.assertEquals(1, results.size());
Assert.assertTrue(results.iterator().next().toString().matches("'Write Concern' .* is invalid because Given value not found in allowed set .*"));
// valid write concern
runner.setProperty(PutMongo.WRITE_CONCERN, PutMongo.WRITE_CONCERN_UNACKNOWLEDGED);
runner.enqueue(new byte[0]);
pc = runner.getProcessContext();
results = new HashSet<>();
if (pc instanceof MockProcessContext) {
results = ((MockProcessContext) pc).validate();
}
Assert.assertEquals(0, results.size());
}
@Test
public void testInsertOne() throws Exception {
Document doc = DOCUMENTS.get(0);
byte[] bytes = documentToByteArray(doc);
runner.enqueue(bytes);
runner.run();
runner.assertAllFlowFilesTransferred(PutMongo.REL_SUCCESS, 1);
MockFlowFile out = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS).get(0);
out.assertContentEquals(bytes);
// verify 1 doc inserted into the collection
assertEquals(1, collection.count());
assertEquals(doc, collection.find().first());
}
@Test
public void testInsertMany() throws Exception {
for (Document doc : DOCUMENTS) {
runner.enqueue(documentToByteArray(doc));
}
runner.run(3);
runner.assertAllFlowFilesTransferred(PutMongo.REL_SUCCESS, 3);
List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS);
for (int i=0; i < flowFiles.size(); i++) {
flowFiles.get(i).assertContentEquals(DOCUMENTS.get(i).toJson());
}
// verify 3 docs inserted into the collection
assertEquals(3, collection.count());
}
@Test
public void testInsertWithDuplicateKey() throws Exception {
// pre-insert one document
collection.insertOne(DOCUMENTS.get(0));
for (Document doc : DOCUMENTS) {
runner.enqueue(documentToByteArray(doc));
}
runner.run(3);
// first doc failed, other 2 succeeded
runner.assertTransferCount(PutMongo.REL_FAILURE, 1);
MockFlowFile out = runner.getFlowFilesForRelationship(PutMongo.REL_FAILURE).get(0);
out.assertContentEquals(documentToByteArray(DOCUMENTS.get(0)));
runner.assertTransferCount(PutMongo.REL_SUCCESS, 2);
List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS);
for (int i=0; i < flowFiles.size(); i++) {
flowFiles.get(i).assertContentEquals(DOCUMENTS.get(i+1).toJson());
}
// verify 2 docs inserted into the collection for a total of 3
assertEquals(3, collection.count());
}
/**
* Verifies that 'update' does not insert if 'upsert' is false.
* @see #testUpsert()
*/
@Test
public void testUpdateDoesNotInsert() throws Exception {
Document doc = DOCUMENTS.get(0);
byte[] bytes = documentToByteArray(doc);
runner.setProperty(PutMongo.MODE, "update");
runner.enqueue(bytes);
runner.run();
runner.assertAllFlowFilesTransferred(PutMongo.REL_SUCCESS, 1);
MockFlowFile out = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS).get(0);
out.assertContentEquals(bytes);
// nothing was in collection, so nothing to update since upsert defaults to false
assertEquals(0, collection.count());
}
/**
* Verifies that 'update' does insert if 'upsert' is true.
* @see #testUpdateDoesNotInsert()
*/
@Test
public void testUpsert() throws Exception {
Document doc = DOCUMENTS.get(0);
byte[] bytes = documentToByteArray(doc);
runner.setProperty(PutMongo.MODE, "update");
runner.setProperty(PutMongo.UPSERT, "true");
runner.enqueue(bytes);
runner.run();
runner.assertAllFlowFilesTransferred(PutMongo.REL_SUCCESS, 1);
MockFlowFile out = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS).get(0);
out.assertContentEquals(bytes);
// verify 1 doc inserted into the collection
assertEquals(1, collection.count());
assertEquals(doc, collection.find().first());
}
@Test
public void testUpdate() throws Exception {
Document doc = DOCUMENTS.get(0);
// pre-insert document
collection.insertOne(doc);
// modify the object
doc.put("abc", "123");
doc.put("xyz", "456");
doc.remove("c");
byte[] bytes = documentToByteArray(doc);
runner.setProperty(PutMongo.MODE, "update");
runner.enqueue(bytes);
runner.run();
runner.assertAllFlowFilesTransferred(PutMongo.REL_SUCCESS, 1);
MockFlowFile out = runner.getFlowFilesForRelationship(PutMongo.REL_SUCCESS).get(0);
out.assertContentEquals(bytes);
assertEquals(1, collection.count());
assertEquals(doc, collection.find().first());
}
}
|
|
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
/**
* <p>
* An immutable representation of a <a>RestApi</a> resource that can be called
* by users using <a>Stages</a>. A deployment must be associated with a
* <a>Stage</a> for it to be callable over the Internet.
* </p>
*/
public class UpdateDeploymentResult implements Serializable, Cloneable {
/**
* <p>
* The identifier for the deployment resource.
* </p>
*/
private String id;
/**
* <p>
* The description for the deployment resource.
* </p>
*/
private String description;
/**
* <p>
* The date and time that the deployment resource was created.
* </p>
*/
private java.util.Date createdDate;
/**
* <p>
* Gets a summary of the <a>RestApi</a> at the date and time that the
* deployment resource was created.
* </p>
*/
private java.util.Map<String, java.util.Map<String, MethodSnapshot>> apiSummary;
/**
* <p>
* The identifier for the deployment resource.
* </p>
*
* @param id
* The identifier for the deployment resource.
*/
public void setId(String id) {
this.id = id;
}
/**
* <p>
* The identifier for the deployment resource.
* </p>
*
* @return The identifier for the deployment resource.
*/
public String getId() {
return this.id;
}
/**
* <p>
* The identifier for the deployment resource.
* </p>
*
* @param id
* The identifier for the deployment resource.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateDeploymentResult withId(String id) {
setId(id);
return this;
}
/**
* <p>
* The description for the deployment resource.
* </p>
*
* @param description
* The description for the deployment resource.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* The description for the deployment resource.
* </p>
*
* @return The description for the deployment resource.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* The description for the deployment resource.
* </p>
*
* @param description
* The description for the deployment resource.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateDeploymentResult withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The date and time that the deployment resource was created.
* </p>
*
* @param createdDate
* The date and time that the deployment resource was created.
*/
public void setCreatedDate(java.util.Date createdDate) {
this.createdDate = createdDate;
}
/**
* <p>
* The date and time that the deployment resource was created.
* </p>
*
* @return The date and time that the deployment resource was created.
*/
public java.util.Date getCreatedDate() {
return this.createdDate;
}
/**
* <p>
* The date and time that the deployment resource was created.
* </p>
*
* @param createdDate
* The date and time that the deployment resource was created.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateDeploymentResult withCreatedDate(java.util.Date createdDate) {
setCreatedDate(createdDate);
return this;
}
/**
* <p>
* Gets a summary of the <a>RestApi</a> at the date and time that the
* deployment resource was created.
* </p>
*
     * @return A summary of the <a>RestApi</a> at the date and time that
     *         the deployment resource was created.
*/
public java.util.Map<String, java.util.Map<String, MethodSnapshot>> getApiSummary() {
return apiSummary;
}
/**
* <p>
* Gets a summary of the <a>RestApi</a> at the date and time that the
* deployment resource was created.
* </p>
*
     * @param apiSummary
     *        A summary of the <a>RestApi</a> at the date and time that the
     *        deployment resource was created.
*/
public void setApiSummary(
java.util.Map<String, java.util.Map<String, MethodSnapshot>> apiSummary) {
this.apiSummary = apiSummary;
}
/**
* <p>
* Gets a summary of the <a>RestApi</a> at the date and time that the
* deployment resource was created.
* </p>
*
     * @param apiSummary
     *        A summary of the <a>RestApi</a> at the date and time that the
     *        deployment resource was created.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public UpdateDeploymentResult withApiSummary(
java.util.Map<String, java.util.Map<String, MethodSnapshot>> apiSummary) {
setApiSummary(apiSummary);
return this;
}
    /**
     * Adds a single entry to the ApiSummary map. Duplicate keys are rejected.
     *
     * @param key
     *        The key of the entry to add.
     * @param value
     *        The value of the entry to add.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public UpdateDeploymentResult addApiSummaryEntry(String key,
            java.util.Map<String, MethodSnapshot> value) {
        if (null == this.apiSummary) {
            this.apiSummary = new java.util.HashMap<String, java.util.Map<String, MethodSnapshot>>();
        }
        if (this.apiSummary.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys ("
                    + key + ") are provided.");
        this.apiSummary.put(key, value);
        return this;
    }
/**
* Removes all the entries added into ApiSummary. <p> Returns a reference
* to this object so that method calls can be chained together.
*/
public UpdateDeploymentResult clearApiSummaryEntries() {
this.apiSummary = null;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getId() != null)
sb.append("Id: " + getId() + ",");
if (getDescription() != null)
sb.append("Description: " + getDescription() + ",");
if (getCreatedDate() != null)
sb.append("CreatedDate: " + getCreatedDate() + ",");
if (getApiSummary() != null)
sb.append("ApiSummary: " + getApiSummary());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateDeploymentResult == false)
return false;
UpdateDeploymentResult other = (UpdateDeploymentResult) obj;
if (other.getId() == null ^ this.getId() == null)
return false;
if (other.getId() != null
&& other.getId().equals(this.getId()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null
&& other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getCreatedDate() == null ^ this.getCreatedDate() == null)
return false;
if (other.getCreatedDate() != null
&& other.getCreatedDate().equals(this.getCreatedDate()) == false)
return false;
if (other.getApiSummary() == null ^ this.getApiSummary() == null)
return false;
if (other.getApiSummary() != null
&& other.getApiSummary().equals(this.getApiSummary()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getId() == null) ? 0 : getId().hashCode());
hashCode = prime
* hashCode
+ ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime
* hashCode
+ ((getCreatedDate() == null) ? 0 : getCreatedDate().hashCode());
hashCode = prime * hashCode
+ ((getApiSummary() == null) ? 0 : getApiSummary().hashCode());
return hashCode;
}
@Override
public UpdateDeploymentResult clone() {
try {
return (UpdateDeploymentResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
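/*
 * A minimal usage sketch (not part of the generated SDK model): it shows how the
 * fluent with* setters and addApiSummaryEntry chain together, e.g. when building a
 * result object by hand in a unit test. The id, description and summary key below
 * are hypothetical; the inner map is left empty so nothing is assumed about the
 * MethodSnapshot API.
 */
class UpdateDeploymentResultUsageSketch {
    public static void main(String[] args) {
        UpdateDeploymentResult result = new UpdateDeploymentResult()
                .withId("dep-123")
                .withDescription("nightly deployment")
                .withCreatedDate(new java.util.Date())
                .addApiSummaryEntry("/pets", new java.util.HashMap<String, MethodSnapshot>());
        // toString() prints the non-null fields, which is handy for debugging
        System.out.println(result);
    }
}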
|
|
package fi.tkk.cs.tkkcc.slx;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import fi.tkk.cs.tkkcc.slx.CommandWord;
/**
* SLX interpreter.
*
* @author Timo Montonen
*/
public final class Interpreter {
/** Log generator. */
private static final Logger log =
Logger.getLogger(Interpreter.class.getName());
/** Memory size of the SLX machine */
private final static int MEM_SIZE = 5000;
/** Maximum length of the SLX program */
private final static int MAX_PROGRAM_SIZE = 1000000;
/** Number of words reserved in a frame for interpreter internal use */
private final static int FRAME_SIZE = 2;
/** The program to interpret */
private final List<Instruction> program;
/** Label -> pc mapping of the program */
private final Map<Integer, Integer> labelMap;
    /** List of integers that the program has printed */
private final List<Integer> result;
/** The memory of the SLX machine */
private final Integer[] memory;
/** The stack of the SLX machine */
private final Stack<Integer> stack;
/** The program counter (pc) register */
private int programCounter = 0;
/** The frame pointer (fp) register */
private int framePointer = 0;
/** The heap pointer (hp) register */
private int heapPointer = MEM_SIZE;
/** True if the program has already been executed */
private boolean programExecuted = false;
/**
* Initialize a new SLX interpreter.
*
* @param fileName Filename of the SLX program to execute.
*/
public Interpreter(final String fileName) {
super();
if (fileName == null) {
throw new IllegalArgumentException("Filename may not be null!");
}
this.program = new ArrayList<Instruction>();
this.labelMap = new HashMap<Integer, Integer>();
this.stack = new Stack<Integer>();
this.result = new ArrayList<Integer>();
this.memory = new Integer[MEM_SIZE];
for(int i = 0; i < this.memory.length; i++) {
this.memory[i] = new Integer(0);
}
this.readFile(fileName);
}
/**
* Initialize a new SLX interpreter.
*
* @param slxProgram instance of slxProgram
* @param loggingLevel the desired logging level
*/
public Interpreter(final SlxProgram slxProgram, final String loggingLevel) {
super();
if (slxProgram == null) {
throw new IllegalArgumentException("Parameter may not be null!");
}
        // Default the log level to SEVERE when executing from the TestRunner;
        // parseLoggingLevel below applies the requested level if it is valid.
Interpreter.log.setLevel(Level.SEVERE);
Interpreter.parseLoggingLevel(loggingLevel);
this.stack = new Stack<Integer>();
this.result = new ArrayList<Integer>();
this.memory = new Integer[MEM_SIZE];
for(int i = 0; i < this.memory.length; i++) {
this.memory[i] = new Integer(0);
}
this.program = slxProgram.getProgram();
this.labelMap = slxProgram.getLabelMap();
}
/**
* Read an SLX program from a file.
*
* @param fileName the name of the SLX program file
*/
private void readFile(final String fileName) {
BufferedReader reader = null;
log.info("Start reading file " + fileName);
try {
try {
reader = new BufferedReader(new FileReader(fileName));
int instructionNumber = 0;
int sourceLineNumber = 0;
while (reader.ready()) {
String line = reader.readLine().trim();
sourceLineNumber++;
// ignore empty lines and comment lines
if (line.length() > 0 && !line.startsWith(";")) {
try {
Instruction c =
new Instruction(line, sourceLineNumber);
if (c.getCommandWord().equals(CommandWord.LAB)) {
// Add labels also to label -> pc map
this.labelMap.put
(c.getCommandParameter(0),
new Integer(instructionNumber));
}
this.program.add(c);
instructionNumber++;
} catch (IllegalInstructionException e) {
log.severe(e.getMessage());
this.program.clear();
return;
}
}
}
} finally {
if (reader != null) {
reader.close();
}
}
} catch (FileNotFoundException e) {
log.severe("File '" + fileName + "' not found!");
} catch (IOException e) {
log.severe("Unknown IO exception");
e.printStackTrace();
}
log.info("Reading done. Success? " + !this.program.isEmpty());
}
/**
     * Is the interpreter ready to execute, i.e. has a program been loaded?
     *
     * @return <code>true</code> if the program is not empty
*/
public boolean isReady() {
return !this.program.isEmpty();
}
/**
* Execute the program.
*
     * @param printToStdout <code>true</code> if the results should be printed to stdout
* @param input The integer inputs of the program. If
* <code>null</code>, input is read from stdin.
*/
public void execute(final boolean printToStdout, int[] input) {
if (this.isReady()) {
try {
this.executePrivate(printToStdout, input);
this.programExecuted = true;
} catch(ExecutionException ee) {
log.severe("Execution halted due program error.");
} catch(RuntimeException re) {
log.severe("Execution halted due unexpected error.");
re.printStackTrace();
}
}
}
/**
* Program execution helper method.
*
     * @param printToStdout <code>true</code> if the results should be printed to stdout
* @param input The integer inputs of the program. If
* <code>null</code>, input is read from stdin.
*/
private void executePrivate(final boolean printToStdout, int[] input) {
boolean halt = false;
Integer x1 = null;
Integer x2 = null;
int step = 0;
int inputPointer = 0;
boolean readDone;
BufferedReader inputReader = null;
if (input == null) {
inputReader = new BufferedReader(new InputStreamReader(System.in));
}
log.info("Start executing...");
while (!halt && step < MAX_PROGRAM_SIZE) {
Instruction instr = this.program.get(programCounter);
programCounter++;
switch (instr.getCommandWord()) {
case ADD:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x1 + " + " + x2);
stack.push(new Integer(x1.intValue() + x2.intValue()));
break;
case ALC:
                // Allocate memory: take the area size from the stack, add one
                // word for the length field, move the heap pointer down and
                // store the length field, then push the pointer onto the stack.
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
x2 = x1.intValue() + 1;
heapPointer = heapPointer - x2 - 1;
memory[heapPointer] = x1;
stack.push(new Integer(heapPointer));
log.info("Allocate memory: " + x1 + " slots, HP=" +
heapPointer);
break;
case DIV:
// x = pop(); y = pop(); push(y / x);
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
this.check(instr, x1 != 0, "Division by zero");
stack.push(new Integer(x2.intValue() / x1.intValue()));
break;
case ENT:
stack.push(instr.getCommandParameter(0));
break;
case HLT:
halt = true;
log.info("Halt!");
break;
case JMP:
this.check(instr, this.labelMap.containsKey
(instr.getCommandParameter(0)),
"Unknown label: " + instr.getCommandParameter(0));
programCounter =
this.labelMap.get(instr.getCommandParameter(0));
log.info("Jump to " + programCounter);
break;
case JZE:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
log.info("Compare " + x1 + " to zero...");
if (x1.intValue() == 0) {
this.check(instr, this.labelMap.containsKey
(instr.getCommandParameter(0)),
"Unknown label: " +
instr.getCommandParameter(0));
programCounter =
this.labelMap.get(instr.getCommandParameter(0));
log.info("... true => jump to " + programCounter);
} else {
log.info("... false => continue");
}
break;
case LAB:
// Just skip the label
break;
case LDL:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
log.info("Stack <- mem[" + (x1 + framePointer) + "]=" +
memory[framePointer + x1.intValue()]);
stack.push(memory[framePointer + x1.intValue()]);
break;
case LDM:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
this.check(instr, x1 >= 0 && x1 < memory.length,
"Memory access out of bounds");
log.info("Stack <- mem[" + x1 + "]=" + memory[x1.intValue()]);
stack.push(memory[x1.intValue()]);
break;
case MUL:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x1 + " * " + x2);
stack.push(new Integer(x1.intValue() * x2.intValue()));
break;
case NOT:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
log.info("Stack <- !" + x1);
stack.push(new Integer(x1.intValue() == 1 ? 0 : 1));
break;
case REQ:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <-" + x2 + " == " + x1);
stack.push(new Integer(x1.intValue() == x2.intValue() ? 1 : 0));
break;
case RET:
programCounter = memory[framePointer];
heapPointer = memory[framePointer - FRAME_SIZE + 1];
framePointer = memory[framePointer - FRAME_SIZE];
log.info("Return from subroutine to " + programCounter +
", FP=" + framePointer);
break;
case RGE:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " >= " + x1);
stack.push(new Integer(x2.intValue() >= x1.intValue() ? 1 : 0));
break;
case RGT:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " > " + x1);
stack.push(new Integer(x2.intValue() > x1.intValue() ? 1 : 0));
break;
case RLE:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " <= " + x1);
stack.push(new Integer(x2.intValue() <= x1.intValue() ? 1 : 0));
break;
case RLT:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " < " + x1);
stack.push(new Integer(x2.intValue() < x1.intValue() ? 1 : 0));
break;
case RNE:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " != " + x1);
stack.push(new Integer(x1.intValue() != x2.intValue() ? 1 : 0));
break;
case SBR:
                // The previous command has to be SFR, otherwise the frame
                // pointer still points to the old frame
memory[framePointer] = programCounter;
this.check(instr, this.labelMap.containsKey
(instr.getCommandParameter(0)),
"Unknown label: " + instr.getCommandParameter(0));
programCounter =
this.labelMap.get(instr.getCommandParameter(0));
log.info("Call subroutine, jump to " + programCounter +
", old PC stored to " + framePointer);
this.check(instr, stack.size() >= instr.getCommandParameter(1),
"Stack underflow");
this.check(instr, instr.getCommandParameter(1) >= 0,
"Negative amount of parameters");
this.check(instr, framePointer + instr.getCommandParameter(1) <
memory.length, "Memory access out of bounds");
// Load parameters into frame in reverse order
for (int i = instr.getCommandParameter(1); i > 0; i--) {
x1 = stack.pop();
memory[framePointer + i] = x1;
log.info("Store parameter " + (i - 1) + "(value=" + x1 +
") at FP=" + (framePointer + i));
}
break;
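            // Frame layout used by the SFR/SBR/RET instructions (FRAME_SIZE = 2
            // reserves the two saved-register slots):
            //   memory[FP - 2] = caller's frame pointer (saved by SFR)
            //   memory[FP - 1] = caller's heap pointer  (saved by SFR)
            //   memory[FP]     = return address         (stored by SBR)
            //   memory[FP + 1 .. FP + n] = the n subroutine parameters (stored by SBR)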
case SFR:
int frameOffset = instr.getCommandParameter(0);
this.check(instr, framePointer + frameOffset +
FRAME_SIZE + 1 < memory.length,
"Frame pointer out of memory bounds");
log.info("Store FP=" + framePointer + " to memory location "
+ (framePointer + frameOffset + 1)
+ "(Offset=" + frameOffset + ")");
memory[framePointer + frameOffset + FRAME_SIZE - 1] =
framePointer;
memory[framePointer + frameOffset + FRAME_SIZE] = heapPointer;
framePointer = framePointer + frameOffset + FRAME_SIZE + 1;
log.info("New FP=" + framePointer);
break;
case STL:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
this.check(instr, (framePointer + x2.intValue()) >= 0 &&
(framePointer + x2.intValue()) < memory.length,
"Memory access out of bounds");
log.info("Stack (value=" + x1 + ") -> mem[" +
(x2.intValue() + framePointer) + "]");
memory[framePointer + x2.intValue()] = x1;
break;
case STM:
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
this.check(instr, x2 >= 0 && x2 < memory.length,
"Memory access out of bounds");
log.info("Stack (value=" + x1 + ") -> mem[" + x2.intValue() +
"]");
memory[x2.intValue()] = x1;
break;
case SUB:
// x = pop(); y = pop(); push(y - x);
this.check(instr, stack.size() >= 2, "Stack underflow");
x1 = stack.pop();
x2 = stack.pop();
this.checkForNull(x1, x2, instr);
log.info("Stack <- " + x2 + " - " + x1);
stack.push(new Integer(x2.intValue() - x1.intValue()));
break;
case UMN:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
log.info("Stack <- -" + x1);
stack.push(new Integer(-x1.intValue()));
break;
case WRI:
this.check(instr, stack.size() > 0, "Stack underflow");
x1 = stack.pop();
if(printToStdout) {
System.out.println(x1);
}
this.result.add(x1);
break;
case REA:
if (input == null) {
                    // Read from stdin
readDone = false;
while(!readDone) {
try {
x1 = new Integer(Integer.parseInt
(inputReader.readLine()));
readDone = true;
} catch (IOException ioe) {
} catch (NumberFormatException nfe) {
}
}
stack.push(x1);
} else {
// Read from input array
if (inputPointer < input.length) {
stack.push(new Integer(input[inputPointer]));
inputPointer++;
} else {
// No more input
log.severe("Could not read input at line " +
instr.getSourceLine());
halt = true;
}
}
break;
default:
// Unknown instruction
log.severe("Unknown instruction: " + instr + " at line " +
instr.getSourceLine());
halt = true;
break;
}
step++;
}
if (step >= MAX_PROGRAM_SIZE) {
log.severe("Program execution halted due too many steps");
throw new ExecutionException();
}
}
/**
* Helper method for checking conditions and printing messages if
* the test is not passed.
*
     * @param instr the related instruction (for logging)
* @param checkExpression the boolean value to check
* @param message the message to print if the check is not passed
*/
private void check(final Instruction instr, final boolean checkExpression,
final String message) {
if(!checkExpression) {
log.severe(message);
this.logCurrentState(instr);
throw new ExecutionException();
}
}
/**
* Log the current state of the machine.
*
* @param instr the current instruction
*/
private void logCurrentState(final Instruction instr) {
if (log.isLoggable(Level.INFO)) {
log.info("Current instruction: " + instr + " (line " +
instr.getSourceLine() + ")");
log.info("PC: " + programCounter + "\tFP: " + framePointer +
"\tHP:" + heapPointer);
log.info("Stack contents:");
if(this.stack.size() > 0) {
int limit = Math.max(stack.size() - 10, -1);
for(int i = stack.size() - 1; i > limit; i--) {
log.info(i + ": " + stack.get(i));
}
} else {
log.info("Stack is empty!");
}
log.info("Memory dump:");
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("0x00000001\t");
int i = 0;
while (i < MEM_SIZE) {
stringBuilder.append("0x");
stringBuilder.append(String.format("%08X", this.memory[i]));
stringBuilder.append(" ");
i++;
if(i < MEM_SIZE && (i % 8) == 0) {
log.info(stringBuilder.toString());
stringBuilder = new StringBuilder();
stringBuilder.append("0x");
stringBuilder.append(String.format("%08X", i));
stringBuilder.append("\t");
}
}
log.info(stringBuilder.toString());
}
}
/**
* Helper method to check for null arguments.
*
* @param x1 the first argument to check
* @param x2 the second argument to check
* @param instr the related instruction
*/
private void checkForNull(final Integer x1, final Integer x2,
final Instruction instr) {
if (x1 == null) {
throw new IllegalStateException
("Parameter 1 is null for instruction " + instr + " at line "
+ instr.getSourceLine());
}
if (x2 == null) {
throw new IllegalStateException
("Parameter 2 is null for instruction " + instr + " at line "
+ instr.getSourceLine());
}
}
/**
* Get the list of printed integers after execution of the program.
*
* @return the list of integers
*/
public List<Integer> getResult() {
return this.result;
}
/**
* Main program for executing the interpreter.
*
* @param args Program arguments. -d may be used to define the logging level
*/
public static void main(String[] args) {
if (args.length > 0) {
log.getParent().getHandlers()[0].setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
return record.getMillis() + ": " +
record.getMessage() + "\n";
}
});
log.setLevel(Level.WARNING);
if ((args.length > 2) &&
"-d".equals(args[0].trim().toLowerCase())) {
parseLoggingLevel(args[1]);
}
Interpreter inter = new Interpreter(args[args.length - 1]);
inter.execute(true, null);
} else {
// Print usage message
System.out.println
("Usage: java -jar SlxInterpreter.jar [-d logging-level] <filename>");
System.out.println
("Valid logging levels are one of the following:");
System.out.println("\tSevere, Warning [default], Info");
}
}
/**
* Parse the log level string.
*
* @param loggingLevelString the logging level
*/
private static void parseLoggingLevel(final String loggingLevelString) {
try {
log.setLevel(Level.parse(loggingLevelString.toUpperCase()));
} catch (IllegalArgumentException e) {
log.warning("Invalid logging level: " + loggingLevelString);
}
log.info("Logging level set to " + loggingLevelString.toUpperCase());
}
/**
* Check if the program has already been executed.
*
* @return <code>true</code> if the program has been executed
*/
public boolean isProgramExecuted() {
return this.programExecuted;
}
}
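/*
 * A minimal programmatic-usage sketch, kept outside the Interpreter class itself.
 * The SLX file name and the input values are hypothetical.
 */
final class InterpreterUsageSketch {
    public static void main(String[] args) {
        Interpreter interpreter = new Interpreter("examples/sum.slx"); // hypothetical program file
        if (interpreter.isReady()) {
            // pass the inputs explicitly instead of reading them from stdin,
            // and suppress printing so the WRI output is only collected
            interpreter.execute(false, new int[] {2, 3});
            List<Integer> printed = interpreter.getResult();
            System.out.println("WRI output: " + printed);
        }
    }
}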
|
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.dao;
import java.util.Date;
import java.util.UUID;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import com.cloud.utils.db.GenericDao;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.NicIpAlias;
@Entity
@Table(name = "nic_ip_alias")
public class NicIpAliasVO implements NicIpAlias {
public NicIpAliasVO(Long nicId, String ipaddr, Long vmId, Long accountId, Long domainId, Long networkId, String gateway, String netmask) {
this.nicId = nicId;
this.vmId = vmId;
ip4Address = ipaddr;
this.accountId = accountId;
this.domainId = domainId;
this.networkId = networkId;
this.netmask = netmask;
this.gateway = gateway;
state = NicIpAlias.State.active;
String cidr = NetUtils.getCidrFromGatewayAndNetmask(gateway, netmask);
String[] cidrPair = cidr.split("\\/");
String cidrAddress = cidrPair[0];
long cidrSize = Long.parseLong(cidrPair[1]);
startIpOfSubnet = NetUtils.getIpRangeStartIpFromCidr(cidrAddress, cidrSize);
}
protected NicIpAliasVO() {
}
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "id")
long id;
@Column(name = "nic_Id")
long nicId;
@Column(name = "domain_id", updatable = false)
long domainId;
@Column(name = "account_id", updatable = false)
private Long accountId;
@Column(name = "ip4_address")
String ip4Address;
@Column(name = "ip6_address")
String ip6Address;
@Column(name = "netmask")
String netmask;
@Column(name = "network_id")
long networkId;
@Column(name = GenericDao.CREATED_COLUMN)
Date created;
@Column(name = "uuid")
String uuid = UUID.randomUUID().toString();
@Column(name = "vmId")
Long vmId;
@Column(name = "alias_count")
Long aliasCount;
@Column(name = "gateway")
String gateway;
@Column(name = "state")
@Enumerated(value = EnumType.STRING)
NicIpAlias.State state;
@Column(name = "start_ip_of_subnet")
String startIpOfSubnet;
@Override
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
@Override
public long getNicId() {
return nicId;
}
public void setNicId(long nicId) {
this.nicId = nicId;
}
@Override
public long getDomainId() {
return domainId;
}
public void setDomainId(Long domainId) {
this.domainId = domainId;
}
@Override
public long getAccountId() {
return accountId;
}
public void setAccountId(Long accountId) {
this.accountId = accountId;
}
@Override
public String getIp4Address() {
return ip4Address;
}
public void setIp4Address(String ip4Address) {
this.ip4Address = ip4Address;
}
@Override
public String getIp6Address() {
return ip6Address;
}
public void setIp6Address(String ip6Address) {
this.ip6Address = ip6Address;
}
@Override
public long getNetworkId() {
return networkId;
}
public void setNetworkId(long networkId) {
this.networkId = networkId;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
@Override
public String getUuid() {
return uuid;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
@Override
public long getVmId() {
return vmId;
}
public void setVmId(Long vmId) {
this.vmId = vmId;
}
@Override
public Long getAliasCount() {
return aliasCount;
}
public void setAliasCount(long count) {
aliasCount = count;
}
public void setNetmask(String netmask) {
this.netmask = netmask;
}
@Override
public String getNetmask() {
return netmask;
}
@Override
public String getGateway() {
return gateway;
}
public void setGateway(String gateway) {
this.gateway = gateway;
}
public NicIpAlias.State getState() {
return state;
}
public void setState(NicIpAlias.State state) {
this.state = state;
}
public String getStartIpOfSubnet() {
return startIpOfSubnet;
}
@Override
public Class<?> getEntityType() {
return NicIpAlias.class;
}
}
|
|
/**
* Copyright Notice
*
* This is a work of the U.S. Government and is not subject to copyright
* protection in the United States. Foreign copyrights may apply.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.va.legoEdit.model.bdbModel;
import gov.va.legoEdit.model.schemaModel.Lego;
import gov.va.legoEdit.model.schemaModel.LegoList;
import gov.va.legoEdit.storage.BDBDataStoreImpl;
import gov.va.legoEdit.storage.DataStoreException;
import gov.va.legoEdit.storage.WriteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.sleepycat.persist.model.Entity;
import com.sleepycat.persist.model.PrimaryKey;
import com.sleepycat.persist.model.Relationship;
import com.sleepycat.persist.model.SecondaryKey;
/**
*
* LegoListBDB
*
* @author <a href="mailto:daniel.armbrust.list@gmail.com">Dan Armbrust</a>
* Copyright 2013
*/
@Entity
public class LegoListBDB
{
@SecondaryKey(relate = Relationship.ONE_TO_ONE) protected String groupName;
@PrimaryKey protected String legoListUUID;
protected String groupDescription;
protected String comment;
protected List<String> legoUniqueIds;
@SecondaryKey(relate = Relationship.ONE_TO_MANY) protected Set<String> legoUUIDs;
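    // Tracks how many legos in this list use each lego UUID, so that a UUID is only
    // dropped from legoUUIDs once the last lego referencing it has been removed.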
protected HashMap<String, Integer> legoUUIDsUsage;
private transient List<LegoBDB> legoBDBRefs;
@SuppressWarnings("unused")
private LegoListBDB()
{
// required by BDB
}
public LegoListBDB(String uuid, String groupName, String groupDescription, String comment)
{
this.groupName = groupName;
this.groupDescription = groupDescription;
this.comment = comment;
this.legoListUUID = uuid;
this.legoUniqueIds = new ArrayList<>();
this.legoUUIDs = new HashSet<>();
this.legoUUIDsUsage = new HashMap<>();
}
public LegoListBDB(LegoList ll) throws WriteException
{
groupDescription = ll.getGroupDescription();
groupName = ll.getGroupName();
legoListUUID = ll.getLegoListUUID();
comment = ll.getComment();
legoUniqueIds = new ArrayList<>();
legoBDBRefs = new ArrayList<>();
this.legoUUIDs = new HashSet<>();
this.legoUUIDsUsage = new HashMap<>();
for (Lego l : ll.getLego())
{
LegoBDB lBDB = new LegoBDB(l);
verifyLegoUUID(l.getLegoUUID());
legoUniqueIds.add(lBDB.getUniqueId());
legoBDBRefs.add(lBDB);
legoUUIDs.add(l.getLegoUUID());
Integer temp = legoUUIDsUsage.get(l.getLegoUUID());
if (temp == null)
{
temp = new Integer(1);
}
else
{
temp = new Integer(1 + temp.intValue());
}
legoUUIDsUsage.put(l.getLegoUUID(), temp);
}
}
public List<LegoBDB> getLegoBDBs()
{
return legoBDBRefs;
}
public void addLego(LegoBDB lego) throws WriteException
{
verifyLegoUUID(lego.getLegoUUID());
legoUniqueIds.add(lego.getUniqueId());
legoUUIDs.add(lego.getLegoUUID());
Integer temp = legoUUIDsUsage.get(lego.getLegoUUID());
if (temp == null)
{
temp = new Integer(1);
}
else
{
temp = new Integer(1 + temp.intValue());
}
legoUUIDsUsage.put(lego.getLegoUUID(), temp);
}
public void removeLego(String legoUUID, String legoUniqueId)
{
legoUniqueIds.remove(legoUniqueId);
Integer temp = legoUUIDsUsage.get(legoUUID);
if (temp != null)
{
temp = new Integer(temp.intValue() - 1);
legoUUIDsUsage.put(legoUUID, temp);
}
if (temp == null || temp.intValue() == 0)
{
legoUUIDs.remove(legoUUID);
legoUUIDsUsage.remove(legoUUID);
}
}
private void verifyLegoUUID(String legoUUID) throws WriteException
{
// legoUUID should only be used by this legoList (no other)
List<String> legoListUUIDs = BDBDataStoreImpl.getInstance().getLegoListByLego(legoUUID);
for (String s : legoListUUIDs)
{
if (!this.legoListUUID.equals(s))
{
throw new WriteException("The LEGO UUID '" + legoUUID + "' is already in use by the legoList '" + s + "'. Lego UUIDs should not cross legoLists.");
}
}
}
public String getGroupName()
{
return groupName;
}
public void setGroupName(String groupName)
{
this.groupName = groupName;
}
public String getGroupDescription()
{
return groupDescription;
}
public void setGroupDescription(String description)
{
this.groupDescription = description;
}
public void setComment(String comment)
{
this.comment = comment;
}
public String getComment()
{
return comment;
}
public String getLegoListUUID()
{
return legoListUUID;
}
public List<String> getUniqueLegoIds()
{
ArrayList<String> result = new ArrayList<>();
if (legoUniqueIds != null)
{
for (String s : this.legoUniqueIds)
{
result.add(s);
}
}
return result;
}
public LegoList toSchemaLegoList()
{
LegoList ll = new LegoList();
ll.setGroupDescription(groupDescription);
ll.setGroupName(groupName);
ll.setLegoListUUID(legoListUUID);
ll.setComment(comment);
List<Lego> legos = ll.getLego();
for (String lui : legoUniqueIds)
{
Lego l = ((BDBDataStoreImpl) BDBDataStoreImpl.getInstance()).getLegoByUniqueId(lui);
if (l == null)
{
throw new DataStoreException("This shouldn't have been null!");
}
legos.add(l);
}
return ll;
}
}
|
|
package lv.ctco.cukes.ldap.facade;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import lv.ctco.cukes.core.CukesRuntimeException;
import lv.ctco.cukes.core.internal.matchers.ContainsPattern;
import lv.ctco.cukes.core.internal.resources.FilePathService;
import lv.ctco.cukes.ldap.internal.EntityService;
import lv.ctco.cukes.ldap.internal.ldif.LDIFUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
@Singleton
public class EntityFacade {
private static final Map<String, Function<Integer, Matcher<Integer>>> sizeMatchers = new HashMap<>();
static {
sizeMatchers.put("=", Matchers::is);
sizeMatchers.put(">", Matchers::greaterThan);
sizeMatchers.put(">=", Matchers::greaterThanOrEqualTo);
sizeMatchers.put("<", Matchers::lessThan);
sizeMatchers.put("<=", Matchers::lessThanOrEqualTo);
sizeMatchers.put("<>", Matchers::not);
}
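    // For example, a size constraint ">= 3" resolves to sizeMatchers.get(">=").apply(3),
    // i.e. Matchers.greaterThanOrEqualTo(3), which is then passed to assertThat(...) below.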
private Attributes entity;
private final List<Attributes> searchResults = new ArrayList<>();
@Inject
EntityService entityService;
@Inject
FilePathService filePathService;
public void initConfiguration() {
entity = null;
}
public void readEntityByDn(String dn) {
entity = entityService.getEntityByDn(dn);
}
public void entityExists() {
assertThat(entity, notNullValue());
}
public void entityDoesNotExist() {
assertThat(entity, nullValue());
}
public void deleteEntityByDn(String dn) {
entityService.deleteEntityByDn(dn);
}
public void entityHasAttributeWithValueOtherThat(String attribute, String value) {
Attribute attr = getNotNullAttribute(attribute);
assertThat(attr.contains(value), is(false));
}
public Attribute getNotNullAttribute(String attribute) {
if (entity == null) {
throw new CukesRuntimeException("Entity was not loaded");
}
Attribute attr = entity.get(attribute);
assertThat("Expected that attribute '" + attribute + "' will be present", attr, notNullValue());
return attr;
}
public void entityContainsAttribute(String attribute) {
getNotNullAttribute(attribute);
}
public void entityDoesNotContainAttribute(String attribute) {
Attribute attr = getAttribute(attribute);
assertThat(attr, nullValue());
}
public Attribute getAttribute(String attribute) {
if (entity == null) {
throw new CukesRuntimeException("Entity was not loaded");
}
return entity.get(attribute);
}
public void entityHasAttributeAsArrayOfSize(String attribute, String operator, int size) {
Attribute attr = getNotNullAttribute(attribute);
int count = 0;
try {
NamingEnumeration<?> e = attr.getAll();
while (e.hasMore()) {
e.next();
count++;
}
} catch (NamingException e) {
throw new CukesRuntimeException(e);
}
Function<Integer, Matcher<Integer>> matcherFunction = sizeMatchers.get(operator);
if (matcherFunction == null) {
throw new IllegalArgumentException("Unknown operator: " + operator);
}
assertThat(count, matcherFunction.apply(size));
}
public void entityHasAttributeWithValueMatchingPattern(String attribute, String pattern) {
Attribute attr = getNotNullAttribute(attribute);
Matcher<CharSequence> matcher = ContainsPattern.containsPattern(pattern);
try {
NamingEnumeration<?> e = attr.getAll();
while (e.hasMore()) {
Object next = e.next();
String s = String.valueOf(next);
if (matcher.matches(s)) {
return;
}
}
} catch (NamingException ex) {
throw new CukesRuntimeException(ex);
}
        fail("No value of attribute '" + attribute + "' matches pattern '" + pattern + "'");
}
public void entityHasAttributeWithValueNotMatchingPattern(String attribute, String pattern) {
Attribute attr = getNotNullAttribute(attribute);
try {
NamingEnumeration<?> e = attr.getAll();
while (e.hasMore()) {
Object next = e.next();
String s = String.valueOf(next);
assertThat(s, not(ContainsPattern.containsPattern(pattern)));
}
} catch (NamingException ex) {
throw new CukesRuntimeException(ex);
}
}
public void importLdif(String ldif) {
importLdif(new ByteArrayInputStream(ldif.getBytes(UTF_8)));
}
private void importLdif(InputStream inputStream) {
try {
Map<String, Attributes> entities = LDIFUtils.read(inputStream);
for (Map.Entry<String, Attributes> entry : entities.entrySet()) {
entityService.createEntity(entry.getKey(), entry.getValue());
}
} catch (IOException e) {
throw new CukesRuntimeException(e);
}
}
public void importLdifFromFile(String ldifFile) {
try {
String path = filePathService.normalize(ldifFile);
importLdif(new FileInputStream(path));
} catch (FileNotFoundException e) {
throw new CukesRuntimeException(e);
}
}
public void entityMatchesLDIF(String ldif) {
try {
Map<String, Attributes> entities = LDIFUtils.read(new ByteArrayInputStream(ldif.getBytes(UTF_8)));
assertThat(entities.size(), is(1));
Attributes ldifEntity = entities.values().iterator().next();
NamingEnumeration<? extends Attribute> attributes = ldifEntity.getAll();
while (attributes.hasMore()) {
Attribute attribute = attributes.next();
NamingEnumeration<?> values = attribute.getAll();
while (values.hasMore()) {
Object value = values.next();
entityHasAttributeWithValue(attribute.getID(), String.valueOf(value));
}
}
} catch (NamingException | IOException e) {
throw new CukesRuntimeException(e);
}
}
public void entityHasAttributeWithValue(String expectedAttr, String expectedValue) {
Attribute actualAttr = getNotNullAttribute(expectedAttr);
List<String> attributesList = new ArrayList<>();
for (int i = 0; i < actualAttr.size(); i++) {
try {
attributesList.add(toString(actualAttr.get(i)));
} catch (NamingException e) {
throw new CukesRuntimeException(e);
}
}
assertThat("Should have attribute '" + expectedAttr + "' with value '" + expectedValue + "'", attributesList, hasItem(expectedValue));
}
private String toString(Object value) {
if (value instanceof byte[]) {
return new String((byte[]) value, UTF_8);
} else if (value instanceof char[]) {
return new String((char[]) value);
} else if (value.getClass().isArray()) {
return ArrayUtils.toString(value);
}
return value.toString();
}
public void searchByFilter(String dn, String filter) {
searchResults.clear();
searchResults.addAll(entityService.searchByFilter(dn, filter));
}
public void searchResultHasSize(String operator, int size) {
Function<Integer, Matcher<Integer>> matcherFunction = sizeMatchers.get(operator);
if (matcherFunction == null) {
throw new IllegalArgumentException("Unknown operator: " + operator);
}
assertThat(searchResults.size(), matcherFunction.apply(size));
}
public void takeEntityFromSearchResults(int index) {
if (index < 0 || index >= searchResults.size()) {
throw new IllegalArgumentException("Cannot extract entity from search results set by index " + index +
". Total result set size is " + searchResults.size());
}
this.entity = searchResults.get(index);
}
}
|
|
/**
* Refinement Analysis Tools is Copyright (c) 2007 The Regents of the
* University of California (Regents). Provided that this notice and
* the following two paragraphs are included in any distribution of
* Refinement Analysis Tools or its derivative work, Regents agrees
* not to assert any of Regents' copyright rights in Refinement
* Analysis Tools against recipient for recipient's reproduction,
* preparation of derivative works, public display, public
* performance, distribution or sublicensing of Refinement Analysis
* Tools and derivative works, in source code and object code form.
* This agreement not to assert does not confer, by implication,
* estoppel, or otherwise any license or rights in any intellectual
* property of Regents, including, but not limited to, any patents
* of Regents or Regents' employees.
*
* IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT,
* INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
* INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE
* AND ITS DOCUMENTATION, EVEN IF REGENTS HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE AND FURTHER DISCLAIMS ANY STATUTORY
* WARRANTY OF NON-INFRINGEMENT. THE SOFTWARE AND ACCOMPANYING
* DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS
* IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT,
* UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/
package edu.colorado.thresher.core;
import com.ibm.wala.classLoader.IBytecodeMethod;
import com.ibm.wala.classLoader.IClass;
import com.ibm.wala.demandpa.alg.ContextSensitiveStateMachine;
import com.ibm.wala.demandpa.alg.DemandRefinementPointsTo;
import com.ibm.wala.demandpa.alg.DemandRefinementPointsTo.PointsToResult;
import com.ibm.wala.demandpa.alg.refinepolicy.FieldRefinePolicy;
import com.ibm.wala.demandpa.alg.refinepolicy.ManualFieldPolicy;
import com.ibm.wala.demandpa.alg.refinepolicy.RefinementPolicyFactory;
import com.ibm.wala.demandpa.alg.refinepolicy.TunedRefinementPolicy;
import com.ibm.wala.demandpa.alg.statemachine.StateMachineFactory;
import com.ibm.wala.demandpa.flowgraph.IFlowLabel;
import com.ibm.wala.demandpa.util.MemoryAccessMap;
import com.ibm.wala.demandpa.util.PABasedMemoryAccessMap;
import com.ibm.wala.ipa.callgraph.*;
import com.ibm.wala.ipa.callgraph.impl.Util;
import com.ibm.wala.ipa.callgraph.propagation.HeapModel;
import com.ibm.wala.ipa.callgraph.propagation.InstanceKey;
import com.ibm.wala.ipa.callgraph.propagation.PointerAnalysis;
import com.ibm.wala.ipa.callgraph.propagation.PointerKey;
import com.ibm.wala.ipa.cha.ClassHierarchy;
import com.ibm.wala.ipa.cha.ClassHierarchyException;
import com.ibm.wala.ipa.cha.IClassHierarchy;
import com.ibm.wala.shrikeCT.InvalidClassFileException;
import com.ibm.wala.ssa.IR;
import com.ibm.wala.ssa.SSACheckCastInstruction;
import com.ibm.wala.ssa.SSAInstruction;
import com.ibm.wala.types.ClassLoaderReference;
import com.ibm.wala.types.MethodReference;
import com.ibm.wala.types.TypeReference;
import com.ibm.wala.util.CancelException;
import com.ibm.wala.util.NullProgressMonitor;
import com.ibm.wala.util.Predicate;
import com.ibm.wala.util.ProgressMaster;
import com.ibm.wala.util.collections.HashSetFactory;
import com.ibm.wala.util.collections.Pair;
import com.ibm.wala.util.debug.Assertions;
import com.ibm.wala.util.intset.OrdinalSet;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
/**
* Uses a demand-driven points-to analysis to check the safety of downcasts.
*
* @author Manu Sridharan
*
* Note -- this is adapted from Manu's original demand cast checker
*/
public class DemandCastChecker {
@SuppressWarnings("unchecked")
public static Pair<DemandRefinementPointsTo,PointerAnalysis> makeDemandPointerAnalysis(AnalysisScope scope,
ClassHierarchy cha,
AnalysisOptions options)
throws ClassHierarchyException, IllegalArgumentException, CancelException, IOException {
System.err.print("constructing call graph...");
final Pair<CallGraph, PointerAnalysis> cgAndPA = buildCallGraph(scope, cha, options);
CallGraph cg = cgAndPA.fst;
System.err.println("done");
System.err.println(CallGraphStats.getStats(cg));
MemoryAccessMap fam = new PABasedMemoryAccessMap(cg, cgAndPA.snd);
DemandRefinementPointsTo fullDemandPointsTo =
DemandRefinementPointsTo.makeWithDefaultFlowGraph(cg, heapModel, fam, cha, options, makeStateMachineFactory());
fullDemandPointsTo.setRefinementPolicyFactory(chooseRefinePolicyFactory(cha));
return Pair.make(fullDemandPointsTo,cgAndPA.snd);
}
// if true, construct up-front call graph cheaply (0-CFA)
private static final boolean CHEAP_CG = false;
private static HeapModel heapModel = null;
/**
   * Builds a call graph and sets the corresponding heap model for the analysis.
   *
   * @param scope the analysis scope describing the code to be analyzed
   * @param cha the class hierarchy for that scope
   * @param options the call graph construction options
   * @return a pair of the constructed call graph and its pointer analysis
* @throws CancelException
* @throws IllegalArgumentException
*/
private static Pair<CallGraph, PointerAnalysis> buildCallGraph(AnalysisScope scope, ClassHierarchy cha,
AnalysisOptions options)
throws IllegalArgumentException, CancelException {
CallGraph retCG = null;
PointerAnalysis retPA = null;
final AnalysisCache cache = new AnalysisCache();
CallGraphBuilder builder;
if (CHEAP_CG) {
builder = Util.makeZeroCFABuilder(options, cache, cha, scope);
// we want vanilla 0-1 CFA, which has one abstract loc per allocation
heapModel = Util.makeVanillaZeroOneCFABuilder(options, cache, cha, scope);
} else {
builder = Util.makeZeroOneContainerCFABuilder(options, cache, cha, scope);
heapModel = (HeapModel) builder;
}
ProgressMaster master = ProgressMaster.make(new NullProgressMonitor(), 360000, false);
master.beginTask("runSolver", 1);
try {
retCG = builder.makeCallGraph(options, master);
retPA = builder.getPointerAnalysis();
} catch (CallGraphBuilderCancelException e) {
System.err.println("TIMED OUT!!");
retCG = e.getPartialCallGraph();
retPA = e.getPartialPointerAnalysis();
}
return Pair.make(retCG, retPA);
}
private static RefinementPolicyFactory chooseRefinePolicyFactory(ClassHierarchy cha) {
return new TunedRefinementPolicy.Factory(cha);
}
private static StateMachineFactory<IFlowLabel> makeStateMachineFactory() {
return new ContextSensitiveStateMachine.Factory();
}
public static Set<String> findFailingCasts(CallGraph cg, PointerAnalysis pa, DemandRefinementPointsTo dmp)
throws InvalidClassFileException {
final IClassHierarchy cha = dmp.getClassHierarchy();
Set<String> failing = HashSetFactory.make();
Set<Integer> noMoreRefinement = HashSetFactory.make();
int numSafe = 0, numMightFail = 0, safeViaPointsTo = 0, count = 0;
outer: for (Iterator<? extends CGNode> nodeIter = cg.iterator(); nodeIter.hasNext();) {
CGNode node = nodeIter.next();
MethodReference method = node.getMethod().getReference();
TypeReference declaringClass = node.getMethod().getReference().getDeclaringClass();
// skip library classes
if (declaringClass.getClassLoader().equals(ClassLoaderReference.Primordial)) {
continue;
}
IR ir = node.getIR();
if (ir == null)
continue;
SSAInstruction[] instrs = ir.getInstructions();
IBytecodeMethod bytecodeMethod = (IBytecodeMethod) node.getMethod();
for (int i = 0; i < instrs.length; i++) {
SSAInstruction instruction = instrs[i];
if (instruction instanceof SSACheckCastInstruction) {
SSACheckCastInstruction castInstr = (SSACheckCastInstruction) instruction;
final TypeReference[] declaredResultTypes = castInstr.getDeclaredResultTypes();
boolean primOnly = true;
for (TypeReference t : declaredResultTypes) {
if (! t.isPrimitiveType()) {
primOnly = false;
}
}
if (primOnly) {
continue;
}
// bytecode index is the only way we can get different points-to analyses to agree on which casts are the same
int bytecodeIndex = bytecodeMethod.getBytecodeIndex(i);
String castId = method + ":" + bytecodeIndex;
System.out.println("Checking cast #" + ++count + " " + castInstr + " in " + node.getMethod() + ", line ?");
PointerKey castedPk = heapModel.getPointerKeyForLocal(node, castInstr.getUse(0));
@SuppressWarnings("unchecked")
OrdinalSet<InstanceKey> pointsToSet = (OrdinalSet<InstanceKey>) pa.getPointsToSet(castedPk);
Predicate<InstanceKey> castPred = new Predicate<InstanceKey>() {
@Override
public boolean test(InstanceKey ik) {
TypeReference ikTypeRef = ik.getConcreteType().getReference();
for (TypeReference t : declaredResultTypes) {
IClass class1 = cha.lookupClass(t), class2 = cha.lookupClass(ikTypeRef);
if (class1 == null || class2 == null) return true; // (unsoundly) punt
if (cha.isAssignableFrom(class1, class2)) {
return true;
}
}
return false;
}
};
Collection<InstanceKey> collection = OrdinalSet.toCollection(pointsToSet);
if (com.ibm.wala.util.collections.Util.forAll(collection, castPred)) {
System.err.println("SAFE VIA POINTER ANALYSIS: " + castInstr + " in " + node.getMethod());
numSafe++;
safeViaPointsTo++;
continue;
}
long startTime = System.currentTimeMillis();
Pair<PointsToResult, Collection<InstanceKey>> queryResult;
try {
queryResult = dmp.getPointsTo(castedPk, castPred);
} catch (Exception e) {
// treat failures as timeouts
queryResult = Pair.make(PointsToResult.BUDGETEXCEEDED, null);
}
long runningTime = System.currentTimeMillis() - startTime;
System.err.println("running time: " + runningTime + "ms");
final FieldRefinePolicy fieldRefinePolicy = dmp.getRefinementPolicy().getFieldRefinePolicy();
switch (queryResult.fst) {
case SUCCESS:
System.err.println("SAFE: " + castInstr + " in " + node.getMethod());
if (fieldRefinePolicy instanceof ManualFieldPolicy) {
ManualFieldPolicy hackedFieldPolicy = (ManualFieldPolicy) fieldRefinePolicy;
System.err.println(hackedFieldPolicy.getHistory());
}
System.err.println("TRAVERSED " + dmp.getNumNodesTraversed() + " nodes");
numSafe++;
break;
case NOMOREREFINE:
if (queryResult.snd != null) {
System.err.println("MIGHT FAIL: no more refinement possible for " + castInstr + " in " + node.getMethod());
noMoreRefinement.add(count);
} else {
System.err.println("MIGHT FAILs: exceeded budget for " + castInstr + " in " + node.getMethod());
System.err.println("skipping.");
}
failing.add(castId);
numMightFail++;
break;
case BUDGETEXCEEDED:
System.err.println("MIGHT FAIL: exceeded budget for " + castInstr + " in " + node.getMethod());
System.err.println("skipping.");
failing.add(castId);
numMightFail++;
break;
default:
Assertions.UNREACHABLE();
}
}
}
}
System.err.println("TOTAL SAFE: " + numSafe);
System.err.println("TOTAL SAFE VIA POINTS-TO: " + safeViaPointsTo);
System.err.println("TOTAL MIGHT FAIL: " + numMightFail);
System.err.println("TOTAL NO MORE REFINEMENT: " + noMoreRefinement.size());
return failing;
}
}
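/*
 * A minimal driver sketch (not part of the original tool): it only wires together the
 * two public entry points defined above. Building the AnalysisScope, ClassHierarchy,
 * AnalysisOptions and a matching CallGraph is WALA-version specific and is assumed to
 * have been done by the caller.
 */
class DemandCastCheckerDriver {
    static Set<String> runCheck(AnalysisScope scope, ClassHierarchy cha,
            AnalysisOptions options, CallGraph cg) throws Exception {
        // Must run first: besides constructing the demand-driven analysis it initializes
        // the shared heap model that findFailingCasts relies on.
        Pair<DemandRefinementPointsTo, PointerAnalysis> demand =
                DemandCastChecker.makeDemandPointerAnalysis(scope, cha, options);
        // cg is assumed to describe the same program; makeDemandPointerAnalysis builds its
        // own call graph internally but does not expose it.
        return DemandCastChecker.findFailingCasts(cg, demand.snd, demand.fst);
    }
}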
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.ExponentiallySmoothedTaskRuntimeEstimator;
import org.apache.hadoop.mapreduce.v2.app.speculate.LegacyTaskRuntimeEstimator;
import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.client.ClientToAMTokenSecretManager;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.junit.Assert;
import org.junit.Test;
@SuppressWarnings({"unchecked", "rawtypes"})
public class TestRuntimeEstimators {
private static int INITIAL_NUMBER_FREE_SLOTS = 600;
private static int MAP_SLOT_REQUIREMENT = 3;
// this has to be at least as much as map slot requirement
private static int REDUCE_SLOT_REQUIREMENT = 4;
private static int MAP_TASKS = 200;
private static int REDUCE_TASKS = 150;
MockClock clock;
Job myJob;
AppContext myAppContext;
private static final Log LOG = LogFactory.getLog(TestRuntimeEstimators.class);
private final AtomicInteger slotsInUse = new AtomicInteger(0);
AsyncDispatcher dispatcher;
DefaultSpeculator speculator;
TaskRuntimeEstimator estimator;
// This is a huge kluge. The real implementations have a decent approach
private final AtomicInteger completedMaps = new AtomicInteger(0);
private final AtomicInteger completedReduces = new AtomicInteger(0);
private final AtomicInteger successfulSpeculations
= new AtomicInteger(0);
private final AtomicLong taskTimeSavedBySpeculation
= new AtomicLong(0L);
private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
private void coreTestEstimator
(TaskRuntimeEstimator testedEstimator, int expectedSpeculations) {
estimator = testedEstimator;
clock = new MockClock();
dispatcher = new AsyncDispatcher();
myJob = null;
slotsInUse.set(0);
completedMaps.set(0);
completedReduces.set(0);
successfulSpeculations.set(0);
taskTimeSavedBySpeculation.set(0);
clock.advanceTime(1000);
Configuration conf = new Configuration();
myAppContext = new MyAppContext(MAP_TASKS, REDUCE_TASKS);
myJob = myAppContext.getAllJobs().values().iterator().next();
estimator.contextualize(conf, myAppContext);
conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_NO_SPECULATE, 500L);
conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_SPECULATE, 5000L);
conf.setDouble(MRJobConfig.SPECULATIVECAP_RUNNING_TASKS, 0.1);
conf.setDouble(MRJobConfig.SPECULATIVECAP_TOTAL_TASKS, 0.001);
conf.setInt(MRJobConfig.SPECULATIVE_MINIMUM_ALLOWED_TASKS, 5);
speculator = new DefaultSpeculator(conf, myAppContext, estimator, clock);
Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_NO_SPECULATE value",
500L, speculator.getSoonestRetryAfterNoSpeculate());
Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_SPECULATE value",
5000L, speculator.getSoonestRetryAfterSpeculate());
Assert.assertEquals(speculator.getProportionRunningTasksSpeculatable(),
0.1, 0.00001);
Assert.assertEquals(speculator.getProportionTotalTasksSpeculatable(),
0.001, 0.00001);
Assert.assertEquals("wrong SPECULATIVE_MINIMUM_ALLOWED_TASKS value",
5, speculator.getMinimumAllowedSpeculativeTasks());
dispatcher.register(Speculator.EventType.class, speculator);
dispatcher.register(TaskEventType.class, new SpeculationRequestEventHandler());
dispatcher.init(conf);
dispatcher.start();
speculator.init(conf);
speculator.start();
// Now that the plumbing is hooked up, we do the following:
// do until all tasks are finished, ...
// 1: If we have spare capacity, assign as many map tasks as we can, then
// assign as many reduce tasks as we can. Note that an odd reduce
// task might be started while there are still map tasks, because
// map tasks take 3 slots and reduce tasks 4 slots (see the *_SLOT_REQUIREMENT constants).
// 2: Send a speculation event for every task attempt that's running
// note that new attempts might get started by the speculator
// discover undone tasks
int undoneMaps = MAP_TASKS;
int undoneReduces = REDUCE_TASKS;
// build a task sequence where all the maps precede any of the reduces
List<Task> allTasksSequence = new LinkedList<Task>();
allTasksSequence.addAll(myJob.getTasks(TaskType.MAP).values());
allTasksSequence.addAll(myJob.getTasks(TaskType.REDUCE).values());
while (undoneMaps + undoneReduces > 0) {
undoneMaps = 0; undoneReduces = 0;
// start all attempts which are new but for which there are enough slots
for (Task task : allTasksSequence) {
if (!task.isFinished()) {
if (task.getType() == TaskType.MAP) {
++undoneMaps;
} else {
++undoneReduces;
}
}
for (TaskAttempt attempt : task.getAttempts().values()) {
if (attempt.getState() == TaskAttemptState.NEW
&& INITIAL_NUMBER_FREE_SLOTS - slotsInUse.get()
>= taskTypeSlots(task.getType())) {
MyTaskAttemptImpl attemptImpl = (MyTaskAttemptImpl)attempt;
SpeculatorEvent event
= new SpeculatorEvent(attempt.getID(), false, clock.getTime());
speculator.handle(event);
attemptImpl.startUp();
} else {
// If a task attempt is in progress we should send the news to
// the Speculator.
TaskAttemptStatus status = new TaskAttemptStatus();
status.id = attempt.getID();
status.progress = attempt.getProgress();
status.stateString = attempt.getState().name();
status.taskState = attempt.getState();
SpeculatorEvent event = new SpeculatorEvent(status, clock.getTime());
speculator.handle(event);
}
}
}
long startTime = System.currentTimeMillis();
// drain the speculator event queue
while (!speculator.eventQueueEmpty()) {
Thread.yield();
if (System.currentTimeMillis() > startTime + 130000) {
return;
}
}
clock.advanceTime(1000L);
if (clock.getTime() % 10000L == 0L) {
speculator.scanForSpeculations();
}
}
Assert.assertEquals("We got the wrong number of successful speculations.",
expectedSpeculations, successfulSpeculations.get());
}
@Test
public void testLegacyEstimator() throws Exception {
TaskRuntimeEstimator specificEstimator = new LegacyTaskRuntimeEstimator();
coreTestEstimator(specificEstimator, 3);
}
@Test
public void testExponentialEstimator() throws Exception {
TaskRuntimeEstimator specificEstimator
= new ExponentiallySmoothedTaskRuntimeEstimator();
coreTestEstimator(specificEstimator, 3);
}
int taskTypeSlots(TaskType type) {
return type == TaskType.MAP ? MAP_SLOT_REQUIREMENT : REDUCE_SLOT_REQUIREMENT;
}
class SpeculationRequestEventHandler implements EventHandler<TaskEvent> {
@Override
public void handle(TaskEvent event) {
TaskId taskID = event.getTaskID();
Task task = myJob.getTask(taskID);
Assert.assertEquals
("Wrong type event", TaskEventType.T_ADD_SPEC_ATTEMPT, event.getType());
System.out.println("SpeculationRequestEventHandler.handle adds a speculation task for " + taskID);
addAttempt(task);
}
}
void addAttempt(Task task) {
MyTaskImpl myTask = (MyTaskImpl) task;
myTask.addAttempt();
}
class MyTaskImpl implements Task {
private final TaskId taskID;
private final Map<TaskAttemptId, TaskAttempt> attempts
= new ConcurrentHashMap<TaskAttemptId, TaskAttempt>(4);
MyTaskImpl(JobId jobID, int index, TaskType type) {
taskID = recordFactory.newRecordInstance(TaskId.class);
taskID.setId(index);
taskID.setTaskType(type);
taskID.setJobId(jobID);
}
void addAttempt() {
TaskAttempt taskAttempt
= new MyTaskAttemptImpl(taskID, attempts.size(), clock);
TaskAttemptId taskAttemptID = taskAttempt.getID();
attempts.put(taskAttemptID, taskAttempt);
System.out.println("TLTRE.MyTaskImpl.addAttempt " + getID());
SpeculatorEvent event = new SpeculatorEvent(taskID, +1);
dispatcher.getEventHandler().handle(event);
}
@Override
public TaskId getID() {
return taskID;
}
@Override
public TaskReport getReport() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Counters getCounters() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public float getProgress() {
float result = 0.0F;
for (TaskAttempt attempt : attempts.values()) {
result = Math.max(result, attempt.getProgress());
}
return result;
}
@Override
public TaskType getType() {
return taskID.getTaskType();
}
@Override
public Map<TaskAttemptId, TaskAttempt> getAttempts() {
Map<TaskAttemptId, TaskAttempt> result
= new HashMap<TaskAttemptId, TaskAttempt>(attempts.size());
result.putAll(attempts);
return result;
}
@Override
public TaskAttempt getAttempt(TaskAttemptId attemptID) {
return attempts.get(attemptID);
}
@Override
public boolean isFinished() {
for (TaskAttempt attempt : attempts.values()) {
if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
return true;
}
}
return false;
}
@Override
public boolean canCommit(TaskAttemptId taskAttemptID) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public TaskState getState() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
class MyJobImpl implements Job {
private final JobId jobID;
private final Map<TaskId, Task> allTasks = new HashMap<TaskId, Task>();
private final Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
private final Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
MyJobImpl(JobId jobID, int numMaps, int numReduces) {
this.jobID = jobID;
for (int i = 0; i < numMaps; ++i) {
Task newTask = new MyTaskImpl(jobID, i, TaskType.MAP);
mapTasks.put(newTask.getID(), newTask);
allTasks.put(newTask.getID(), newTask);
}
for (int i = 0; i < numReduces; ++i) {
Task newTask = new MyTaskImpl(jobID, i, TaskType.REDUCE);
reduceTasks.put(newTask.getID(), newTask);
allTasks.put(newTask.getID(), newTask);
}
// give every task an attempt
for (Task task : allTasks.values()) {
MyTaskImpl myTaskImpl = (MyTaskImpl) task;
myTaskImpl.addAttempt();
}
}
@Override
public JobId getID() {
return jobID;
}
@Override
public JobState getState() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public JobReport getReport() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public float getProgress() {
return 0;
}
@Override
public Counters getAllCounters() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Map<TaskId, Task> getTasks() {
return allTasks;
}
@Override
public Map<TaskId, Task> getTasks(TaskType taskType) {
return taskType == TaskType.MAP ? mapTasks : reduceTasks;
}
@Override
public Task getTask(TaskId taskID) {
return allTasks.get(taskID);
}
@Override
public List<String> getDiagnostics() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int getCompletedMaps() {
return completedMaps.get();
}
@Override
public int getCompletedReduces() {
return completedReduces.get();
}
@Override
public TaskAttemptCompletionEvent[]
getTaskAttemptCompletionEvents(int fromEventId, int maxEvents) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public TaskCompletionEvent[]
getMapAttemptCompletionEvents(int startIndex, int maxEvents) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getName() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getQueueName() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int getTotalMaps() {
return mapTasks.size();
}
@Override
public int getTotalReduces() {
return reduceTasks.size();
}
@Override
public boolean isUber() {
return false;
}
@Override
public boolean checkAccess(UserGroupInformation callerUGI,
JobACL jobOperation) {
return true;
}
@Override
public String getUserName() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Path getConfFile() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Map<JobACL, AccessControlList> getJobACLs() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public List<AMInfo> getAMInfos() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Configuration loadConfFile() {
throw new UnsupportedOperationException();
}
@Override
public void setQueueName(String queueName) {
// do nothing
}
}
/*
* We follow the pattern of the real XxxImpl. We create a job and initialize
* it with a full suite of tasks which in turn have one attempt each in the
* NEW state. Attempts transition only from NEW to RUNNING to SUCCEEDED.
*/
class MyTaskAttemptImpl implements TaskAttempt {
private final TaskAttemptId myAttemptID;
long startMockTime = Long.MIN_VALUE;
long shuffleCompletedTime = Long.MAX_VALUE;
TaskAttemptState overridingState = TaskAttemptState.NEW;
MyTaskAttemptImpl(TaskId taskID, int index, Clock clock) {
myAttemptID = recordFactory.newRecordInstance(TaskAttemptId.class);
myAttemptID.setId(index);
myAttemptID.setTaskId(taskID);
}
void startUp() {
startMockTime = clock.getTime();
overridingState = null;
slotsInUse.addAndGet(taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
System.out.println("TLTRE.MyTaskAttemptImpl.startUp starting " + getID());
SpeculatorEvent event = new SpeculatorEvent(getID().getTaskId(), -1);
dispatcher.getEventHandler().handle(event);
}
@Override
public NodeId getNodeId() throws UnsupportedOperationException{
throw new UnsupportedOperationException();
}
@Override
public TaskAttemptId getID() {
return myAttemptID;
}
@Override
public TaskAttemptReport getReport() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public List<String> getDiagnostics() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Counters getCounters() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public int getShufflePort() {
throw new UnsupportedOperationException("Not supported yet.");
}
private float getCodeRuntime() {
int taskIndex = myAttemptID.getTaskId().getId();
int attemptIndex = myAttemptID.getId();
float result = 200.0F;
switch (taskIndex % 4) {
case 0:
if (taskIndex % 40 == 0 && attemptIndex == 0) {
result = 600.0F;
break;
}
break;
case 2:
break;
case 1:
result = 150.0F;
break;
case 3:
result = 250.0F;
break;
}
return result;
}
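// In effect, getCodeRuntime() gives every 40th task a slow (600 second) first attempt while any
// later attempt of that task runs in 200 seconds; those slow first attempts are what the
// speculator can profitably replace. The remaining tasks run in 150, 200, or 250 seconds
// depending on taskIndex % 4.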
private float getMapProgress() {
float runtime = getCodeRuntime();
return Math.min
((float) (clock.getTime() - startMockTime) / (runtime * 1000.0F), 1.0F);
}
private float getReduceProgress() {
Job job = myAppContext.getJob(myAttemptID.getTaskId().getJobId());
float runtime = getCodeRuntime();
Collection<Task> allMapTasks = job.getTasks(TaskType.MAP).values();
int numberMaps = allMapTasks.size();
int numberDoneMaps = 0;
for (Task mapTask : allMapTasks) {
if (mapTask.isFinished()) {
++numberDoneMaps;
}
}
if (numberMaps == numberDoneMaps) {
shuffleCompletedTime = Math.min(shuffleCompletedTime, clock.getTime());
return Math.min
((float) (clock.getTime() - shuffleCompletedTime)
/ (runtime * 2000.0F) + 0.5F,
1.0F);
} else {
return ((float) numberDoneMaps) / numberMaps * 0.5F;
}
}
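// Worked example for getReduceProgress() above: with getCodeRuntime() == 200, a reduce attempt
// sits at 0.5 * (finished maps / total maps) until every map finishes; after that its progress
// grows linearly and reaches 1.0 once clock.getTime() - shuffleCompletedTime == 200 * 1000 mock
// milliseconds. Map attempts, by contrast, simply reach 1.0 runtime seconds after startUp().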
// we compute progress from time and an algorithm now
@Override
public float getProgress() {
if (overridingState == TaskAttemptState.NEW) {
return 0.0F;
}
return myAttemptID.getTaskId().getTaskType() == TaskType.MAP ? getMapProgress() : getReduceProgress();
}
@Override
public Phase getPhase() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public TaskAttemptState getState() {
if (overridingState != null) {
return overridingState;
}
TaskAttemptState result
= getProgress() < 1.0F ? TaskAttemptState.RUNNING : TaskAttemptState.SUCCEEDED;
if (result == TaskAttemptState.SUCCEEDED) {
overridingState = TaskAttemptState.SUCCEEDED;
System.out.println("MyTaskAttemptImpl.getState() -- attempt " + myAttemptID + " finished.");
slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
(myAttemptID.getTaskId().getTaskType() == TaskType.MAP
? completedMaps : completedReduces).getAndIncrement();
// check for a spectacularly successful speculation
TaskId taskID = myAttemptID.getTaskId();
Task task = myJob.getTask(taskID);
for (TaskAttempt otherAttempt : task.getAttempts().values()) {
if (otherAttempt != this
&& otherAttempt.getState() == TaskAttemptState.RUNNING) {
// we had two instances running. Try to determine how much
// we might have saved by speculation
if (getID().getId() > otherAttempt.getID().getId()) {
// the speculation won
successfulSpeculations.getAndIncrement();
float hisProgress = otherAttempt.getProgress();
long hisStartTime = ((MyTaskAttemptImpl)otherAttempt).startMockTime;
System.out.println("TLTRE:A speculation finished at time "
+ clock.getTime()
+ ". The stalled attempt is at " + (hisProgress * 100.0)
+ "% progress, and it started at "
+ hisStartTime + ", which is "
+ (clock.getTime() - hisStartTime) + " ago.");
long originalTaskEndEstimate
= (hisStartTime
+ estimator.estimatedRuntime(otherAttempt.getID()));
System.out.println(
"TLTRE: We would have expected the original attempt to take "
+ estimator.estimatedRuntime(otherAttempt.getID())
+ ", finishing at " + originalTaskEndEstimate);
long estimatedSavings = originalTaskEndEstimate - clock.getTime();
taskTimeSavedBySpeculation.addAndGet(estimatedSavings);
System.out.println("TLTRE: The task is " + task.getID());
slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
((MyTaskAttemptImpl)otherAttempt).overridingState
= TaskAttemptState.KILLED;
} else {
System.out.println(
"TLTRE: The normal attempt beat the speculation in "
+ task.getID());
}
}
}
}
return result;
}
@Override
public boolean isFinished() {
return getProgress() == 1.0F;
}
@Override
public ContainerId getAssignedContainerID() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getNodeHttpAddress() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getNodeRackName() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public long getLaunchTime() {
return startMockTime;
}
@Override
public long getFinishTime() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public long getShuffleFinishTime() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public long getSortFinishTime() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public String getAssignedContainerMgrAddress() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
static class MockClock implements Clock {
private long currentTime = 0;
public long getTime() {
return currentTime;
}
void setMeasuredTime(long newTime) {
currentTime = newTime;
}
void advanceTime(long increment) {
currentTime += increment;
}
}
class MyAppMaster extends CompositeService {
final Clock clock;
public MyAppMaster(Clock clock) {
super(MyAppMaster.class.getName());
if (clock == null) {
clock = new SystemClock();
}
this.clock = clock;
LOG.info("Created MyAppMaster");
}
}
class MyAppContext implements AppContext {
private final ApplicationAttemptId myAppAttemptID;
private final ApplicationId myApplicationID;
private final JobId myJobID;
private final Map<JobId, Job> allJobs;
MyAppContext(int numberMaps, int numberReduces) {
myApplicationID = ApplicationId.newInstance(clock.getTime(), 1);
myAppAttemptID = ApplicationAttemptId.newInstance(myApplicationID, 0);
myJobID = recordFactory.newRecordInstance(JobId.class);
myJobID.setAppId(myApplicationID);
Job myJob
= new MyJobImpl(myJobID, numberMaps, numberReduces);
allJobs = Collections.singletonMap(myJobID, myJob);
}
@Override
public ApplicationAttemptId getApplicationAttemptId() {
return myAppAttemptID;
}
@Override
public ApplicationId getApplicationID() {
return myApplicationID;
}
@Override
public Job getJob(JobId jobID) {
return allJobs.get(jobID);
}
@Override
public Map<JobId, Job> getAllJobs() {
return allJobs;
}
@Override
public EventHandler getEventHandler() {
return dispatcher.getEventHandler();
}
@Override
public CharSequence getUser() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Clock getClock() {
return clock;
}
@Override
public String getApplicationName() {
return null;
}
@Override
public long getStartTime() {
return 0;
}
@Override
public ClusterInfo getClusterInfo() {
return new ClusterInfo();
}
@Override
public Set<String> getBlacklistedNodes() {
return null;
}
@Override
public ClientToAMTokenSecretManager getClientToAMTokenSecretManager() {
return null;
}
@Override
public boolean isLastAMRetry() {
return false;
}
@Override
public boolean hasSuccessfullyUnregistered() {
// bogus - Not Required
return true;
}
@Override
public String getNMHostname() {
// bogus - Not Required
return null;
}
}
}
|
|
/*
* #%L
* BroadleafCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.core.catalog.domain;
import org.broadleafcommerce.common.copy.MultiTenantCloneable;
import org.broadleafcommerce.common.media.domain.Media;
import org.broadleafcommerce.common.vendor.service.type.ContainerShapeType;
import org.broadleafcommerce.common.vendor.service.type.ContainerSizeType;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Implementations of this interface are used to hold data for a Product. A product is a general description
* of an item that can be sold (for example: a hat). Products are not sold or added to a cart. {@link Sku}s
* which are specific items (for example: a XL Blue Hat) are sold or added to a cart.
* <br>
* <br>
* You should implement this class if you want to make significant changes to how the
* Product is persisted. If you just want to add additional fields then you should extend {@link ProductImpl}.
*
* @author btaylor
* @see {@link ProductImpl},{@link Sku}, {@link Category}
*/
public interface Product extends Serializable, MultiTenantCloneable<Product> {
/**
* The id of the Product.
*
* @return the id of the Product
*/
public Long getId();
/**
* Sets the id of the Product.
*
* @param id - the id of the product
*/
public void setId(Long id);
/**
* Returns the name of the product that is used for display purposes.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return the name of the product
*/
public String getName();
/**
* Sets the name of the product that is used for display purposes.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param name - the name of the Product
*/
public void setName(String name);
/**
* Returns a brief description of the product that is used for display.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return a brief description of the product
*/
public String getDescription();
/**
* Sets a brief description of the product that is used for display.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param description - a brief description of the product
*/
public void setDescription(String description);
/**
* Returns a long description of the product that is used for display.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return a long description of the product
*/
public String getLongDescription();
/**
* Sets a long description of the product that is used for display.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param longDescription the long description
*/
public void setLongDescription(String longDescription);
/**
* Returns the first date a product will be available that is used to determine whether
* to display the product.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return the first date the product will be available
*/
public Date getActiveStartDate();
/**
* Sets the first date a product will be available that is used to determine whether
* to display the product.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param activeStartDate - the first day the product is available
*/
public void setActiveStartDate(Date activeStartDate);
/**
* Returns the last date a product will be available that is used to determine whether
* to display the product.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return the last day the product is available
*/
public Date getActiveEndDate();
/**
* Sets the last date a product will be available that is used to determine whether
* to display the product.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param activeEndDate - the last day the product is available
*/
public void setActiveEndDate(Date activeEndDate);
/**
* Returns a boolean that indicates if the product is currently active.
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return a boolean indicating whether the product is active.
*/
public boolean isActive();
/**
* Gets the default {@link Sku} associated with this Product. A Product is
* required to have a default Sku which holds specific information about the Product
* like weight, dimensions, price, etc. Many of the Product attributes that
* have getters and setters on Product are actually pass-through to the default Sku.
* <br />
* <br />
* Products can also have multiple Skus associated with it that are represented by
* {@link ProductOption}s. For instance, a large, blue shirt. For more information on
* that relationship see {@link #getAdditionalSkus()}.
*
* @return the default Sku for this Product
*/
public Sku getDefaultSku();
/**
* Sets the default Sku for this Product
* <br />
* <br />
* Note: this operation is cascaded with CascadeType.ALL which saves from having to persist the Product
* in 2 operations: first persist the Sku and then take the merged Sku, set it as this Product's default
* Sku, and then persist this Product.
*
* @param defaultSku - the Sku that should be the default for this Product
*/
public void setDefaultSku(Sku defaultSku);
/**
* @return whether or not the default sku can be used for a multi-sku product in the case that no
* product options are set. Defaults to false if not specified. Note that this only affects multi-sku
* products.
*/
public Boolean getCanSellWithoutOptions();
/**
* Sets whether or not the default sku can be sold in the case that no product options are specified. Note
* that this only affects multi-sku products.
*
* @param canSellWithoutOptions
*/
public void setCanSellWithoutOptions(Boolean canSellWithoutOptions);
/**
* Returns a list of {@link Sku}s filtered by whether the Skus are active or not.
* This list does not contain the {@link #getDefaultSku()} and filters by {@link Sku#isActive()}.
*
* @return a list of active Skus from {@link #getAdditionalSkus()} for this Product
* @deprecated use {@link #getAdditionalSkusXrefs()} instead
*/
@Deprecated
public List<Sku> getSkus();
/**
* Gets all the additional Skus associated with this Product. For instance, if this
* Product represented a T-shirt and you could pick the size of the T-shirt as a
* {@link ProductOption} (like "small", "medium", "large") this would return 3 Skus
* if you had different inventory or price constraints on each {@link ProductOptionValue}.
* <br />
* <br />
* This list does not take into account whether any of these additional Skus are active or not, nor
* does it contain the {@link #getDefaultSku()} for this Product. For this functionality, see
* {@link #getSkus()} and {@link #getAllSkus()}, respectively.
*
* @return the additional Skus for this Product
* @see {@link ProductOption}, {@link ProductOptionValue}
*/
public List<Sku> getAdditionalSkus();
/**
* Sets the additional Skus associated to this Product. These additional Skus should
* come from {@link ProductOptionValue}s and are used in instances where you need to track inventory
* or change pricing on a per-option value basis.
*
* @param skus - a List of {@link Sku}s to associate with this Product, usually based off of {@link ProductOption}s
* @see {@link #getAdditionalSkus()}, {@link ProductOption}, {@link ProductOptionValue}
*/
public void setAdditionalSkus(List<Sku> skus);
/**
* Returns all the {@link Sku}s that are associated with this Product (including {@link #getDefaultSku()})
* regardless of whether the {@link Sku}s are active
* <br />
* <br />
* Note: in the event that the default Sku was added to the list of {@link #getAdditionalSkus()}, it is filtered out
* so that only a single instance of {@link #getDefaultSku()} is contained in the resulting list
*
* @return all the Skus associated to this Product
*/
public List<Sku> getAllSkus();
/**
* Gets the media for this product. This serves as a pass-through to
* the {@link getDefaultSku()} media
*
* @return the Media for the default Sku associated with this Product
* @see Sku
*/
public Map<String, Media> getMedia();
/**
* Gets the media for this product. This serves as a pass-through to
* the {@link getDefaultSku()} media
*
* @param media Media map to set on the default Sku associated with this Product
* @see Sku
*/
public void setMedia(Map<String, Media> media);
/**
* Convenience method for returning all of the media associated with this Product by adding
* all the media in {@link #getDefaultSku()} as well as all the media in the Skus represented by
* {@link #getAdditionalSkus()}
*
* @return all of the Media for all of the Skus for this Product
*/
public Map<String, Media> getAllSkuMedia();
/**
* Return the {@link org.broadleafcommerce.core.catalog.domain.Category} that contains this product
*
* @return
*/
public Category getCategory();
/**
* Set the {@link org.broadleafcommerce.core.catalog.domain.Category} that contains this product
*
* @param category
*/
public void setCategory(Category category);
/**
* Returns the default {@link Category} this product is associated with. This method will delegate to
* {@link #getCategory()} by default, unless the "use.legacy.default.category.mode" property is set to
* true in the implementation's property file. If set to true, this method will use legacy behavior,
* which is to return the deprecated defaultCategory field.
*
* @deprecated use {@link #getCategory()} instead
*/
@Deprecated
public Category getDefaultCategory();
/**
* Sets the default {@link Category} to associate this product with. This method will delegate to
* {@link #setCategory(Category)} by default, unless the "use.legacy.default.category.mode" property is set to
* true in the implementation's property file. If set to true, this method will use legacy behavior,
* which is to set the deprecated defaultCategory field.
*
* @deprecated use {@link #setCategory(Category)} instead
* @param defaultCategory - the default {@link Category} to associate this product with
*/
@Deprecated
public void setDefaultCategory(Category defaultCategory);
/**
* Returns the model number of the product
* @return the model number
*/
public String getModel();
/**
* Sets the model number of the product
* @param model
*/
public void setModel(String model);
/**
* Returns the manufacturer name for this product
* @return the manufacturer name
*/
public String getManufacturer();
/**
* Sets the manufacturer for this product
* @param manufacturer
*/
public void setManufacturer(String manufacturer);
/**
* Returns the {@link Dimension} for this product
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return a {@link Dimension} object
*
*/
public Dimension getDimension();
/**
* Sets the {@link Dimension} for this product
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param dimension
*
*/
public void setDimension(Dimension dimension);
/**
* Returns the dimension width
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return width dimension of the product
*
*/
public BigDecimal getWidth();
/**
* Sets the dimension width
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param width
*
*/
public void setWidth(BigDecimal width);
/**
* Returns the dimension height
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return height dimension of the product
*
*/
public BigDecimal getHeight();
/**
* Sets the dimension height
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param height
*
*/
public void setHeight(BigDecimal height);
/**
* Returns the dimension depth
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return depth dimension of the product
*
*/
public BigDecimal getDepth();
/**
* Sets the dimension depth
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param depth
*/
public void setDepth(BigDecimal depth);
/**
* Gets the dimension girth
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return the dimension girth
*/
public BigDecimal getGirth();
/**
* Sets the dimension girth
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param girth
*/
public void setGirth(BigDecimal girth);
/**
* Returns the dimension container size
*
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return dimension container size
*/
public ContainerSizeType getSize();
/**
* Sets the dimension container size
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param size
*/
public void setSize(ContainerSizeType size);
/**
* Gets the dimension container shape
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return dimension container shape
*/
public ContainerShapeType getContainer();
/**
* Sets the dimension container shape
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param container
*/
public void setContainer(ContainerShapeType container);
/**
* Returns a String representation of the dimension
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return a dimension String
*/
public String getDimensionString();
/**
* Returns the weight of the product
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @return weight of product
*/
public Weight getWeight();
/**
* Sets the product weight
* <br />
* <br />
* <b>Note:</b> this is a convenience method that merely serves as
* a pass-through to the same method via {@link getDefaultSku()}
*
* @param weight
*/
public void setWeight(Weight weight);
/**
* Returns a List of this product's related Cross Sales
* @return
*/
public List<RelatedProduct> getCrossSaleProducts();
/**
* Sets the related Cross Sales
* @param crossSaleProducts
*/
public void setCrossSaleProducts(List<RelatedProduct> crossSaleProducts);
/**
* Returns a List of this product's related Up Sales
* @return
*/
public List<RelatedProduct> getUpSaleProducts();
/**
* Sets the related Up Sales
* @param upSaleProducts
*/
public void setUpSaleProducts(List<RelatedProduct> upSaleProducts);
/**
* Returns whether or not the product is featured
* @return isFeaturedProduct as Boolean
*/
public boolean isFeaturedProduct();
/**
* Sets whether or not the product is featured
* @param isFeaturedProduct
*/
public void setFeaturedProduct(boolean isFeaturedProduct);
/**
* Generic key-value pair of attributes to associate to this Product for maximum
* extensibility.
*
* @return the attributes for this Product
*/
public Map<String, ProductAttribute> getProductAttributes();
/**
* Sets a generic list of key-value pairs for Product
* @param productAttributes
*/
public void setProductAttributes(Map<String, ProductAttribute> productAttributes);
/**
* Gets the promotional message for this Product. For instance, this could be a message
* advertising a limited-time offer on the Product
*
* @return the Product's promotional message
*/
public String getPromoMessage();
/**
* Sets the promotional message for this Product
*
* @param promoMessage
*/
public void setPromoMessage(String promoMessage);
/**
* The available {@link ProductOption}s for this Product. For instance, if this
* Product is a T-Shirt, you might be able to specify a size and color. This would
* be modeled by 2 {@link ProductOption}s, each that could have multiple {@link ProductOptionValue}s
* (which could be "small" "medium" "large", "blue", "yellow", "green"). For specific pricing or
* inventory needs on a per-value basis, multiple Skus can be associated to this Product based
* off of the {@link ProductOptionValue}s
*
* @deprecated use getProductOptionXrefs instead
* @return the {@link ProductOption}s for this Product
* @see Product#getAdditionalSkus(), {@link ProductOption}, {@link ProductOptionValue}
*/
@Deprecated
public List<ProductOption> getProductOptions();
public List<ProductOptionXref> getProductOptionXrefs();
/**
* Sets the list of available ProductOptions for this Product
*
* @deprecated use setProductOptionXrefs instead
* @param productOptions
*/
@Deprecated
public void setProductOptions(List<ProductOption> productOptions);
public void setProductOptionXrefs(List<ProductOptionXref> productOptions);
/**
* Returns a Map of product option values, keyed by the product option name.
* E.g. "color":["red","green","black"]
* @return
*/
public Map<String, Set<String>> getProductOptionValuesMap();
/**
* A product can have a designated URL. When set, the ProductHandlerMapping will check for this
* URL and forward this user to the {@link #getDisplayTemplate()}.
*
* Alternatively, most sites will rely on the {@link Product#getGeneratedUrl()} to define the
* url for a product page.
*
* @see org.broadleafcommerce.core.web.catalog.ProductHandlerMapping
* @return
*/
public String getUrl();
/**
* Sets the URL that a customer could type in to reach this product.
*
* @param url
*/
public void setUrl(String url);
/**
* @return the flag for whether or not the URL should not be generated in the admin
*/
public Boolean getOverrideGeneratedUrl();
/**
* Sets the flag for whether or not the URL should not be generated in the admin
*
* @param overrideGeneratedUrl
*/
public void setOverrideGeneratedUrl(Boolean overrideGeneratedUrl);
/**
* Returns the url-fragment for this product. By default, the system will attempt to create a
* unique url-fragment for this product by taking {@link Product#getName()}, removing special
* characters, and replacing spaces with dashes.
*/
public String getUrlKey();
/**
* Sets a url-fragment to be used with this product. By default, the system will attempt to create a
* unique url-fragment for this product by taking {@link Product#getName()}, removing special
* characters, and replacing spaces with dashes.
*/
public void setUrlKey(String url);
/**
* Returns the name of a display template that is used to render this product. Most implementations have a default
* template for all products. This allows the user to define a specific template to be used by this product.
*
* @return
*/
public String getDisplayTemplate();
/**
* Sets the name of a display template that is used to render this product. Most implementations have a default
* template for all products. This allows the user to define a specific template to be used by this product.
* @param displayTemplate
*/
public void setDisplayTemplate(String displayTemplate);
/**
* Generates a URL that can be used to access the product.
* Builds the url by combining the url of the default category with the getUrlKey() of this product.
*/
public String getGeneratedUrl();
/**
* Returns a list of the cross sale products for this product as well as
* all cross sale products in all parent categories of this product.
*
* @return the cumulative cross sale products
*/
public List<RelatedProduct> getCumulativeCrossSaleProducts();
/**
* Returns a list of the upsale products for this product as well as
* all upsale products in all parent categories of this product.
*
* @return the cumulative upsale products
*/
public List<RelatedProduct> getCumulativeUpSaleProducts();
/**
* Removes any currently stored dynamic pricing
*/
public void clearDynamicPrices();
/**
* Retrieve all the xref entities linking this product to parent categories
*/
public List<CategoryProductXref> getAllParentCategoryXrefs();
/**
* Set all the xref entities linking this product to parent categories
*/
public void setAllParentCategoryXrefs(List<CategoryProductXref> allParentCategories);
/**
* Returns all parent {@link Category}(s) this product is associated with.
*
* @deprecated Use getAllParentCategoryXrefs() instead.
* @return the all parent categories for this product
*/
@Deprecated
public List<Category> getAllParentCategories();
/**
* Sets all parent {@link Category}s this product is associated with.
*
* @deprecated Use setAllParentCategoryXrefs() instead.
* @param allParentCategories - a List of all parent {@link Category}(s) to associate this product with
*/
@Deprecated
public void setAllParentCategories(List<Category> allParentCategories);
/**
* Returns the tax code of the product. If the tax code is null, then returns the tax code of this product's category.
* @return taxCode
*/
public String getTaxCode();
/**
* Sets the tax code for this product.
* @param taxCode
*/
public void setTaxCode(String taxCode);
}
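/*
 * A minimal caller-side sketch (not part of the Broadleaf source) illustrating the default-Sku
 * pass-through and the Sku/option accessors documented above. The Product instance is assumed to
 * be obtained elsewhere, for example from a catalog lookup; only methods declared on this
 * interface are used.
 */
class ProductUsageSketch {

    void printSummary(Product product) {
        // getName() and getDescription() are convenience pass-throughs to the default Sku
        System.out.println(product.getName() + ": " + product.getDescription());

        // getAllSkus() contains the default Sku plus any additional (product-option-based) Skus
        for (Sku sku : product.getAllSkus()) {
            System.out.println("  sku: " + sku);
        }

        // option name -> allowed values, e.g. "color" -> ["red", "green", "black"]
        for (Map.Entry<String, Set<String>> option : product.getProductOptionValuesMap().entrySet()) {
            System.out.println("  option " + option.getKey() + " -> " + option.getValue());
        }
    }
}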
|
|
/*
* JBoss, Home of Professional Open Source.
* Copyright Red Hat, Inc., and individual contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.cordova.push;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import org.apache.cordova.*;
import org.jboss.aerogear.android.Callback;
import org.jboss.aerogear.android.unifiedpush.PushConfig;
import org.jboss.aerogear.android.unifiedpush.PushRegistrar;
import org.jboss.aerogear.android.unifiedpush.Registrations;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* @author edewit@redhat.com
*/
public class PushPlugin extends CordovaPlugin {
public static final String TAG = "PushPlugin";
private static final String UNIFIED_PUSH_URL = "pushServerURL";
private static final String GCM_SENDER_ID = "senderID";
private static final String VARIANT_ID = "variantID";
private static final String SECRET = "variantSecret";
private static final String DEVICE_TOKEN = "deviceToken";
private static final String CATEGORIES = "categories";
private static final String ALIAS = "alias";
public static final String REGISTER = "register";
public static final String UNREGISTER = "unregister";
private static final String REGISTRAR = "registrar";
private static final String SETTINGS = "settings";
private static CallbackContext context;
private static Bundle cachedMessage = null;
private static boolean foreground = false;
private SharedPreferences preferences;
/**
* Gets the application context from cordova's main activity.
* @return the application context
*/
private Context getApplicationContext() {
return this.cordova.getActivity().getApplicationContext();
}
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
super.initialize(cordova, webView);
preferences = cordova.getActivity().getSharedPreferences(SETTINGS, Context.MODE_PRIVATE);
}
@Override
public boolean execute(String action, JSONArray data, final CallbackContext callbackContext) {
Log.v(TAG, "execute: action=" + action);
foreground = true;
if (REGISTER.equals(action)) {
Log.v(TAG, "execute: data=" + data.toString());
context = callbackContext;
try {
if(data.getJSONObject(0).getBoolean("idle")){
if (cachedMessage != null) {
Log.v(TAG, "sending cached extras");
sendMessage(cachedMessage);
cachedMessage = null;
}
return true;
}
} catch (JSONException e) {}
try {
JSONObject pushConfig = parseConfig(data);
saveConfig(pushConfig);
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
register(callbackContext);
}
});
} catch (JSONException e) {
callbackContext.error(e.getMessage());
return false;
}
if (cachedMessage != null) {
Log.v(TAG, "sending cached extras");
sendMessage(cachedMessage);
cachedMessage = null;
}
return true;
} else if (UNREGISTER.equals(action)) {
unRegister(callbackContext);
return true;
} else {
callbackContext.error("Invalid action : " + action);
}
return false;
}
private JSONObject parseConfig(JSONArray data) throws JSONException {
JSONObject pushConfig = data.getJSONObject(0);
if (!pushConfig.isNull("android")) {
final JSONObject android = pushConfig.getJSONObject("android");
for (Iterator iterator = android.keys(); iterator.hasNext(); ) {
String key = (String) iterator.next();
pushConfig.put(key, android.get(key));
}
pushConfig.remove("android");
}
return pushConfig;
}
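/*
 * Illustration only; the JavaScript-side payload below is an assumption, not taken from this file.
 * parseConfig flattens a nested "android" object into the top-level config, so
 *
 *   {"pushServerURL":"https://push.example.com","alias":"someUser",
 *    "android":{"senderID":"123456789012","variantID":"...","variantSecret":"..."}}
 *
 * becomes
 *
 *   {"pushServerURL":"https://push.example.com","alias":"someUser",
 *    "senderID":"123456789012","variantID":"...","variantSecret":"..."}
 *
 * which matches the keys later read back by getPushConfig().
 */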
private void saveConfig(JSONObject config) throws JSONException {
final SharedPreferences.Editor editor = preferences.edit();
for (Iterator i = config.keys(); i.hasNext(); ) {
final String key = String.valueOf(i.next());
editor.putString(key, config.getString(key));
}
editor.commit();
}
private void register(final CallbackContext callbackContext) {
try {
Registrations registrations = new Registrations();
final PushConfig pushConfig = getPushConfig();
PushRegistrar registrar = registrations.push(REGISTRAR, pushConfig);
registrar.register(getApplicationContext(), new Callback<Void>() {
@Override
public void onSuccess(Void data) {
preferences.edit().putString(DEVICE_TOKEN, pushConfig.getDeviceToken()).commit();
JSONObject obj = new JSONObject();
try {
obj.put("type", "successCallback");
JSONObject retdata = new JSONObject();
retdata.put("deviceToken", pushConfig.getDeviceToken());
retdata.put("operatingSystem", pushConfig.getOperatingSystem());
retdata.put("osVersion", pushConfig.getOsVersion());
obj.put("retData", retdata);
} catch (JSONException e) { }
PluginResult result = new PluginResult(PluginResult.Status.OK, obj);
result.setKeepCallback(true);
callbackContext.sendPluginResult(result);
}
@Override
public void onFailure(Exception e) {
callbackContext.error(e.getMessage());
}
});
} catch (Exception e) {
callbackContext.error(e.getMessage());
}
}
private void unRegister(CallbackContext callbackContext) {
PushRegistrar registrar = getPushRegistrar();
registrar.unregister(getApplicationContext(), new VoidCallback(callbackContext));
}
private PushRegistrar getPushRegistrar() {
Registrations registrations = new Registrations();
return registrations.push(REGISTRAR, getPushConfig());
}
private PushConfig getPushConfig() {
try {
final URI pushServerURI = new URI(preferences.getString(UNIFIED_PUSH_URL, null));
PushConfig config = new PushConfig(pushServerURI, preferences.getString(GCM_SENDER_ID, null));
config.setVariantID(preferences.getString(VARIANT_ID, null));
config.setSecret(preferences.getString(SECRET, null));
config.setAlias(preferences.getString(ALIAS, null));
config.setDeviceToken(preferences.getString(DEVICE_TOKEN, null));
final String categories = preferences.getString(CATEGORIES, null);
config.setCategories(convert(categories));
return config;
} catch (URISyntaxException e) {
throw new RuntimeException(e);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
private List<String> convert(String categories) throws JSONException {
List<String> categoryList = null;
if (categories != null) {
categoryList = new ArrayList<String>();
final JSONArray jsonArray = new JSONArray(categories);
for (int i = 0; i < jsonArray.length(); i++) {
categoryList.add(jsonArray.getString(i));
}
}
return categoryList;
}
/**
* Sends the message to the client application.
* If the client application isn't currently active, the message is cached for later processing.
* @param message the message to be sent to the client
*/
public static void sendMessage(Bundle message) {
if (message != null) {
message.putBoolean("foreground", foreground);
if (context != null) {
JSONObject obj = new JSONObject();
try {
obj.put("type", "onNotificationCallback");
obj.put("retData", convertBundleToJson(message));
} catch (JSONException e) { }
PluginResult result = new PluginResult(PluginResult.Status.OK, obj);
result.setKeepCallback(true);
context.sendPluginResult(result);
} else {
Log.v(TAG, "sendMessage: caching message to send at a later time.");
cachedMessage = message;
}
}
}
@Override
public void onPause(boolean multitasking) {
super.onPause(multitasking);
foreground = false;
}
@Override
public void onResume(boolean multitasking) {
super.onResume(multitasking);
foreground = true;
}
/**
* Serializes a bundle to JSON.
* @param message the bundle to be serialized
* @return the JSON representation, or null if serialization fails
*/
private static JSONObject convertBundleToJson(Bundle message) {
try {
JSONObject json;
json = new JSONObject();
JSONObject jsondata = new JSONObject();
for (String key : message.keySet()) {
Object value = message.get(key);
// System data from Android
if (key.equals("from") || key.equals("collapse_key")) {
json.put(key, value);
} else if (key.equals("foreground")) {
json.put(key, message.getBoolean("foreground"));
} else if (key.equals("coldstart")) {
json.put(key, message.getBoolean("coldstart"));
} else {
// Maintain backwards compatibility
if (key.equals("message") || key.equals("msgcnt") || key.equals("sound") || key.equals("alert")) {
json.put(key, value);
}
if (value instanceof String) {
// Try to figure out if the value is another JSON object
String strValue = (String) value;
if (strValue.startsWith("{")) {
try {
JSONObject json2 = new JSONObject(strValue);
jsondata.put(key, json2);
} catch (Exception e) {
jsondata.put(key, value);
}
// Try to figure out if the value is another JSON array
} else if (strValue.startsWith("[")) {
try {
JSONArray json2 = new JSONArray(strValue);
jsondata.put(key, json2);
} catch (Exception e) {
jsondata.put(key, value);
}
} else {
jsondata.put(key, value);
}
}
}
} // for (String key : message.keySet())
json.put("payload", jsondata);
Log.v(TAG, "extrasToJSON: " + json.toString());
return json;
} catch (JSONException e) {
Log.e(TAG, "extrasToJSON: JSON exception");
}
return null;
}
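/*
 * Example with illustrative values (not taken from this file): a bundle containing
 *   from="gcm-sender", foreground=true, alert="Hi", custom="{\"badge\":1}"
 * is serialized roughly as
 *   {"from":"gcm-sender","foreground":true,"alert":"Hi",
 *    "payload":{"alert":"Hi","custom":{"badge":1}}}
 * i.e. recognized system keys stay at the top level, while other string values (parsed as nested
 * JSON where possible) are collected under "payload".
 */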
public static boolean isInForeground() {
return foreground;
}
public static void setForeground(boolean foreground) {
PushPlugin.foreground = foreground;
}
public static boolean isActive() {
return context != null;
}
public void onDestroy() {
context = null;
super.onDestroy();
}
private class VoidCallback implements Callback<Void> {
private final CallbackContext callbackContext;
public VoidCallback(CallbackContext callbackContext) {
this.callbackContext = callbackContext;
}
@Override
public void onSuccess(Void data) {
callbackContext.success("OK");
}
@Override
public void onFailure(Exception e) {
callbackContext.error(e.getMessage());
}
}
}
|
|
/**
* Copyright 2010 The ForPlay Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package forplay.flash;
import forplay.core.ForPlay;
import forplay.flash.FlashCanvasLayer.Context2d;
import forplay.core.Canvas;
import forplay.core.Gradient;
import forplay.core.Image;
import forplay.core.Path;
import forplay.core.Pattern;
class FlashCanvas implements Canvas {
private final int width, height;
private boolean dirty = true;
private final Context2d context2d;
FlashCanvas(int width, int height, Context2d context2d) {
this.width = width;
this.height = height;
this.context2d = context2d;
}
@Override
public void clear() {
dirty = true;
}
@Override
public void clip(Path path) {
}
@Override
public void drawImage(Image img, float x, float y) {
assert img instanceof FlashImage;
dirty = true;
ForPlay.log().info("Drawing image " + ((FlashImage) img).bitmapData());
context2d.drawImage(((FlashImage) img).bitmapData(), x, y);
}
@Override
public void drawImage(Image img, float x, float y, float w, float h) {
assert img instanceof FlashImage;
dirty = true;
}
@Override
public void drawImage(Image img, float dx, float dy, float dw, float dh,
float sx, float sy, float sw, float sh) {
assert img instanceof FlashImage;
dirty = true;
}
@Override
public void drawImageCentered(Image img, float x, float y) {
drawImage(img, x - img.width()/2, y - img.height()/2);
dirty = true;
}
@Override
public void drawLine(float x0, float y0, float x1, float y1) {
dirty = true;
}
@Override
public void drawPoint(float x, float y) {
dirty = true;
}
@Override
public void drawText(String text, float x, float y) {
dirty = true;
}
@Override
public void fillCircle(float x, float y, float radius) {
dirty = true;
}
@Override
public void fillPath(Path path) {
dirty = true;
}
@Override
public void fillRect(float x, float y, float w, float h) {
dirty = true;
}
@Override
public final int height() {
return height;
}
@Override
public void restore() {
}
@Override
public void rotate(float radians) {
}
@Override
public void save() {
}
@Override
public void scale(float x, float y) {
}
@Override
public void setCompositeOperation(Composite composite) {
}
@Override
public void setFillColor(int color) {
}
@Override
public void setFillGradient(Gradient gradient) {
}
@Override
public void setFillPattern(Pattern pattern) {
}
@Override
public void setLineCap(LineCap cap) {
}
@Override
public void setLineJoin(LineJoin join) {
}
@Override
public void setMiterLimit(float miter) {
}
@Override
public void setStrokeColor(int color) {
}
@Override
public void setStrokeWidth(float w) {
}
@Override
public void setTransform(float m11, float m12, float m21, float m22, float dx, float dy) {
}
@Override
public void strokeCircle(float x, float y, float radius) {
dirty = true;
}
@Override
public void strokePath(Path path) {
dirty = true;
}
@Override
public void strokeRect(float x, float y, float w, float h) {
dirty = true;
}
@Override
public void transform(float m11, float m12, float m21, float m22, float dx,
float dy) {
}
@Override
public void translate(float x, float y) {
}
@Override
public final int width() {
return width;
}
void clearDirty() {
dirty = false;
}
boolean dirty() {
return dirty;
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p/>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/CreateDBSnapshot" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateDBSnapshotRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The identifier for the DB snapshot.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* </p>
*/
private String dBSnapshotIdentifier;
/**
* <p>
* The identifier of the DB instance that you want to create the snapshot of.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
* </ul>
*/
private String dBInstanceIdentifier;
private com.amazonaws.internal.SdkInternalList<Tag> tags;
/**
* Default constructor for CreateDBSnapshotRequest object. Callers should use the setter or fluent setter (with...)
* methods to initialize the object after creating it.
*/
public CreateDBSnapshotRequest() {
}
/**
* Constructs a new CreateDBSnapshotRequest object. Callers should use the setter or fluent setter (with...) methods
* to initialize any additional object members.
*
* @param dBSnapshotIdentifier
* The identifier for the DB snapshot.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* @param dBInstanceIdentifier
* The identifier of the DB instance that you want to create the snapshot of.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
*/
public CreateDBSnapshotRequest(String dBSnapshotIdentifier, String dBInstanceIdentifier) {
setDBSnapshotIdentifier(dBSnapshotIdentifier);
setDBInstanceIdentifier(dBInstanceIdentifier);
}
/**
* <p>
* The identifier for the DB snapshot.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* </p>
*
* @param dBSnapshotIdentifier
* The identifier for the DB snapshot.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
*/
public void setDBSnapshotIdentifier(String dBSnapshotIdentifier) {
this.dBSnapshotIdentifier = dBSnapshotIdentifier;
}
/**
* <p>
* The identifier for the DB snapshot.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* </p>
*
* @return The identifier for the DB snapshot.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
*/
public String getDBSnapshotIdentifier() {
return this.dBSnapshotIdentifier;
}
/**
* <p>
* The identifier for the DB snapshot.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* </p>
*
* @param dBSnapshotIdentifier
* The identifier for the DB snapshot.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Can't be null, empty, or blank
* </p>
* </li>
* <li>
* <p>
* Must contain from 1 to 255 letters, numbers, or hyphens
* </p>
* </li>
* <li>
* <p>
* First character must be a letter
* </p>
* </li>
* <li>
* <p>
* Can't end with a hyphen or contain two consecutive hyphens
* </p>
* </li>
* </ul>
* <p>
* Example: <code>my-snapshot-id</code>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateDBSnapshotRequest withDBSnapshotIdentifier(String dBSnapshotIdentifier) {
setDBSnapshotIdentifier(dBSnapshotIdentifier);
return this;
}
/**
* <p>
* The identifier of the DB instance that you want to create the snapshot of.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
* </ul>
*
* @param dBInstanceIdentifier
* The identifier of the DB instance that you want to create the snapshot of.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
*/
public void setDBInstanceIdentifier(String dBInstanceIdentifier) {
this.dBInstanceIdentifier = dBInstanceIdentifier;
}
/**
* <p>
* The identifier of the DB instance that you want to create the snapshot of.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
* </ul>
*
* @return The identifier of the DB instance that you want to create the snapshot of.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
*/
public String getDBInstanceIdentifier() {
return this.dBInstanceIdentifier;
}
/**
* <p>
* The identifier of the DB instance that you want to create the snapshot of.
* </p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
* </ul>
*
* @param dBInstanceIdentifier
* The identifier of the DB instance that you want to create the snapshot of.</p>
* <p>
* Constraints:
* </p>
* <ul>
* <li>
* <p>
* Must match the identifier of an existing DBInstance.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateDBSnapshotRequest withDBInstanceIdentifier(String dBInstanceIdentifier) {
setDBInstanceIdentifier(dBInstanceIdentifier);
return this;
}
/**
* @return The tags to assign to the DB snapshot.
*/
public java.util.List<Tag> getTags() {
if (tags == null) {
tags = new com.amazonaws.internal.SdkInternalList<Tag>();
}
return tags;
}
/**
* @param tags
*        The tags to assign to the DB snapshot.
*/
public void setTags(java.util.Collection<Tag> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
}
/**
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
*        The tags to assign to the DB snapshot.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateDBSnapshotRequest withTags(Tag... tags) {
if (this.tags == null) {
setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
}
for (Tag ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* @param tags
*        The tags to assign to the DB snapshot.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateDBSnapshotRequest withTags(java.util.Collection<Tag> tags) {
setTags(tags);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDBSnapshotIdentifier() != null)
sb.append("DBSnapshotIdentifier: ").append(getDBSnapshotIdentifier()).append(",");
if (getDBInstanceIdentifier() != null)
sb.append("DBInstanceIdentifier: ").append(getDBInstanceIdentifier()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateDBSnapshotRequest == false)
return false;
CreateDBSnapshotRequest other = (CreateDBSnapshotRequest) obj;
if (other.getDBSnapshotIdentifier() == null ^ this.getDBSnapshotIdentifier() == null)
return false;
if (other.getDBSnapshotIdentifier() != null && other.getDBSnapshotIdentifier().equals(this.getDBSnapshotIdentifier()) == false)
return false;
if (other.getDBInstanceIdentifier() == null ^ this.getDBInstanceIdentifier() == null)
return false;
if (other.getDBInstanceIdentifier() != null && other.getDBInstanceIdentifier().equals(this.getDBInstanceIdentifier()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDBSnapshotIdentifier() == null) ? 0 : getDBSnapshotIdentifier().hashCode());
hashCode = prime * hashCode + ((getDBInstanceIdentifier() == null) ? 0 : getDBInstanceIdentifier().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public CreateDBSnapshotRequest clone() {
return (CreateDBSnapshotRequest) super.clone();
}
}
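/*
 * Illustrative usage sketch (not part of the generated SDK sources): shows how this
 * request is typically populated via the fluent "with..." setters defined above.
 * The instance/snapshot identifiers and the tag key/value below are made-up example
 * values, and the Tag fluent setters are assumed to follow the same generated
 * pattern as this class.
 */
class CreateDBSnapshotRequestUsageSketch {
    static CreateDBSnapshotRequest buildExampleRequest() {
        // Chain the fluent setters; each returns the request itself.
        return new CreateDBSnapshotRequest()
                .withDBInstanceIdentifier("example-db-instance")
                .withDBSnapshotIdentifier("my-snapshot-id")
                .withTags(new Tag().withKey("environment").withValue("test"));
    }
}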
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment.standby;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeFalse;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Random;
import com.google.common.io.ByteStreams;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.CIHelper;
import org.apache.jackrabbit.oak.commons.junit.TemporaryPort;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.standby.client.StandbyClientSync;
import org.apache.jackrabbit.oak.segment.standby.server.StandbyServerSync;
import org.apache.jackrabbit.oak.segment.test.proxy.NetworkErrorProxy;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public abstract class DataStoreTestBase extends TestBase {
static final long GB = 1024 * 1024 * 1024;
private NetworkErrorProxy proxy;
@Rule
public TemporaryPort serverPort = new TemporaryPort();
@Rule
public TemporaryPort proxyPort = new TemporaryPort();
abstract FileStore getPrimary();
abstract FileStore getSecondary();
abstract boolean storesShouldBeEqual();
private InputStream newRandomInputStream(final long size, final int seed) {
return new InputStream() {
private final Random random = new Random(seed);
private long count = 0;
@Override
public int read() throws IOException {
if (count >= size) {
return -1;
}
count++;
return Math.abs(random.nextInt());
}
};
}
protected byte[] addTestContent(NodeStore store, String child, int size)
throws CommitFailedException, IOException {
NodeBuilder builder = store.getRoot().builder();
builder.child(child).setProperty("ts", System.currentTimeMillis());
byte[] data = new byte[size];
new Random().nextBytes(data);
Blob blob = store.createBlob(new ByteArrayInputStream(data));
builder.child(child).setProperty("testBlob", blob);
store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
return data;
}
protected void addTestContentOnTheFly(NodeStore store, String child, long size, int seed) throws CommitFailedException, IOException {
NodeBuilder builder = store.getRoot().builder();
builder.child(child).setProperty("ts", System.currentTimeMillis());
InputStream randomInputStream = newRandomInputStream(size, seed);
Blob blob = store.createBlob(randomInputStream);
builder.child(child).setProperty("testBlob", blob);
store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
}
@Before
public void before() {
proxy = new NetworkErrorProxy(proxyPort.getPort(), getServerHost(), serverPort.getPort());
}
@After
public void after() {
proxy.close();
}
@Test
public void testSync() throws Exception {
final int blobSize = 5 * MB;
FileStore primary = getPrimary();
FileStore secondary = getSecondary();
NodeStore store = SegmentNodeStoreBuilders.builder(primary).build();
byte[] data = addTestContent(store, "server", blobSize);
try (
StandbyServerSync serverSync = new StandbyServerSync(serverPort.getPort(), primary, 1 * MB);
StandbyClientSync cl = newStandbyClientSync(secondary, serverPort.getPort())
) {
serverSync.start();
primary.flush();
cl.run();
assertEquals(primary.getHead(), secondary.getHead());
}
assertTrue(primary.getStats().getApproximateSize() < MB);
assertTrue(secondary.getStats().getApproximateSize() < MB);
PropertyState ps = secondary.getHead().getChildNode("root")
.getChildNode("server").getProperty("testBlob");
assertNotNull(ps);
assertEquals(Type.BINARY.tag(), ps.getType().tag());
Blob b = ps.getValue(Type.BINARY);
assertEquals(blobSize, b.length());
byte[] testData = new byte[blobSize];
try (
InputStream blobInputStream = b.getNewStream()
) {
ByteStreams.readFully(blobInputStream, testData);
assertArrayEquals(data, testData);
}
}
/*
* See OAK-5902.
*/
@Test
public void testSyncBigBlob() throws Exception {
assumeFalse(CIHelper.jenkins()); // FIXME OAK-6678: fails on Jenkins
final long blobSize = (long) (1 * GB);
final int seed = 13;
FileStore primary = getPrimary();
FileStore secondary = getSecondary();
NodeStore store = SegmentNodeStoreBuilders.builder(primary).build();
addTestContentOnTheFly(store, "server", blobSize, seed);
try (
StandbyServerSync serverSync = new StandbyServerSync(serverPort.getPort(), primary, 8 * MB);
StandbyClientSync cl = newStandbyClientSync(secondary, serverPort.getPort(), 60_000)
) {
serverSync.start();
primary.flush();
cl.run();
assertEquals(primary.getHead(), secondary.getHead());
}
assertTrue(primary.getStats().getApproximateSize() < MB);
assertTrue(secondary.getStats().getApproximateSize() < MB);
PropertyState ps = secondary.getHead().getChildNode("root")
.getChildNode("server").getProperty("testBlob");
assertNotNull(ps);
assertEquals(Type.BINARY.tag(), ps.getType().tag());
Blob b = ps.getValue(Type.BINARY);
assertEquals(blobSize, b.length());
try (
InputStream randomInputStream = newRandomInputStream(blobSize, seed);
InputStream blobInputStream = b.getNewStream()
) {
assertTrue(IOUtils.contentEquals(randomInputStream, blobInputStream));
}
}
/*
* See OAK-4969.
*/
@Test
public void testSyncUpdatedBinaryProperty() throws Exception {
final int blobSize = 5 * MB;
FileStore primary = getPrimary();
FileStore secondary = getSecondary();
NodeStore store = SegmentNodeStoreBuilders.builder(primary).build();
try (
StandbyServerSync serverSync = new StandbyServerSync(serverPort.getPort(), primary, 1 * MB);
StandbyClientSync clientSync = newStandbyClientSync(secondary, serverPort.getPort())
) {
serverSync.start();
addTestContent(store, "server", blobSize);
primary.flush();
clientSync.run();
assertEquals(primary.getHead(), secondary.getHead());
addTestContent(store, "server", blobSize);
primary.flush();
clientSync.run();
assertEquals(primary.getHead(), secondary.getHead());
}
}
@Test
public void testProxySkippedBytes() throws Exception {
useProxy(100, 1, -1, false);
}
@Test
public void testProxySkippedBytesIntermediateChange() throws Exception {
useProxy(100, 1, -1, true);
}
@Test
public void testProxyFlippedStartByte() throws Exception {
useProxy(0, 0, 0, false);
}
@Test
public void testProxyFlippedIntermediateByte() throws Exception {
useProxy(0, 0, 150, false);
}
@Test
public void testProxyFlippedIntermediateByte2() throws Exception {
useProxy(0, 0, 150000, false);
}
@Test
public void testProxyFlippedIntermediateByteChange() throws Exception {
useProxy(0, 0, 150, true);
}
@Test
public void testProxyFlippedIntermediateByteChange2() throws Exception {
useProxy(0, 0, 150000, true);
}
private void useProxy(int skipPosition, int skipBytes, int flipPosition, boolean intermediateChange) throws Exception {
int blobSize = 5 * MB;
FileStore primary = getPrimary();
FileStore secondary = getSecondary();
NodeStore store = SegmentNodeStoreBuilders.builder(primary).build();
byte[] data = addTestContent(store, "server", blobSize);
try (
StandbyServerSync serverSync = new StandbyServerSync(serverPort.getPort(), primary, 1 * MB);
StandbyClientSync clientSync = newStandbyClientSync(secondary, proxyPort.getPort())
) {
proxy.skipBytes(skipPosition, skipBytes);
proxy.flipByte(flipPosition);
proxy.connect();
serverSync.start();
primary.flush();
clientSync.run();
if (skipBytes > 0 || flipPosition >= 0) {
if (!storesShouldBeEqual()) {
assertFalse("stores are not expected to be equal", primary.getHead().equals(secondary.getHead()));
}
proxy.reset();
if (intermediateChange) {
blobSize = 2 * MB;
data = addTestContent(store, "server", blobSize);
primary.flush();
}
clientSync.run();
}
assertEquals(primary.getHead(), secondary.getHead());
}
assertTrue(primary.getStats().getApproximateSize() < MB);
assertTrue(secondary.getStats().getApproximateSize() < MB);
PropertyState ps = secondary.getHead().getChildNode("root")
.getChildNode("server").getProperty("testBlob");
assertNotNull(ps);
assertEquals(Type.BINARY.tag(), ps.getType().tag());
Blob b = ps.getValue(Type.BINARY);
assertEquals(blobSize, b.length());
byte[] testData = new byte[blobSize];
try (
InputStream blobInputStream = b.getNewStream()
) {
ByteStreams.readFully(blobInputStream, testData);
assertArrayEquals(data, testData);
}
}
}
|
|
/*
* Copyright 1999,2004-2006 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.tribes.tipis;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.apache.catalina.tribes.Channel;
import org.apache.catalina.tribes.ChannelException;
import org.apache.catalina.tribes.ChannelListener;
import org.apache.catalina.tribes.Heartbeat;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.tribes.MembershipListener;
import org.apache.catalina.tribes.group.Response;
import org.apache.catalina.tribes.group.RpcCallback;
import org.apache.catalina.tribes.group.RpcChannel;
import org.apache.catalina.tribes.io.XByteBuffer;
import org.apache.catalina.tribes.membership.MemberImpl;
import org.apache.catalina.tribes.util.Arrays;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import java.util.ConcurrentModificationException;
/**
*
* @author Filip Hanik
* @version 1.0
*/
public abstract class AbstractReplicatedMap extends LinkedHashMap implements RpcCallback, ChannelListener, MembershipListener, Heartbeat {
protected static Log log = LogFactory.getLog(AbstractReplicatedMap.class);
/**
* The default initial capacity - MUST be a power of two.
*/
public static final int DEFAULT_INITIAL_CAPACITY = 16;
/**
* The load factor used when none specified in constructor.
**/
public static final float DEFAULT_LOAD_FACTOR = 0.75f;
/**
* Charset used to encode the map context name that identifies this map
*/
final String chset = "ISO-8859-1";
//------------------------------------------------------------------------------
// INSTANCE VARIABLES
//------------------------------------------------------------------------------
/**
* Timeout for RPC messages, how long we will wait for a reply
*/
protected transient long rpcTimeout = 5000;
/**
* Reference to the channel for sending messages
*/
protected transient Channel channel;
/**
* The RpcChannel to send RPC messages through
*/
protected transient RpcChannel rpcChannel;
/**
* The map context name makes this map unique; this
* allows us to have more than one map shared
* through one channel
*/
protected transient byte[] mapContextName;
/**
* Has the state been transferred
*/
protected transient boolean stateTransferred = false;
/**
* Simple lock object for transfers
*/
protected transient Object stateMutex = new Object();
/**
* A list of members in our map
*/
protected transient HashMap mapMembers = new HashMap();
/**
* Our default send options
*/
protected transient int channelSendOptions = Channel.SEND_OPTIONS_DEFAULT;
/**
* The owner of this map, e.g. a SessionManager
*/
protected transient Object mapOwner;
/**
* External class loaders, required for serialization and deserialization to be performed successfully.
*/
protected transient ClassLoader[] externalLoaders;
/**
* The node we are currently backing up data to; this index will rotate
* on a round-robin basis
*/
protected transient int currentNode = 0;
/**
* Since the map keeps internal membership,
* this is the timeout for a ping message to be responded to.
* If a remote map doesn't respond within this timeframe,
* it's considered dead.
*/
protected transient long accessTimeout = 5000;
/**
* Readable string of the mapContextName value
*/
protected transient String mapname = "";
//------------------------------------------------------------------------------
// CONSTRUCTORS
//------------------------------------------------------------------------------
/**
* Creates a new map
* @param channel The channel to use for communication
* @param timeout long - timeout for RPC messages
* @param mapContextName String - unique name for this map, to allow multiple maps per channel
* @param initialCapacity int - the size of this map, see HashMap
* @param loadFactor float - load factor, see HashMap
* @param cls - a list of classloaders to be used for deserialization of objects.
*/
public AbstractReplicatedMap(Object owner,
Channel channel,
long timeout,
String mapContextName,
int initialCapacity,
float loadFactor,
int channelSendOptions,
ClassLoader[] cls) {
super(initialCapacity, loadFactor);
init(owner, channel, mapContextName, timeout, channelSendOptions, cls);
}
/**
* Helper method, wraps a single member in an array
* @param m Member
* @return Member[]
*/
protected Member[] wrap(Member m) {
if ( m == null ) return new Member[0];
else return new Member[] {m};
}
/**
* Initializes the map by creating the RPC channel and registering itself as a channel listener.
* This method is also responsible for initiating the state transfer.
* @param owner Object
* @param channel Channel
* @param mapContextName String
* @param timeout long
* @param channelSendOptions int
* @param cls ClassLoader[]
*/
protected void init(Object owner, Channel channel, String mapContextName, long timeout, int channelSendOptions,ClassLoader[] cls) {
log.info("Initializing AbstractReplicatedMap with context name:"+mapContextName);
this.mapOwner = owner;
this.externalLoaders = cls;
this.channelSendOptions = channelSendOptions;
this.channel = channel;
this.rpcTimeout = timeout;
try {
this.mapname = mapContextName;
//unique context is more efficient if it is stored as bytes
this.mapContextName = mapContextName.getBytes(chset);
} catch (UnsupportedEncodingException x) {
log.warn("Unable to encode mapContextName[" + mapContextName + "] using getBytes(" + chset +") using default getBytes()", x);
this.mapContextName = mapContextName.getBytes();
}
if ( log.isTraceEnabled() ) log.trace("Created Lazy Map with name:"+mapContextName+", bytes:"+Arrays.toString(this.mapContextName));
//create an rpc channel and add the map as a listener
this.rpcChannel = new RpcChannel(this.mapContextName, channel, this);
//add this map as a message listener
this.channel.addChannelListener(this);
//listen for membership notifications
this.channel.addMembershipListener(this);
try {
//broadcast our map, this just notifies other members of our existence
broadcast(MapMessage.MSG_INIT, true);
//transfer state from another map
transferState();
//state is transferred, we are ready for messaging
broadcast(MapMessage.MSG_START, true);
} catch (ChannelException x) {
log.warn("Unable to send map start message.");
throw new RuntimeException("Unable to start replicated map.",x);
}
}
/**
* Sends a ping out to all the members in the cluster, not just map members,
* to notify them that this map is alive.
* @param timeout long
* @throws ChannelException
*/
protected void ping(long timeout) throws ChannelException {
//send out a map membership message and wait for all nodes to reply
MapMessage msg = new MapMessage(this.mapContextName,
MapMessage.MSG_INIT,
false,
null,
null,
null,
wrap(channel.getLocalMember(false)));
if ( channel.getMembers().length > 0 ) {
//send a ping, wait for all nodes to reply
Response[] resp = rpcChannel.send(channel.getMembers(),
msg, rpcChannel.ALL_REPLY,
(channelSendOptions),
(int) accessTimeout);
for (int i = 0; i < resp.length; i++) {
memberAlive(resp[i].getSource());
} //for
}
//update our map of members, expire some if we didn't receive a ping back
synchronized (mapMembers) {
Iterator it = mapMembers.entrySet().iterator();
long now = System.currentTimeMillis();
while ( it.hasNext() ) {
Map.Entry entry = (Map.Entry)it.next();
long access = ((Long)entry.getValue()).longValue();
if ( (now - access) > timeout ) memberDisappeared((Member)entry.getKey());
}
}//synch
}
/**
* We have received a member alive notification
* @param member Member
*/
protected void memberAlive(Member member) {
synchronized (mapMembers) {
if (!mapMembers.containsKey(member)) {
mapMemberAdded(member);
} //end if
mapMembers.put(member, new Long(System.currentTimeMillis()));
}
}
/**
* Helper method to broadcast a message to all members in a channel
* @param msgtype int
* @param rpc boolean
* @throws ChannelException
*/
protected void broadcast(int msgtype, boolean rpc) throws ChannelException {
//send out a map membership message, only wait for the first reply
MapMessage msg = new MapMessage(this.mapContextName, msgtype,
false, null, null, null, wrap(channel.getLocalMember(false)));
if ( rpc) {
Response[] resp = rpcChannel.send(channel.getMembers(), msg, rpcChannel.FIRST_REPLY, (channelSendOptions),rpcTimeout);
for (int i = 0; i < resp.length; i++) {
mapMemberAdded(resp[i].getSource());
messageReceived(resp[i].getMessage(), resp[i].getSource());
}
} else {
channel.send(channel.getMembers(),msg,channelSendOptions);
}
}
public void breakdown() {
finalize();
}
public void finalize() {
try {broadcast(MapMessage.MSG_STOP,false); }catch ( Exception ignore){}
//cleanup
if (this.rpcChannel != null) {
this.rpcChannel.breakdown();
}
if (this.channel != null) {
this.channel.removeChannelListener(this);
this.channel.removeMembershipListener(this);
}
this.rpcChannel = null;
this.channel = null;
this.mapMembers.clear();
super.clear();
this.stateTransferred = false;
this.externalLoaders = null;
}
public int hashCode() {
return Arrays.hashCode(this.mapContextName);
}
public boolean equals(Object o) {
if ( o == null ) return false;
if ( !(o instanceof AbstractReplicatedMap)) return false;
if ( !(o.getClass().equals(this.getClass())) ) return false;
AbstractReplicatedMap other = (AbstractReplicatedMap)o;
return Arrays.equals(mapContextName,other.mapContextName);
}
//------------------------------------------------------------------------------
// GROUP COM INTERFACES
//------------------------------------------------------------------------------
public Member[] getMapMembers(HashMap members) {
synchronized (members) {
Member[] result = new Member[members.size()];
members.keySet().toArray(result);
return result;
}
}
public Member[] getMapMembers() {
return getMapMembers(this.mapMembers);
}
public Member[] getMapMembersExcl(Member[] exclude) {
synchronized (mapMembers) {
HashMap list = (HashMap)mapMembers.clone();
for (int i=0; i<exclude.length;i++) list.remove(exclude[i]);
return getMapMembers(list);
}
}
/**
* Replicates any changes to the object since the last time it was replicated.
* The object has to be primary, i.e. if the object is a proxy or a backup, it will not be replicated<br>
* @param complete - if set to true, the object is replicated to its backup
* if set to false, only objects that implement ReplicatedMapEntry and the isDirty() returns true will
* be replicated
*/
public void replicate(Object key, boolean complete) {
if ( log.isTraceEnabled() )
log.trace("Replicate invoked on key:"+key);
MapEntry entry = (MapEntry)super.get(key);
if ( entry == null ) return;
if ( !entry.isSerializable() ) return;
if (entry != null && entry.isPrimary() && entry.getBackupNodes()!= null && entry.getBackupNodes().length > 0) {
Object value = entry.getValue();
//check to see if we need to replicate this object isDirty()||complete
boolean repl = complete || ( (value instanceof ReplicatedMapEntry) && ( (ReplicatedMapEntry) value).isDirty());
if (!repl) {
if ( log.isTraceEnabled() )
log.trace("Not replicating:"+key+", no change made");
return;
}
//check to see if the message is diffable
boolean diff = ( (value instanceof ReplicatedMapEntry) && ( (ReplicatedMapEntry) value).isDiffable());
MapMessage msg = null;
if (diff) {
ReplicatedMapEntry rentry = (ReplicatedMapEntry)entry.getValue();
try {
rentry.lock();
//construct a diff message
msg = new MapMessage(mapContextName, MapMessage.MSG_BACKUP,
true, (Serializable) entry.getKey(), null,
rentry.getDiff(),
entry.getBackupNodes());
} catch (IOException x) {
log.error("Unable to diff object. Will replicate the entire object instead.", x);
} finally {
rentry.unlock();
}
}
if (msg == null) {
//construct a complete
msg = new MapMessage(mapContextName, MapMessage.MSG_BACKUP,
false, (Serializable) entry.getKey(),
(Serializable) entry.getValue(),
null, entry.getBackupNodes());
}
try {
if ( channel!=null && entry.getBackupNodes()!= null && entry.getBackupNodes().length > 0 ) {
channel.send(entry.getBackupNodes(), msg, channelSendOptions);
}
} catch (ChannelException x) {
log.error("Unable to replicate data.", x);
}
} //end if
}
/**
* This can be invoked by a periodic thread to replicate out any changes.
* For maps that don't store objects that implement ReplicatedMapEntry, this
* method should be used infrequently to avoid large amounts of data transfer
* @param complete boolean
*/
public void replicate(boolean complete) {
Iterator i = super.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
replicate(e.getKey(), complete);
} //while
}
public void transferState() {
try {
Member[] members = getMapMembers();
Member backup = members.length > 0 ? (Member) members[0] : null;
if (backup != null) {
MapMessage msg = new MapMessage(mapContextName, MapMessage.MSG_STATE, false,
null, null, null, null);
Response[] resp = rpcChannel.send(new Member[] {backup}, msg, rpcChannel.FIRST_REPLY, channelSendOptions, rpcTimeout);
if (resp.length > 0) {
synchronized (stateMutex) {
msg = (MapMessage) resp[0].getMessage();
msg.deserialize(getExternalLoaders());
ArrayList list = (ArrayList) msg.getValue();
for (int i = 0; i < list.size(); i++) {
messageReceived( (Serializable) list.get(i), resp[0].getSource());
} //for
}
} else {
log.warn("Transfer state, 0 replies, probably a timeout.");
}
}
} catch (ChannelException x) {
log.error("Unable to transfer LazyReplicatedMap state.", x);
} catch (IOException x) {
log.error("Unable to transfer LazyReplicatedMap state.", x);
} catch (ClassNotFoundException x) {
log.error("Unable to transfer LazyReplicatedMap state.", x);
}
stateTransferred = true;
}
/**
* @todo implement state transfer
* @param msg Serializable
* @return Serializable - null if no reply should be sent
*/
public Serializable replyRequest(Serializable msg, final Member sender) {
if (! (msg instanceof MapMessage))return null;
MapMessage mapmsg = (MapMessage) msg;
//map init request
if (mapmsg.getMsgType() == mapmsg.MSG_INIT) {
mapmsg.setBackUpNodes(wrap(channel.getLocalMember(false)));
return mapmsg;
}
//map start request
if (mapmsg.getMsgType() == mapmsg.MSG_START) {
mapmsg.setBackUpNodes(wrap(channel.getLocalMember(false)));
mapMemberAdded(sender);
return mapmsg;
}
//backup request
if (mapmsg.getMsgType() == mapmsg.MSG_RETRIEVE_BACKUP) {
MapEntry entry = (MapEntry)super.get(mapmsg.getKey());
if (entry == null || (!entry.isSerializable()) )return null;
mapmsg.setValue( (Serializable) entry.getValue());
return mapmsg;
}
//state transfer request
if (mapmsg.getMsgType() == mapmsg.MSG_STATE) {
synchronized (stateMutex) { //make sure we don't do two things at the same time
ArrayList list = new ArrayList();
Iterator i = super.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
MapEntry entry = (MapEntry) e.getValue();
if ( entry.isSerializable() ) {
MapMessage me = new MapMessage(mapContextName, MapMessage.MSG_PROXY,
false, (Serializable) entry.getKey(), null, null, entry.getBackupNodes());
list.add(me);
}
}
mapmsg.setValue(list);
return mapmsg;
} //synchronized
}
return null;
}
/**
* If the reply has already been sent to the requesting thread,
* the rpc callback can handle any data that comes in after the fact.
* @param msg Serializable
* @param sender Member
*/
public void leftOver(Serializable msg, Member sender) {
//left over membership messages
if (! (msg instanceof MapMessage))return;
MapMessage mapmsg = (MapMessage) msg;
try {
mapmsg.deserialize(getExternalLoaders());
if (mapmsg.getMsgType() == MapMessage.MSG_START) {
mapMemberAdded(mapmsg.getBackupNodes()[0]);
} else if (mapmsg.getMsgType() == MapMessage.MSG_INIT) {
memberAlive(mapmsg.getBackupNodes()[0]);
}
} catch (IOException x ) {
log.error("Unable to deserialize MapMessage.",x);
} catch (ClassNotFoundException x ) {
log.error("Unable to deserialize MapMessage.",x);
}
}
public void messageReceived(Serializable msg, Member sender) {
if (! (msg instanceof MapMessage)) return;
MapMessage mapmsg = (MapMessage) msg;
if ( log.isTraceEnabled() ) {
log.trace("Map["+mapname+"] received message:"+mapmsg);
}
try {
mapmsg.deserialize(getExternalLoaders());
} catch (IOException x) {
log.error("Unable to deserialize MapMessage.", x);
return;
} catch (ClassNotFoundException x) {
log.error("Unable to deserialize MapMessage.", x);
return;
}
if ( log.isTraceEnabled() )
log.trace("Map message received from:"+sender.getName()+" msg:"+mapmsg);
if (mapmsg.getMsgType() == MapMessage.MSG_START) {
mapMemberAdded(mapmsg.getBackupNodes()[0]);
}
if (mapmsg.getMsgType() == MapMessage.MSG_STOP) {
memberDisappeared(mapmsg.getBackupNodes()[0]);
}
if (mapmsg.getMsgType() == MapMessage.MSG_PROXY) {
MapEntry entry = (MapEntry)super.get(mapmsg.getKey());
if ( entry==null ) {
entry = new MapEntry(mapmsg.getKey(), mapmsg.getValue());
entry.setBackup(false);
entry.setProxy(true);
entry.setBackupNodes(mapmsg.getBackupNodes());
super.put(entry.getKey(), entry);
} else {
entry.setProxy(true);
entry.setBackup(false);
entry.setBackupNodes(mapmsg.getBackupNodes());
}
}
if (mapmsg.getMsgType() == MapMessage.MSG_REMOVE) {
super.remove(mapmsg.getKey());
}
if (mapmsg.getMsgType() == MapMessage.MSG_BACKUP) {
MapEntry entry = (MapEntry)super.get(mapmsg.getKey());
if (entry == null) {
entry = new MapEntry(mapmsg.getKey(), mapmsg.getValue());
entry.setBackup(true);
entry.setProxy(false);
entry.setBackupNodes(mapmsg.getBackupNodes());
if (mapmsg.getValue()!=null && mapmsg.getValue() instanceof ReplicatedMapEntry ) {
((ReplicatedMapEntry)mapmsg.getValue()).setOwner(getMapOwner());
}
} else {
entry.setBackup(true);
entry.setProxy(false);
entry.setBackupNodes(mapmsg.getBackupNodes());
if (entry.getValue() instanceof ReplicatedMapEntry) {
ReplicatedMapEntry diff = (ReplicatedMapEntry) entry.getValue();
if (mapmsg.isDiff()) {
try {
diff.lock();
diff.applyDiff(mapmsg.getDiffValue(), 0, mapmsg.getDiffValue().length);
} catch (Exception x) {
log.error("Unable to apply diff to key:" + entry.getKey(), x);
} finally {
diff.unlock();
}
} else {
if ( mapmsg.getValue()!=null ) entry.setValue(mapmsg.getValue());
((ReplicatedMapEntry)entry.getValue()).setOwner(getMapOwner());
} //end if
} else if (mapmsg.getValue() instanceof ReplicatedMapEntry) {
ReplicatedMapEntry re = (ReplicatedMapEntry)mapmsg.getValue();
re.setOwner(getMapOwner());
entry.setValue(re);
} else {
if ( mapmsg.getValue()!=null ) entry.setValue(mapmsg.getValue());
} //end if
} //end if
super.put(entry.getKey(), entry);
} //end if
}
public boolean accept(Serializable msg, Member sender) {
boolean result = false;
if (msg instanceof MapMessage) {
if ( log.isTraceEnabled() ) log.trace("Map["+mapname+"] accepting...."+msg);
result = Arrays.equals(mapContextName, ( (MapMessage) msg).getMapId());
if ( log.isTraceEnabled() ) log.trace("Msg["+mapname+"] accepted["+result+"]...."+msg);
}
return result;
}
public void mapMemberAdded(Member member) {
if ( member.equals(getChannel().getLocalMember(false)) ) return;
boolean memberAdded = false;
//select a backup node if we don't have one
synchronized (mapMembers) {
if (!mapMembers.containsKey(member) ) {
mapMembers.put(member, new Long(System.currentTimeMillis()));
memberAdded = true;
}
}
if ( memberAdded ) {
synchronized (stateMutex) {
Iterator i = super.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
MapEntry entry = (MapEntry) e.getValue();
if ( entry == null ) continue;
if (entry.isPrimary() && (entry.getBackupNodes() == null || entry.getBackupNodes().length == 0)) {
try {
Member[] backup = publishEntryInfo(entry.getKey(), entry.getValue());
entry.setBackupNodes(backup);
} catch (ChannelException x) {
log.error("Unable to select backup node.", x);
} //catch
} //end if
} //while
} //synchronized
}//end if
}
public boolean inSet(Member m, Member[] set) {
if ( set == null ) return false;
boolean result = false;
for (int i=0; i<set.length && (!result); i++ )
if ( m.equals(set[i]) ) result = true;
return result;
}
public Member[] excludeFromSet(Member[] mbrs, Member[] set) {
ArrayList result = new ArrayList();
for (int i=0; i<set.length; i++ ) {
boolean include = true;
for (int j=0; j<mbrs.length; j++ )
if ( mbrs[j].equals(set[i]) ) include = false;
if ( include ) result.add(set[i]);
}
return (Member[])result.toArray(new Member[result.size()]);
}
public void memberAdded(Member member) {
//do nothing
}
public void memberDisappeared(Member member) {
boolean removed = false;
synchronized (mapMembers) {
removed = (mapMembers.remove(member) != null );
}
Iterator i = super.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
MapEntry entry = (MapEntry) e.getValue();
if (entry.isPrimary() && inSet(member,entry.getBackupNodes())) {
try {
Member[] backup = publishEntryInfo(entry.getKey(), entry.getValue());
entry.setBackupNodes(backup);
} catch (ChannelException x) {
log.error("Unable to relocate[" + entry.getKey() + "] to a new backup node", x);
}
} //end if
} //while
}
public int getNextBackupIndex() {
int size = mapMembers.size();
if (mapMembers.size() == 0)return -1;
int node = currentNode++;
if (node >= size) {
node = 0;
currentNode = 0;
}
return node;
}
public Member getNextBackupNode() {
Member[] members = getMapMembers();
int node = getNextBackupIndex();
if ( members.length == 0 || node==-1) return null;
if ( node >= members.length ) node = 0;
return members[node];
}
protected abstract Member[] publishEntryInfo(Object key, Object value) throws ChannelException;
public void heartbeat() {
try {
ping(accessTimeout);
}catch ( Exception x ) {
log.error("Unable to send AbstractReplicatedMap.ping message",x);
}
}
//------------------------------------------------------------------------------
// METHODS TO OVERRIDE
//------------------------------------------------------------------------------
/**
* Removes an object from this map; it will also remove it from the
* other map members by sending a MSG_REMOVE message.
*
* @param key Object
* @return Object
*/
public Object remove(Object key) {
MapEntry entry = (MapEntry)super.remove(key);
try {
if (getMapMembers().length > 0 ) {
MapMessage msg = new MapMessage(getMapContextName(), MapMessage.MSG_REMOVE, false, (Serializable) key, null, null, null);
getChannel().send(getMapMembers(), msg, getChannelSendOptions());
}
} catch ( ChannelException x ) {
log.error("Unable to replicate out data for a LazyReplicatedMap.remove operation",x);
}
return entry!=null?entry.getValue():null;
}
public Object get(Object key) {
MapEntry entry = (MapEntry)super.get(key);
if (log.isTraceEnabled()) log.trace("Requesting id:"+key+" entry:"+entry);
if ( entry == null ) return null;
if ( !entry.isPrimary() ) {
//if the message is not primary, we need to retrieve the latest value
try {
Member[] backup = null;
MapMessage msg = null;
if ( !entry.isBackup() ) {
//make sure we don't retrieve from ourselves
msg = new MapMessage(getMapContextName(), MapMessage.MSG_RETRIEVE_BACKUP, false,
(Serializable) key, null, null, null);
Response[] resp = getRpcChannel().send(entry.getBackupNodes(),msg, this.getRpcChannel().FIRST_REPLY, Channel.SEND_OPTIONS_DEFAULT, getRpcTimeout());
if (resp == null || resp.length == 0) {
//no responses
log.warn("Unable to retrieve remote object for key:" + key);
return null;
}
msg = (MapMessage) resp[0].getMessage();
msg.deserialize(getExternalLoaders());
backup = entry.getBackupNodes();
if ( entry.getValue() instanceof ReplicatedMapEntry ) {
ReplicatedMapEntry val = (ReplicatedMapEntry)entry.getValue();
val.setOwner(getMapOwner());
}
if ( msg.getValue()!=null ) entry.setValue(msg.getValue());
}
if (entry.isBackup()) {
//select a new backup node
backup = publishEntryInfo(key, entry.getValue());
} else if ( entry.isProxy() ) {
//invalidate the previous primary
msg = new MapMessage(getMapContextName(),MapMessage.MSG_PROXY,false,(Serializable)key,null,null,backup);
Member[] dest = getMapMembersExcl(backup);
if ( dest!=null && dest.length >0) {
getChannel().send(dest, msg, getChannelSendOptions());
}
}
entry.setBackupNodes(backup);
entry.setBackup(false);
entry.setProxy(false);
} catch (Exception x) {
log.error("Unable to replicate out data for a LazyReplicatedMap.get operation", x);
return null;
}
}
if (log.isTraceEnabled()) log.trace("Requesting id:"+key+" result:"+entry.getValue());
if ( entry.getValue() != null && entry.getValue() instanceof ReplicatedMapEntry ) {
ReplicatedMapEntry val = (ReplicatedMapEntry)entry.getValue();
//hack, somehow this is not being set above
val.setOwner(getMapOwner());
}
return entry.getValue();
}
protected void printMap(String header) {
try {
System.out.println("\nDEBUG MAP:"+header);
System.out.println("Map["+ new String(mapContextName, chset) + ", Map Size:" + super.size());
Member[] mbrs = getMapMembers();
for ( int i=0; i<mbrs.length;i++ ) {
System.out.println("Mbr["+(i+1)+"="+mbrs[i].getName());
}
Iterator i = super.entrySet().iterator();
int cnt = 0;
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
System.out.println( (++cnt) + ". " + e.getValue());
}
System.out.println("EndMap]\n\n");
}catch ( Exception ignore) {
ignore.printStackTrace();
}
}
/**
* Returns true if the key has an entry in the map.
* The entry can be a proxy or a backup entry; invoking <code>get(key)</code>
* will make this entry primary for the group.
* @param key Object
* @return boolean
*/
public boolean containsKey(Object key) {
return super.containsKey(key);
}
public Object put(Object key, Object value) {
MapEntry entry = new MapEntry(key,value);
entry.setBackup(false);
entry.setProxy(false);
Object old = null;
//make sure that any old values get removed
if ( containsKey(key) ) old = remove(key);
try {
Member[] backup = publishEntryInfo(key, value);
entry.setBackupNodes(backup);
} catch (ChannelException x) {
log.error("Unable to replicate out data for a LazyReplicatedMap.put operation", x);
}
super.put(key,entry);
return old;
}
/**
* Copies all values from one map to this instance
* @param m Map
*/
public void putAll(Map m) {
Iterator i = m.entrySet().iterator();
while ( i.hasNext() ) {
Map.Entry entry = (Map.Entry)i.next();
put(entry.getKey(),entry.getValue());
}
}
public void clear() {
//only delete active keys
Iterator keys = keySet().iterator();
while ( keys.hasNext() ) remove(keys.next());
}
public boolean containsValue(Object value) {
if ( value == null ) {
return super.containsValue(value);
} else {
Iterator i = super.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
MapEntry entry = (MapEntry) e.getValue();
if (entry.isPrimary() && value.equals(entry.getValue())) return true;
}//while
return false;
}//end if
}
public Object clone() {
throw new UnsupportedOperationException("This operation is not valid on a replicated map");
}
/**
* Returns the entire contents of the map.
* Map.Entry.getValue() will return a LazyReplicatedMap.MapEntry object containing all the information
* about the object.
* @return Set
*/
public Set entrySetFull() {
return super.entrySet();
}
public Set keySetFull() {
return super.keySet();
}
public int sizeFull() {
return super.size();
}
public Set entrySet() {
LinkedHashSet set = new LinkedHashSet(super.size());
Iterator i = super.entrySet().iterator();
while ( i.hasNext() ) {
Map.Entry e = (Map.Entry)i.next();
MapEntry entry = (MapEntry)e.getValue();
if ( entry.isPrimary() ) set.add(entry);
}
return Collections.unmodifiableSet(set);
}
public Set keySet() {
//todo implement
//should only return keys where this is active.
LinkedHashSet set = new LinkedHashSet(super.size());
Iterator i = super.entrySet().iterator();
while ( i.hasNext() ) {
Map.Entry e = (Map.Entry)i.next();
MapEntry entry = (MapEntry)e.getValue();
if ( entry.isPrimary() ) set.add(entry.getKey());
}
return Collections.unmodifiableSet(set);
}
public int size() {
//todo, implement a counter variable instead
//only count active members in this node
int counter = 0;
Iterator it = Collections.unmodifiableSet(super.entrySet()).iterator();
while (it.hasNext() ) {
Map.Entry e = (Map.Entry) it.next();
if ( e != null ) {
MapEntry entry = (MapEntry) e.getValue();
if (entry.isPrimary() && entry.getValue() != null) counter++;
}
}
return counter;
}
protected boolean removeEldestEntry(Map.Entry eldest) {
return false;
}
public boolean isEmpty() {
return size()==0;
}
public Collection values() {
ArrayList values = new ArrayList();
Iterator i = super.entrySet().iterator();
while ( i.hasNext() ) {
Map.Entry e = (Map.Entry)i.next();
MapEntry entry = (MapEntry)e.getValue();
if ( entry.isPrimary() && entry.getValue()!=null) values.add(entry.getValue());
}
return Collections.unmodifiableCollection(values);
}
//------------------------------------------------------------------------------
// Map Entry class
//------------------------------------------------------------------------------
public static class MapEntry implements Map.Entry {
private boolean backup;
private boolean proxy;
private Member[] backupNodes;
private Object key;
private Object value;
public MapEntry(Object key, Object value) {
setKey(key);
setValue(value);
}
public boolean isKeySerializable() {
return (key == null) || (key instanceof Serializable);
}
public boolean isValueSerializable() {
return (value==null) || (value instanceof Serializable);
}
public boolean isSerializable() {
return isKeySerializable() && isValueSerializable();
}
public boolean isBackup() {
return backup;
}
public void setBackup(boolean backup) {
this.backup = backup;
}
public boolean isProxy() {
return proxy;
}
public boolean isPrimary() {
return ( (!proxy) && (!backup));
}
public void setProxy(boolean proxy) {
this.proxy = proxy;
}
public boolean isDiffable() {
return (value instanceof ReplicatedMapEntry) &&
((ReplicatedMapEntry)value).isDiffable();
}
public void setBackupNodes(Member[] nodes) {
this.backupNodes = nodes;
}
public Member[] getBackupNodes() {
return backupNodes;
}
public Object getValue() {
return value;
}
public Object setValue(Object value) {
Object old = this.value;
this.value = (Serializable) value;
return old;
}
public Object getKey() {
return key;
}
public Object setKey(Object key) {
Object old = this.key;
this.key = (Serializable)key;
return old;
}
public int hashCode() {
return key.hashCode();
}
public boolean equals(Object o) {
return key.equals(o);
}
/**
* apply a diff, or an entire object
* @param data byte[]
* @param offset int
* @param length int
* @param diff boolean
* @throws IOException
* @throws ClassNotFoundException
*/
public void apply(byte[] data, int offset, int length, boolean diff) throws IOException, ClassNotFoundException {
if (isDiffable() && diff) {
ReplicatedMapEntry rentry = (ReplicatedMapEntry) value;
try {
rentry.lock();
rentry.applyDiff(data, offset, length);
} finally {
rentry.unlock();
}
} else if (length == 0) {
value = null;
proxy = true;
} else {
value = XByteBuffer.deserialize(data, offset, length);
}
}
public String toString() {
StringBuffer buf = new StringBuffer("MapEntry[key:");
buf.append(getKey()).append("; ");
buf.append("value:").append(getValue()).append("; ");
buf.append("primary:").append(isPrimary()).append("; ");
buf.append("backup:").append(isBackup()).append("; ");
buf.append("proxy:").append(isProxy()).append(";]");
return buf.toString();
}
}
//------------------------------------------------------------------------------
// map message to send to and from other maps
//------------------------------------------------------------------------------
public static class MapMessage implements Serializable {
public static final int MSG_BACKUP = 1;
public static final int MSG_RETRIEVE_BACKUP = 2;
public static final int MSG_PROXY = 3;
public static final int MSG_REMOVE = 4;
public static final int MSG_STATE = 5;
public static final int MSG_START = 6;
public static final int MSG_STOP = 7;
public static final int MSG_INIT = 8;
private byte[] mapId;
private int msgtype;
private boolean diff;
private transient Serializable key;
private transient Serializable value;
private byte[] valuedata;
private byte[] keydata;
private byte[] diffvalue;
private Member[] nodes;
public String toString() {
StringBuffer buf = new StringBuffer("MapMessage[context=");
buf.append(new String(mapId));
buf.append("; type=");
buf.append(getTypeDesc());
buf.append("; key=");
buf.append(key);
buf.append("; value=");
buf.append(value);
return buf.toString();
}
public String getTypeDesc() {
switch (msgtype) {
case MSG_BACKUP: return "MSG_BACKUP";
case MSG_RETRIEVE_BACKUP: return "MSG_RETRIEVE_BACKUP";
case MSG_PROXY: return "MSG_PROXY";
case MSG_REMOVE: return "MSG_REMOVE";
case MSG_STATE: return "MSG_STATE";
case MSG_START: return "MSG_START";
case MSG_STOP: return "MSG_STOP";
case MSG_INIT: return "MSG_INIT";
default : return "UNKNOWN";
}
}
public MapMessage() {}
public MapMessage(byte[] mapId,int msgtype, boolean diff,
Serializable key, Serializable value,
byte[] diffvalue, Member[] nodes) {
this.mapId = mapId;
this.msgtype = msgtype;
this.diff = diff;
this.key = key;
this.value = value;
this.diffvalue = diffvalue;
this.nodes = nodes;
setValue(value);
setKey(key);
}
public void deserialize(ClassLoader[] cls) throws IOException, ClassNotFoundException {
key(cls);
value(cls);
}
public int getMsgType() {
return msgtype;
}
public boolean isDiff() {
return diff;
}
public Serializable getKey() {
try {
return key(null);
} catch ( Exception x ) {
log.error("Deserialization error of the MapMessage.key",x);
return null;
}
}
public Serializable key(ClassLoader[] cls) throws IOException, ClassNotFoundException {
if ( key!=null ) return key;
if ( keydata == null || keydata.length == 0 ) return null;
key = XByteBuffer.deserialize(keydata,0,keydata.length,cls);
keydata = null;
return key;
}
public byte[] getKeyData() {
return keydata;
}
public Serializable getValue() {
try {
return value(null);
} catch ( Exception x ) {
log.error("Deserialization error of the MapMessage.value",x);
return null;
}
}
public Serializable value(ClassLoader[] cls) throws IOException, ClassNotFoundException {
if ( value!=null ) return value;
if ( valuedata == null || valuedata.length == 0 ) return null;
value = XByteBuffer.deserialize(valuedata,0,valuedata.length,cls);
valuedata = null;
return value;
}
public byte[] getValueData() {
return valuedata;
}
public byte[] getDiffValue() {
return diffvalue;
}
public Member[] getBackupNodes() {
return nodes;
}
private void setBackUpNodes(Member[] nodes) {
this.nodes = nodes;
}
public byte[] getMapId() {
return mapId;
}
public void setValue(Serializable value) {
try {
if ( value != null ) valuedata = XByteBuffer.serialize(value);
this.value = value;
}catch ( IOException x ) {
throw new RuntimeException(x);
}
}
public void setKey(Serializable key) {
try {
if (key != null) keydata = XByteBuffer.serialize(key);
this.key = key;
} catch (IOException x) {
throw new RuntimeException(x);
}
}
protected Member[] readMembers(ObjectInput in) throws IOException, ClassNotFoundException {
int nodecount = in.readInt();
Member[] members = new Member[nodecount];
for ( int i=0; i<members.length; i++ ) {
byte[] d = new byte[in.readInt()];
in.read(d);
if (d.length > 0) members[i] = MemberImpl.getMember(d);
}
return members;
}
protected void writeMembers(ObjectOutput out,Member[] members) throws IOException {
if ( members == null ) members = new Member[0];
out.writeInt(members.length);
for (int i=0; i<members.length; i++ ) {
if ( members[i] != null ) {
byte[] d = members[i] != null ? ( (MemberImpl)members[i]).getData(false) : new byte[0];
out.writeInt(d.length);
out.write(d);
}
}
}
/**
* shallow clone
* @return Object
*/
public Object clone() {
MapMessage msg = new MapMessage(this.mapId, this.msgtype, this.diff, this.key, this.value, this.diffvalue, this.nodes);
msg.keydata = this.keydata;
msg.valuedata = this.valuedata;
return msg;
}
} //MapMessage
public Channel getChannel() {
return channel;
}
public byte[] getMapContextName() {
return mapContextName;
}
public RpcChannel getRpcChannel() {
return rpcChannel;
}
public long getRpcTimeout() {
return rpcTimeout;
}
public Object getStateMutex() {
return stateMutex;
}
public boolean isStateTransferred() {
return stateTransferred;
}
public Object getMapOwner() {
return mapOwner;
}
public ClassLoader[] getExternalLoaders() {
return externalLoaders;
}
public int getChannelSendOptions() {
return channelSendOptions;
}
public long getAccessTimeout() {
return accessTimeout;
}
public void setMapOwner(Object mapOwner) {
this.mapOwner = mapOwner;
}
public void setExternalLoaders(ClassLoader[] externalLoaders) {
this.externalLoaders = externalLoaders;
}
public void setChannelSendOptions(int channelSendOptions) {
this.channelSendOptions = channelSendOptions;
}
public void setAccessTimeout(long accessTimeout) {
this.accessTimeout = accessTimeout;
}
}
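/*
 * Minimal sketch (not part of Tomcat) of how a concrete subclass might implement the
 * abstract publishEntryInfo() hook: pick the next backup node in round-robin order and
 * push a MSG_BACKUP message to it. Real implementations such as LazyReplicatedMap add
 * proxy notification and richer error handling; the class name below is hypothetical.
 * Once constructed, the base class init() takes care of broadcasting MSG_INIT,
 * transferring state and broadcasting MSG_START, so no extra wiring is needed here.
 */
class RoundRobinReplicatedMapSketch extends AbstractReplicatedMap {
    public RoundRobinReplicatedMapSketch(Object owner, Channel channel, long timeout,
                                         String mapContextName, ClassLoader[] cls) {
        super(owner, channel, timeout, mapContextName,
              DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR,
              Channel.SEND_OPTIONS_DEFAULT, cls);
    }
    protected Member[] publishEntryInfo(Object key, Object value) throws ChannelException {
        //pick one backup node using the round-robin index maintained by the base class
        Member next = getNextBackupNode();
        if (next == null) return new Member[0];
        Member[] backup = wrap(next);
        //replicate the full entry to the chosen backup node
        MapMessage msg = new MapMessage(getMapContextName(), MapMessage.MSG_BACKUP, false,
                                        (Serializable) key, (Serializable) value,
                                        null, backup);
        getChannel().send(backup, msg, getChannelSendOptions());
        return backup;
    }
}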
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc.mapred;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.ShortWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapred.OutputCommitter;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.util.Progressable;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcConf;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestOrcOutputFormat {
Path workDir = new Path(System.getProperty("test.tmp.dir",
"target" + File.separator + "test" + File.separator + "tmp"));
JobConf conf = new JobConf();
FileSystem fs;
{
try {
fs = FileSystem.getLocal(conf).getRaw();
fs.delete(workDir, true);
fs.mkdirs(workDir);
} catch (IOException e) {
throw new IllegalStateException("bad fs init", e);
}
}
static class NullOutputCommitter extends OutputCommitter {
@Override
public void setupJob(JobContext jobContext) {
// PASS
}
@Override
public void setupTask(TaskAttemptContext taskAttemptContext) {
}
@Override
public boolean needsTaskCommit(TaskAttemptContext taskAttemptContext) {
return false;
}
@Override
public void commitTask(TaskAttemptContext taskAttemptContext) {
// PASS
}
@Override
public void abortTask(TaskAttemptContext taskAttemptContext) {
// PASS
}
}
@Test
public void testAllTypes() throws Exception {
conf.set("mapreduce.task.attempt.id", "attempt_20160101_0001_m_000001_0");
conf.setOutputCommitter(NullOutputCommitter.class);
final String typeStr = "struct<b1:binary,b2:boolean,b3:tinyint," +
"c:char(10),d1:date,d2:decimal(20,5),d3:double,fff:float,int:int," +
"l:array<bigint>,map:map<smallint,string>," +
"str:struct<u:uniontype<timestamp,varchar(100)>>,ts:timestamp>";
OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, typeStr);
FileOutputFormat.setOutputPath(conf, workDir);
TypeDescription type = TypeDescription.fromString(typeStr);
// build a row object
OrcStruct row = (OrcStruct) OrcStruct.createValue(type);
((BytesWritable) row.getFieldValue(0)).set(new byte[]{1,2,3,4}, 0, 4);
((BooleanWritable) row.getFieldValue(1)).set(true);
((ByteWritable) row.getFieldValue(2)).set((byte) 23);
((Text) row.getFieldValue(3)).set("aaabbbcccddd");
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
((DateWritable) row.getFieldValue(4)).set(DateWritable.millisToDays
(format.parse("2016-04-01").getTime()));
((HiveDecimalWritable) row.getFieldValue(5)).set(new HiveDecimalWritable("1.23"));
((DoubleWritable) row.getFieldValue(6)).set(1.5);
((FloatWritable) row.getFieldValue(7)).set(4.5f);
((IntWritable) row.getFieldValue(8)).set(31415);
OrcList<LongWritable> longList = (OrcList<LongWritable>) row.getFieldValue(9);
longList.add(new LongWritable(123));
longList.add(new LongWritable(456));
OrcMap<ShortWritable,Text> map = (OrcMap<ShortWritable,Text>) row.getFieldValue(10);
map.put(new ShortWritable((short) 1000), new Text("aaaa"));
map.put(new ShortWritable((short) 123), new Text("bbbb"));
OrcStruct struct = (OrcStruct) row.getFieldValue(11);
OrcUnion union = (OrcUnion) struct.getFieldValue(0);
union.set((byte) 1, new Text("abcde"));
((OrcTimestamp) row.getFieldValue(12)).set("1996-12-11 15:00:00");
NullWritable nada = NullWritable.get();
RecordWriter<NullWritable, OrcStruct> writer =
new OrcOutputFormat<OrcStruct>().getRecordWriter(fs, conf, "all.orc",
Reporter.NULL);
for(int r=0; r < 10; ++r) {
row.setFieldValue(8, new IntWritable(r * 10));
writer.write(nada, row);
}
union.set((byte) 0, new OrcTimestamp("2011-12-25 12:34:56"));
for(int r=0; r < 10; ++r) {
row.setFieldValue(8, new IntWritable(r * 10 + 100));
writer.write(nada, row);
}
OrcStruct row2 = new OrcStruct(type);
writer.write(nada, row2);
row.setFieldValue(8, new IntWritable(210));
writer.write(nada, row);
writer.close(Reporter.NULL);
FileSplit split = new FileSplit(new Path(workDir, "all.orc"), 0, 100000,
new String[0]);
RecordReader<NullWritable, OrcStruct> reader =
new OrcInputFormat<OrcStruct>().getRecordReader(split, conf,
Reporter.NULL);
nada = reader.createKey();
row = reader.createValue();
for(int r=0; r < 22; ++r) {
assertTrue(reader.next(nada, row));
if (r == 20) {
for(int c=0; c < 12; ++c) {
assertNull(row.getFieldValue(c));
}
} else {
assertEquals(new BytesWritable(new byte[]{1, 2, 3, 4}), row.getFieldValue(0));
assertEquals(new BooleanWritable(true), row.getFieldValue(1));
assertEquals(new ByteWritable((byte) 23), row.getFieldValue(2));
assertEquals(new Text("aaabbbcccd"), row.getFieldValue(3));
assertEquals(new DateWritable(DateWritable.millisToDays
(format.parse("2016-04-01").getTime())), row.getFieldValue(4));
assertEquals(new HiveDecimalWritable("1.23"), row.getFieldValue(5));
assertEquals(new DoubleWritable(1.5), row.getFieldValue(6));
assertEquals(new FloatWritable(4.5f), row.getFieldValue(7));
assertEquals(new IntWritable(r * 10), row.getFieldValue(8));
assertEquals(longList, row.getFieldValue(9));
assertEquals(map, row.getFieldValue(10));
if (r < 10) {
union.set((byte) 1, new Text("abcde"));
} else {
union.set((byte) 0, new OrcTimestamp("2011-12-25 12:34:56"));
}
assertEquals(struct, row.getFieldValue(11), "row " + r);
assertEquals(new OrcTimestamp("1996-12-11 15:00:00"),
row.getFieldValue(12), "row " + r);
}
}
assertFalse(reader.next(nada, row));
}
/**
* Test the case where the top level isn't a struct, but a long.
*/
@Test
public void testLongRoot() throws Exception {
conf.set("mapreduce.task.attempt.id", "attempt_20160101_0001_m_000001_0");
conf.setOutputCommitter(NullOutputCommitter.class);
conf.set(OrcConf.COMPRESS.getAttribute(), "SNAPPY");
conf.setInt(OrcConf.ROW_INDEX_STRIDE.getAttribute(), 1000);
conf.setInt(OrcConf.BUFFER_SIZE.getAttribute(), 64 * 1024);
conf.set(OrcConf.WRITE_FORMAT.getAttribute(), "0.11");
final String typeStr = "bigint";
OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, typeStr);
FileOutputFormat.setOutputPath(conf, workDir);
LongWritable value = new LongWritable();
NullWritable nada = NullWritable.get();
RecordWriter<NullWritable, LongWritable> writer =
new OrcOutputFormat<LongWritable>().getRecordWriter(fs, conf,
"long.orc", Reporter.NULL);
for(long lo=0; lo < 2000; ++lo) {
value.set(lo);
writer.write(nada, value);
}
writer.close(Reporter.NULL);
Path path = new Path(workDir, "long.orc");
Reader file = OrcFile.createReader(path, OrcFile.readerOptions(conf));
assertEquals(CompressionKind.SNAPPY, file.getCompressionKind());
assertEquals(2000, file.getNumberOfRows());
assertEquals(1000, file.getRowIndexStride());
assertEquals(64 * 1024, file.getCompressionSize());
assertEquals(OrcFile.Version.V_0_11, file.getFileVersion());
FileSplit split = new FileSplit(path, 0, 100000,
new String[0]);
RecordReader<NullWritable, LongWritable> reader =
new OrcInputFormat<LongWritable>().getRecordReader(split, conf,
Reporter.NULL);
nada = reader.createKey();
value = reader.createValue();
for(long lo=0; lo < 2000; ++lo) {
assertTrue(reader.next(nada, value));
assertEquals(lo, value.get());
}
assertFalse(reader.next(nada, value));
}
/**
* Make sure that the writer ignores the OrcKey
* @throws Exception
*/
@Test
public void testOrcKey() throws Exception {
conf.set("mapreduce.output.fileoutputformat.outputdir", workDir.toString());
conf.set("mapreduce.task.attempt.id", "attempt_jt0_0_m_0_0");
String TYPE_STRING = "struct<i:int,s:string>";
OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, TYPE_STRING);
conf.setOutputCommitter(NullOutputCommitter.class);
TypeDescription schema = TypeDescription.fromString(TYPE_STRING);
OrcKey key = new OrcKey(new OrcStruct(schema));
RecordWriter<NullWritable, Writable> writer =
new OrcOutputFormat<>().getRecordWriter(fs, conf, "key.orc",
Reporter.NULL);
NullWritable nada = NullWritable.get();
for(int r=0; r < 2000; ++r) {
((OrcStruct) key.key).setAllFields(new IntWritable(r),
new Text(Integer.toString(r)));
writer.write(nada, key);
}
writer.close(Reporter.NULL);
Path path = new Path(workDir, "key.orc");
Reader file = OrcFile.createReader(path, OrcFile.readerOptions(conf));
assertEquals(2000, file.getNumberOfRows());
assertEquals(TYPE_STRING, file.getSchema().toString());
}
/**
* Make sure that the writer ignores the OrcValue
* @throws Exception
*/
@Test
public void testOrcValue() throws Exception {
conf.set("mapreduce.output.fileoutputformat.outputdir", workDir.toString());
conf.set("mapreduce.task.attempt.id", "attempt_jt0_0_m_0_0");
String TYPE_STRING = "struct<i:int>";
OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, TYPE_STRING);
conf.setOutputCommitter(NullOutputCommitter.class);
TypeDescription schema = TypeDescription.fromString(TYPE_STRING);
OrcValue value = new OrcValue(new OrcStruct(schema));
RecordWriter<NullWritable, Writable> writer =
new OrcOutputFormat<>().getRecordWriter(fs, conf, "value.orc",
Reporter.NULL);
NullWritable nada = NullWritable.get();
for(int r=0; r < 3000; ++r) {
((OrcStruct) value.value).setAllFields(new IntWritable(r));
writer.write(nada, value);
}
writer.close(Reporter.NULL);
Path path = new Path(workDir, "value.orc");
Reader file = OrcFile.createReader(path, OrcFile.readerOptions(conf));
assertEquals(3000, file.getNumberOfRows());
assertEquals(TYPE_STRING, file.getSchema().toString());
}
/**
* Make sure that the ORC writer is initialized with a configured row batch size
* @throws Exception
*/
@Test
public void testOrcOutputFormatWithRowBatchSize() throws Exception {
conf.set("mapreduce.output.fileoutputformat.outputdir", workDir.toString());
conf.set("mapreduce.task.attempt.id", "attempt_jt0_0_m_0_0");
conf.setInt(OrcOutputFormatWithRowBatchSize.ROW_BATCH_SIZE, 128);
String TYPE_STRING = "struct<i:int,s:string>";
OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, TYPE_STRING);
conf.setOutputCommitter(NullOutputCommitter.class);
TypeDescription schema = TypeDescription.fromString(TYPE_STRING);
OrcKey key = new OrcKey(new OrcStruct(schema));
RecordWriter<NullWritable, Writable> writer =
new OrcOutputFormatWithRowBatchSize<>().getRecordWriter(fs, conf, "key.orc",
Reporter.NULL);
NullWritable nada = NullWritable.get();
for(int r=0; r < 2000; ++r) {
((OrcStruct) key.key).setAllFields(new IntWritable(r),
new Text(Integer.toString(r)));
writer.write(nada, key);
}
writer.close(Reporter.NULL);
Path path = new Path(workDir, "key.orc");
Reader file = OrcFile.createReader(path, OrcFile.readerOptions(conf));
assertEquals(2000, file.getNumberOfRows());
assertEquals(TYPE_STRING, file.getSchema().toString());
}
private static class OrcOutputFormatWithRowBatchSize<V extends Writable> extends OrcOutputFormat {
public static final String ROW_BATCH_SIZE = "orc.row.batch.size";
@Override
public RecordWriter<NullWritable, V> getRecordWriter(FileSystem fileSystem,
JobConf conf,
String name,
Progressable progressable
) throws IOException {
Path path = getTaskOutputPath(conf, name);
Writer writer = OrcFile.createWriter(path,
buildOptions(conf).fileSystem(fileSystem));
//Ensure that orc.row.batch.size config is set in the JobConf
int rowBatchSize = Integer.parseInt(conf.get(ROW_BATCH_SIZE));
return new OrcMapredRecordWriter<>(writer, rowBatchSize);
}
}
}
|
|
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.group.ui.portlet;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.StringEscapeUtils;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.Windows;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.panel.Panel;
import org.olat.core.gui.components.table.ColumnDescriptor;
import org.olat.core.gui.components.table.DefaultColumnDescriptor;
import org.olat.core.gui.components.table.Table;
import org.olat.core.gui.components.table.TableController;
import org.olat.core.gui.components.table.TableEvent;
import org.olat.core.gui.components.table.TableGuiConfiguration;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.dtabs.DTabs;
import org.olat.core.gui.control.generic.portal.AbstractPortletRunController;
import org.olat.core.gui.control.generic.portal.PortletDefaultTableDataModel;
import org.olat.core.gui.control.generic.portal.PortletEntry;
import org.olat.core.gui.control.generic.portal.PortletToolSortingControllerImpl;
import org.olat.core.gui.control.generic.portal.SortingCriteria;
import org.olat.core.gui.translator.Translator;
import org.olat.core.id.Identity;
import org.olat.core.util.coordinate.CoordinatorManager;
import org.olat.core.util.event.GenericEventListener;
import org.olat.core.util.filter.FilterFactory;
import org.olat.core.util.resource.OresHelper;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupManager;
import org.olat.group.BusinessGroupManagerImpl;
import org.olat.group.site.GroupsSite;
import org.olat.group.ui.BGControllerFactory;
import org.olat.group.ui.edit.BusinessGroupModifiedEvent;
/**
* Description:<br>
* Run view controller for the groups list portlet
* <P>
* Initial Date: 11.07.2005 <br>
*
* @author gnaegi
*/
public class GroupsPortletRunController extends AbstractPortletRunController implements GenericEventListener {
private static final String CMD_LAUNCH = "cmd.launch";
private final Panel panel;
private final TableController tableCtr;
// private GroupListMiniModel groupListModel;
private GroupTableDataModel groupListModel;
private final VelocityContainer groupsVC;
private List groupList;
private final Identity ident;
private final Link showAllLink;
/**
* Constructor
*
* @param ureq
* @param component
*/
public GroupsPortletRunController(final WindowControl wControl, final UserRequest ureq, final Translator trans, final String portletName) {
super(wControl, ureq, trans, portletName);
sortingTermsList.add(SortingCriteria.TYPE_SORTING);
sortingTermsList.add(SortingCriteria.ALPHABETICAL_SORTING);
sortingTermsList.add(SortingCriteria.DATE_SORTING);
this.ident = ureq.getIdentity();
this.groupsVC = this.createVelocityContainer("groupsPortlet");
showAllLink = LinkFactory.createLink("groupsPortlet.showAll", groupsVC, this);
final TableGuiConfiguration tableConfig = new TableGuiConfiguration();
tableConfig.setTableEmptyMessage(trans.translate("groupsPortlet.nogroups"));
tableConfig.setDisplayTableHeader(false);
tableConfig.setCustomCssClass("b_portlet_table");
tableConfig.setDisplayRowCount(false);
tableConfig.setPageingEnabled(false);
tableConfig.setDownloadOffered(false);
// disable the default sorting for this table
tableConfig.setSortingEnabled(false);
tableCtr = new TableController(tableConfig, ureq, getWindowControl(), trans);
listenTo(tableCtr);
// dummy header key, won't be used since setDisplayTableHeader is set to
// false
tableCtr.addColumnDescriptor(new DefaultColumnDescriptor("groupsPortlet.bgname", 0, CMD_LAUNCH, trans.getLocale()));
tableCtr.addColumnDescriptor(new DefaultColumnDescriptor("groupsPortlet.type", 1, null, trans.getLocale(), ColumnDescriptor.ALIGNMENT_RIGHT));
this.sortingCriteria = getPersistentSortingConfiguration(ureq);
reloadModel(this.sortingCriteria);
this.groupsVC.put("table", tableCtr.getInitialComponent());
panel = this.putInitialPanel(groupsVC);
// register for businessgroup type events
CoordinatorManager.getInstance().getCoordinator().getEventBus().registerFor(this, ureq.getIdentity(), OresHelper.lookupType(BusinessGroup.class));
}
/**
 * Gets all groups for this portlet and wraps them into PortletEntry objects.
*
* @param ureq
* @return the PortletEntry list.
*/
private List<PortletEntry> getAllPortletEntries() {
final BusinessGroupManager bgm = BusinessGroupManagerImpl.getInstance();
groupList = bgm.findBusinessGroupsAttendedBy(null, identity, null);
groupList.addAll(bgm.findBusinessGroupsOwnedBy(null, identity, null));
final List<PortletEntry> entries = convertBusinessGroupToPortletEntryList(groupList);
return entries;
}
private List<PortletEntry> convertBusinessGroupToPortletEntryList(final List<BusinessGroup> items) {
final List<PortletEntry> convertedList = new ArrayList<PortletEntry>();
final Iterator<BusinessGroup> listIterator = items.iterator();
while (listIterator.hasNext()) {
convertedList.add(new GroupPortletEntry(listIterator.next()));
}
return convertedList;
}
@Override
protected void reloadModel(final SortingCriteria sortingCriteria) {
if (sortingCriteria.getSortingType() == SortingCriteria.AUTO_SORTING) {
final BusinessGroupManager bgm = BusinessGroupManagerImpl.getInstance();
groupList = bgm.findBusinessGroupsAttendedBy(null, identity, null);
groupList.addAll(bgm.findBusinessGroupsOwnedBy(null, identity, null));
groupList = getSortedList(groupList, sortingCriteria);
final List<PortletEntry> entries = convertBusinessGroupToPortletEntryList(groupList);
groupListModel = new GroupTableDataModel(entries);
tableCtr.setTableDataModel(groupListModel);
} else {
reloadModel(this.getPersistentManuallySortedItems());
}
}
@Override
protected void reloadModel(final List<PortletEntry> sortedItems) {
groupListModel = new GroupTableDataModel(sortedItems);
tableCtr.setTableDataModel(groupListModel);
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
*/
@Override
public void event(final UserRequest ureq, final Component source, final Event event) {
if (source == showAllLink) {
// activate group tab in top navigation
final DTabs dts = (DTabs) Windows.getWindows(ureq).getWindow(ureq).getAttribute("DTabs");
// was brasato:: getWindowControl().getDTabs().activateStatic(ureq, GroupsSite.class.getName(), null);
dts.activateStatic(ureq, GroupsSite.class.getName(), null);
}
}
/**
* @see org.olat.core.gui.control.ControllerEventListener#dispatchEvent(org.olat.core.gui.UserRequest, org.olat.core.gui.control.Controller,
* org.olat.core.gui.control.Event)
*/
@Override
public void event(final UserRequest ureq, final Controller source, final Event event) {
super.event(ureq, source, event);
if (source == tableCtr) {
if (event.getCommand().equals(Table.COMMANDLINK_ROWACTION_CLICKED)) {
final TableEvent te = (TableEvent) event;
final String actionid = te.getActionId();
if (actionid.equals(CMD_LAUNCH)) {
final int rowid = te.getRowId();
final BusinessGroup currBusinessGroup = groupListModel.getBusinessGroupAt(rowid);
final boolean isInBusinessGroup = BusinessGroupManagerImpl.getInstance().isIdentityInBusinessGroup(ureq.getIdentity(), currBusinessGroup);
if (isInBusinessGroup) {
BGControllerFactory.getInstance().createRunControllerAsTopNavTab(currBusinessGroup, ureq, getWindowControl(), false, null);
} else {
showInfo("groupsPortlet.no_member");
}
}
}
}
}
/**
* @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
*/
@Override
protected void doDispose() {
super.doDispose();
// de-register for businessgroup type events
CoordinatorManager.getInstance().getCoordinator().getEventBus().deregisterFor(this, OresHelper.lookupType(BusinessGroup.class));
// POST: all firing event for the source just deregistered are finished
// (listeners lock in EventAgency)
}
@Override
public void event(final Event event) {
if (event instanceof BusinessGroupModifiedEvent) {
final BusinessGroupModifiedEvent mev = (BusinessGroupModifiedEvent) event;
// TODO:fj:b this operation should not be too expensive since many other
// users have to be served also
// store the event and apply it only when the component validate event is
// fired.
// FIXME:fj:a check all such event that they do not say, execute more than
// 1-2 db queries : 100 listening users -> 100-200 db queries!
// TODO:fj:b concept of defering that event if this controller here is in
// the dispatchEvent - code (e.g. DefaultController implements
// GenericEventListener)
// -> to avoid rare race conditions like e.g. dispose->deregister and null
// controllers, but queue is still firing events
final boolean modified = mev.updateBusinessGroupList(groupList, ident);
if (modified) {
tableCtr.modelChanged();
}
}
}
/**
 * Retrieves the persistent sortingCriteria and the persistently stored manually sorted items, if any, creates the table model for the manual sorting, and
 * instantiates the PortletToolSortingControllerImpl.
*
* @param ureq
* @param wControl
* @return a PortletToolSortingControllerImpl instance.
*/
protected PortletToolSortingControllerImpl createSortingTool(final UserRequest ureq, final WindowControl wControl) {
if (portletToolsController == null) {
final List<PortletEntry> portletEntryList = getAllPortletEntries();
final PortletDefaultTableDataModel tableDataModel = new GroupsManualSortingTableDataModel(portletEntryList);
final List sortedItems = getPersistentManuallySortedItems();
portletToolsController = new PortletToolSortingControllerImpl(ureq, wControl, getTranslator(), sortingCriteria, tableDataModel, sortedItems);
portletToolsController.setConfigManualSorting(true);
portletToolsController.setConfigAutoSorting(true);
portletToolsController.addControllerListener(this);
}
return portletToolsController;
}
/**
* Retrieves the persistent manually sorted items for the current portlet.
*
* @param ureq
* @return
*/
private List<PortletEntry> getPersistentManuallySortedItems() {
final List<PortletEntry> portletEntryList = getAllPortletEntries();
return this.getPersistentManuallySortedItems(portletEntryList);
}
/**
 * Comparator implementation used for sorting BusinessGroup entries according to the input sortingCriteria.
* <p>
*
* @param sortingCriteria
* @return a Comparator for the input sortingCriteria
*/
@Override
protected Comparator getComparator(final SortingCriteria sortingCriteria) {
return new Comparator() {
@Override
public int compare(final Object o1, final Object o2) {
final BusinessGroup group1 = (BusinessGroup) o1;
final BusinessGroup group2 = (BusinessGroup) o2;
int comparisonResult = 0;
if (sortingCriteria.getSortingTerm() == SortingCriteria.ALPHABETICAL_SORTING) {
comparisonResult = collator.compare(group1.getName(), group2.getName());
} else if (sortingCriteria.getSortingTerm() == SortingCriteria.DATE_SORTING) {
comparisonResult = group1.getCreationDate().compareTo(group2.getCreationDate());
} else if (sortingCriteria.getSortingTerm() == SortingCriteria.TYPE_SORTING) {
comparisonResult = group1.getType().compareTo(group2.getType());
}
if (!sortingCriteria.isAscending()) {
// if not isAscending return (-comparisonResult)
return -comparisonResult;
}
return comparisonResult;
}
};
}
/**
* PortletDefaultTableDataModel implementation for the current portlet.
* <P>
* Initial Date: 10.12.2007 <br>
*
* @author Lavinia Dumitrescu
*/
private class GroupTableDataModel extends PortletDefaultTableDataModel {
public GroupTableDataModel(final List<PortletEntry> objects) {
super(objects, 2);
}
@Override
public Object getValueAt(final int row, final int col) {
final PortletEntry entry = getObject(row);
final BusinessGroup businessGroup = (BusinessGroup) entry.getValue();
switch (col) {
case 0:
String name = businessGroup.getName();
					name = StringEscapeUtils.escapeHtml(name);
return name;
case 1:
return getTranslator().translate(businessGroup.getType());
default:
return "ERROR";
}
}
public BusinessGroup getBusinessGroupAt(final int row) {
return (BusinessGroup) getObject(row).getValue();
}
}
/**
* PortletDefaultTableDataModel implementation for the manual sorting component.
* <P>
* Initial Date: 10.12.2007 <br>
*
* @author Lavinia Dumitrescu
*/
private class GroupsManualSortingTableDataModel extends PortletDefaultTableDataModel {
/**
* @param objects
* @param locale
*/
public GroupsManualSortingTableDataModel(final List<PortletEntry> objects) {
super(objects, 4);
}
/**
* @see org.olat.core.gui.components.table.TableDataModel#getValueAt(int, int)
*/
@Override
public final Object getValueAt(final int row, final int col) {
final PortletEntry portletEntry = getObject(row);
final BusinessGroup group = (BusinessGroup) portletEntry.getValue();
switch (col) {
case 0:
return group.getName();
case 1:
String description = group.getDescription();
description = FilterFactory.getHtmlTagsFilter().filter(description);
return (description == null ? "n/a" : description);
case 2:
final String resType = group.getType();
return (resType == null ? "n/a" : translate(resType));
case 3:
final Date date = group.getCreationDate();
// return DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, getTranslator().getLocale()).format(date);
return date;
default:
return "error";
}
}
}
private class GroupPortletEntry implements PortletEntry {
private final BusinessGroup value;
private final Long key;
public GroupPortletEntry(final BusinessGroup group) {
value = group;
key = group.getKey();
}
@Override
public Long getKey() {
return key;
}
@Override
public BusinessGroup getValue() {
return value;
}
}
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceResult;
/**
*
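 * Results are paginated via {@code nextToken}. The sketch below is illustrative only; it assumes an already
 * constructed {@code AmazonEC2} client named {@code ec2} and is not part of the generated API.
 *
 * <pre>
 * {@code
 * DescribeVpcClassicLinkDnsSupportRequest request = new DescribeVpcClassicLinkDnsSupportRequest();
 * DescribeVpcClassicLinkDnsSupportResult result;
 * do {
 *     result = ec2.describeVpcClassicLinkDnsSupport(request);
 *     for (ClassicLinkDnsSupport support : result.getVpcs()) {
 *         System.out.println(support.getVpcId() + " -> " + support.getClassicLinkDnsSupported());
 *     }
 *     // Feed the returned token back until it comes back null (no more pages).
 *     request.setNextToken(result.getNextToken());
 * } while (result.getNextToken() != null);
 * }
 * </pre>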
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeVpcClassicLinkDnsSupportResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
Cloneable {
/**
* <p>
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
* results to return.
* </p>
*/
private String nextToken;
/**
* <p>
* Information about the ClassicLink DNS support status of the VPCs.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<ClassicLinkDnsSupport> vpcs;
/**
* <p>
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
* results to return.
* </p>
*
* @param nextToken
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
* more results to return.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
* results to return.
* </p>
*
* @return The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
* more results to return.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more
* results to return.
* </p>
*
* @param nextToken
* The token to use to retrieve the next page of results. This value is <code>null</code> when there are no
* more results to return.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeVpcClassicLinkDnsSupportResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* <p>
* Information about the ClassicLink DNS support status of the VPCs.
* </p>
*
* @return Information about the ClassicLink DNS support status of the VPCs.
*/
public java.util.List<ClassicLinkDnsSupport> getVpcs() {
if (vpcs == null) {
vpcs = new com.amazonaws.internal.SdkInternalList<ClassicLinkDnsSupport>();
}
return vpcs;
}
/**
* <p>
* Information about the ClassicLink DNS support status of the VPCs.
* </p>
*
* @param vpcs
* Information about the ClassicLink DNS support status of the VPCs.
*/
public void setVpcs(java.util.Collection<ClassicLinkDnsSupport> vpcs) {
if (vpcs == null) {
this.vpcs = null;
return;
}
this.vpcs = new com.amazonaws.internal.SdkInternalList<ClassicLinkDnsSupport>(vpcs);
}
/**
* <p>
* Information about the ClassicLink DNS support status of the VPCs.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setVpcs(java.util.Collection)} or {@link #withVpcs(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param vpcs
* Information about the ClassicLink DNS support status of the VPCs.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeVpcClassicLinkDnsSupportResult withVpcs(ClassicLinkDnsSupport... vpcs) {
if (this.vpcs == null) {
setVpcs(new com.amazonaws.internal.SdkInternalList<ClassicLinkDnsSupport>(vpcs.length));
}
for (ClassicLinkDnsSupport ele : vpcs) {
this.vpcs.add(ele);
}
return this;
}
/**
* <p>
* Information about the ClassicLink DNS support status of the VPCs.
* </p>
*
* @param vpcs
* Information about the ClassicLink DNS support status of the VPCs.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeVpcClassicLinkDnsSupportResult withVpcs(java.util.Collection<ClassicLinkDnsSupport> vpcs) {
setVpcs(vpcs);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getVpcs() != null)
sb.append("Vpcs: ").append(getVpcs());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeVpcClassicLinkDnsSupportResult == false)
return false;
DescribeVpcClassicLinkDnsSupportResult other = (DescribeVpcClassicLinkDnsSupportResult) obj;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getVpcs() == null ^ this.getVpcs() == null)
return false;
if (other.getVpcs() != null && other.getVpcs().equals(this.getVpcs()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getVpcs() == null) ? 0 : getVpcs().hashCode());
return hashCode;
}
@Override
public DescribeVpcClassicLinkDnsSupportResult clone() {
try {
return (DescribeVpcClassicLinkDnsSupportResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
|
|
// ========================================================================
// Copyright 2007 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.mortbay.jetty.plus.jaas.ldap;
import org.apache.directory.shared.ldap.util.Base64;
import org.apache.directory.shared.ldap.exception.LdapNameNotFoundException;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.apacheds.ApacheDs;
import org.mortbay.jetty.security.Credential;
import org.mortbay.jetty.plus.jaas.callback.DefaultCallbackHandler;
import org.mortbay.jetty.plus.jaas.JAASRole;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.security.auth.Subject;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class LdapLoginModuleTest extends PlexusTestCase
{
private ApacheDs apacheDs;
private String suffix;
protected void setUp()
throws Exception
{
super.setUp();
apacheDs = (ApacheDs)lookup(ApacheDs.ROLE, "test" );
suffix = apacheDs.addSimplePartition( "test", new String[]{"jetty", "mortbay", "org"} ).getSuffix();
System.out.println( "DN Suffix: " + suffix );
apacheDs.startServer();
makeUsers();
}
protected void tearDown() throws Exception
{
InitialDirContext context = apacheDs.getAdminContext();
unbind(context, createDn( "ldap-admin" ) );
unbind(context, createDn( "jesse" ) );
apacheDs.stopServer();
super.tearDown();
}
public void testNothing() throws Exception
{
}
public void testBindingAuth() throws Exception
{
LdapLoginModule lm = new LdapLoginModule();
Map options = new HashMap();
options.put( "hostname", "localhost" );
options.put( "port", "10390" );
options.put( "contextFactory", "com.sun.jndi.ldap.LdapCtxFactory" );
options.put( "bindDn", "uid=admin,ou=system" );
options.put( "bindPassword", "secret" );
options.put( "userBaseDn", "dc=jetty,dc=mortbay,dc=org" );
options.put( "roleBaseDn", "dc=jetty,dc=mortbay,dc=org" );
options.put( "roleNameAttribute", "cn" );
options.put( "forceBindingLogin", "true" );
options.put("useLdaps", "false");
options.put( "debug", "true" );
Subject subject = new Subject();
DefaultCallbackHandler callbackHandler = new DefaultCallbackHandler();
callbackHandler.setUserName("jesse");
callbackHandler.setCredential("foo");
lm.initialize( subject, callbackHandler, null, options );
assertTrue( lm.bindingLogin( "jesse", "foo" ) );
assertTrue( lm.login() );
assertTrue( lm.commit() );
Set roles = subject.getPrincipals(JAASRole.class);
assertEquals(1, roles.size());
assertTrue(roles.contains(new JAASRole("ldap-admin")));
}
/*
public void testCredentialAuth() throws Exception
{
LdapLoginModule lm = new LdapLoginModule();
Map options = new HashMap();
options.put( "hostname", "localhost" );
options.put( "port", "10390" );
options.put( "contextFactory", "com.sun.jndi.ldap.LdapCtxFactory" );
options.put( "bindDn", "uid=admin,ou=system" );
options.put( "bindPassword", "secret" );
options.put( "userBaseDn", "dc=jetty,dc=mortbay,dc=org" );
options.put( "forceBindingLogin", "false" );
lm.initialize( null, null, null, options );
UserInfo info = lm.getUserInfo( "jesse" );
assertTrue( lm.credentialLogin( info, "foo" ) );
}
*/
// -----------------------------------------------------------------------
// Private
// -----------------------------------------------------------------------
private void makeUsers() throws Exception
{
InitialDirContext context = apacheDs.getAdminContext();
unbind(context, createDn("ldap-admin"));
unbind(context, createDn( "jesse" ) );
String jesse = bindUserObject( context, "jesse" );
bindGroupObject( context, "ldap-admin", jesse );
assertExist( context, "cn", "jesse" );
}
private String bindUserObject(DirContext context, String cn)
throws Exception
{
Attributes attributes = new BasicAttributes(true);
BasicAttribute objectClass = new BasicAttribute("objectClass");
objectClass.add("top");
objectClass.add("inetOrgPerson");
objectClass.add("person");
objectClass.add("organizationalperson");
attributes.put(objectClass);
attributes.put("cn", cn);
attributes.put("sn", "foo");
attributes.put("mail", "foo");
//System.out.println("setting password to : " + LdapLoginModule.convertCredentialJettyToLdap( Credential.MD5.digest( "foo" ) ));
String pwd = Credential.MD5.digest( "foo" );
pwd = pwd.substring("MD5:".length(), pwd.length() );
//System.out.println(Credential.MD5.digest( "foo" ));
//System.out.println(pwd);
//System.out.println(Base64.encode( pwd.getBytes("ISO-8859-1") ));
//System.out.println(Base64.encode( pwd.getBytes("UTF-8") ));
attributes.put("userPassword", "{MD5}" + doStuff(pwd) );
//attributes.put( "userPassword", "foo");
attributes.put("givenName", "foo");
String dn = createDn(cn);
context.createSubcontext(dn, attributes );
return dn;
}
private void bindGroupObject(DirContext context, String cn, String initialMember)
throws Exception
{
Attributes attributes = new BasicAttributes(true);
BasicAttribute objectClass = new BasicAttribute("objectClass");
objectClass.add("top");
objectClass.add("groupOfUniqueNames");
attributes.put(objectClass);
attributes.put("cn", cn);
attributes.put("uniqueMember", initialMember);
context.createSubcontext( createDn( cn ), attributes );
}
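    // The helper below takes the hex-encoded MD5 digest produced by Credential.MD5.digest
    // (with the "MD5:" prefix already stripped), decodes the hex string back into raw bytes,
    // and Base64-encodes those bytes so the value can be stored as an LDAP "{MD5}..."
    // userPassword attribute.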
private String doStuff( String hpwd )
{
String HEX_VAL = "0123456789abcdef";
byte[] bpwd = new byte[hpwd.length()>>1];
byte b = 0;
boolean high = true;
int pos = 0;
//for ( char c:hpwd.toCharArray() )
for ( int i = 0 ; i < hpwd.toCharArray().length; ++i )
{
char c = hpwd.toCharArray()[i];
if ( high )
{
high = false;
b = (byte)HEX_VAL.indexOf( c );
}
else
{
high = true;
b <<= 4;
b += HEX_VAL.indexOf( c );
bpwd[pos++] = b;
}
}
return new String( Base64.encode( bpwd ) );
}
private String createDn( String cn )
{
return "cn=" + cn + "," + suffix;
}
private void assertExist( DirContext context, String attribute, String value ) throws NamingException
{
SearchControls ctls = new SearchControls();
ctls.setDerefLinkFlag( true );
ctls.setSearchScope( SearchControls.ONELEVEL_SCOPE );
ctls.setReturningAttributes( new String[] { "*" } );
BasicAttributes matchingAttributes = new BasicAttributes();
matchingAttributes.put( attribute, value );
BasicAttribute objectClass = new BasicAttribute("objectClass");
objectClass.add("inetOrgPerson");
matchingAttributes.put(objectClass);
NamingEnumeration results = context.search( suffix, matchingAttributes );
// NamingEnumeration<SearchResult> results = context.search( suffix, "(" + attribute + "=" + value + ")", ctls
// );
assertTrue( results.hasMoreElements() );
SearchResult result = (SearchResult)results.nextElement();
Attributes attrs = result.getAttributes();
Attribute testAttr = attrs.get( attribute );
assertEquals( value, testAttr.get() );
}
private void unbind(InitialDirContext context, String dn) throws NamingException {
try {
context.unbind(dn);
} catch (LdapNameNotFoundException e) {
// ignore
}
}
}
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.server.job.controllers;
import android.app.job.JobInfo;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.INetworkPolicyListener;
import android.net.NetworkInfo;
import android.net.NetworkPolicyManager;
import android.os.UserHandle;
import android.util.Slog;
import com.android.internal.annotations.GuardedBy;
import com.android.server.job.JobSchedulerService;
import com.android.server.job.StateChangedListener;
import java.io.PrintWriter;
import java.util.ArrayList;
/**
* Handles changes in connectivity.
* <p>
 * Each app can have a different default network or different connectivity
* status due to user-requested network policies, so we need to check
* constraints on a per-UID basis.
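 * <p>
 * For illustration only (not part of this controller): a job scheduled with a network
 * constraint that this controller would then track, assuming some {@code Context} named
 * {@code context} and a hypothetical {@code JobService} subclass {@code MyJobService}.
 * </p>
 *
 * <pre>
 * {@code
 * JobInfo job = new JobInfo.Builder(42, new ComponentName(context, MyJobService.class))
 *         .setRequiredNetworkType(JobInfo.NETWORK_TYPE_UNMETERED)
 *         .build();
 * context.getSystemService(JobScheduler.class).schedule(job);
 * }
 * </pre>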
*/
public class ConnectivityController extends StateController implements
ConnectivityManager.OnNetworkActiveListener {
private static final String TAG = "JobScheduler.Conn";
private final ConnectivityManager mConnManager;
private final NetworkPolicyManager mNetPolicyManager;
@GuardedBy("mLock")
private final ArrayList<JobStatus> mTrackedJobs = new ArrayList<JobStatus>();
/** Singleton. */
private static ConnectivityController mSingleton;
private static Object sCreationLock = new Object();
public static ConnectivityController get(JobSchedulerService jms) {
synchronized (sCreationLock) {
if (mSingleton == null) {
mSingleton = new ConnectivityController(jms, jms.getContext(), jms.getLock());
}
return mSingleton;
}
}
private ConnectivityController(StateChangedListener stateChangedListener, Context context,
Object lock) {
super(stateChangedListener, context, lock);
mConnManager = mContext.getSystemService(ConnectivityManager.class);
mNetPolicyManager = mContext.getSystemService(NetworkPolicyManager.class);
final IntentFilter intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
mContext.registerReceiverAsUser(
mConnectivityReceiver, UserHandle.SYSTEM, intentFilter, null, null);
mNetPolicyManager.registerListener(mNetPolicyListener);
}
@Override
public void maybeStartTrackingJobLocked(JobStatus jobStatus, JobStatus lastJob) {
if (jobStatus.hasConnectivityConstraint() || jobStatus.hasUnmeteredConstraint()
|| jobStatus.hasNotRoamingConstraint()) {
updateConstraintsSatisfied(jobStatus);
mTrackedJobs.add(jobStatus);
}
}
@Override
public void maybeStopTrackingJobLocked(JobStatus jobStatus, JobStatus incomingJob,
boolean forUpdate) {
if (jobStatus.hasConnectivityConstraint() || jobStatus.hasUnmeteredConstraint()
|| jobStatus.hasNotRoamingConstraint()) {
mTrackedJobs.remove(jobStatus);
}
}
private boolean updateConstraintsSatisfied(JobStatus jobStatus) {
final boolean ignoreBlocked = (jobStatus.getFlags() & JobInfo.FLAG_WILL_BE_FOREGROUND) != 0;
final NetworkInfo info = mConnManager.getActiveNetworkInfoForUid(jobStatus.getSourceUid(),
ignoreBlocked);
final boolean connected = (info != null) && info.isConnected();
final boolean unmetered = connected && !info.isMetered();
final boolean notRoaming = connected && !info.isRoaming();
boolean changed = false;
changed |= jobStatus.setConnectivityConstraintSatisfied(connected);
changed |= jobStatus.setUnmeteredConstraintSatisfied(unmetered);
changed |= jobStatus.setNotRoamingConstraintSatisfied(notRoaming);
return changed;
}
/**
* Update all jobs tracked by this controller.
*
* @param uid only update jobs belonging to this UID, or {@code -1} to
* update all tracked jobs.
*/
private void updateTrackedJobs(int uid) {
synchronized (mLock) {
boolean changed = false;
for (int i = 0; i < mTrackedJobs.size(); i++) {
final JobStatus js = mTrackedJobs.get(i);
if (uid == -1 || uid == js.getSourceUid()) {
changed |= updateConstraintsSatisfied(js);
}
}
if (changed) {
mStateChangedListener.onControllerStateChanged();
}
}
}
/**
* We know the network has just come up. We want to run any jobs that are ready.
*/
@Override
public synchronized void onNetworkActive() {
synchronized (mLock) {
for (int i = 0; i < mTrackedJobs.size(); i++) {
final JobStatus js = mTrackedJobs.get(i);
if (js.isReady()) {
if (DEBUG) {
Slog.d(TAG, "Running " + js + " due to network activity.");
}
mStateChangedListener.onRunJobNow(js);
}
}
}
}
private BroadcastReceiver mConnectivityReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
updateTrackedJobs(-1);
}
};
private INetworkPolicyListener mNetPolicyListener = new INetworkPolicyListener.Stub() {
@Override
public void onUidRulesChanged(int uid, int uidRules) {
updateTrackedJobs(uid);
}
@Override
public void onMeteredIfacesChanged(String[] meteredIfaces) {
updateTrackedJobs(-1);
}
@Override
public void onRestrictBackgroundChanged(boolean restrictBackground) {
updateTrackedJobs(-1);
}
@Override
public void onRestrictBackgroundWhitelistChanged(int uid, boolean whitelisted) {
updateTrackedJobs(uid);
}
@Override
public void onRestrictBackgroundBlacklistChanged(int uid, boolean blacklisted) {
updateTrackedJobs(uid);
}
};
@Override
public void dumpControllerStateLocked(PrintWriter pw, int filterUid) {
pw.println("Connectivity.");
pw.print("Tracking ");
pw.print(mTrackedJobs.size());
pw.println(":");
for (int i = 0; i < mTrackedJobs.size(); i++) {
final JobStatus js = mTrackedJobs.get(i);
if (js.shouldDump(filterUid)) {
pw.print(" #");
js.printUniqueId(pw);
pw.print(" from ");
UserHandle.formatUid(pw, js.getSourceUid());
pw.print(": C="); pw.print(js.hasConnectivityConstraint());
pw.print(": UM="); pw.print(js.hasUnmeteredConstraint());
pw.print(": NR="); pw.println(js.hasNotRoamingConstraint());
}
}
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
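 * <p>
 * Illustrative usage sketch only; it assumes an already built {@code AWSGlue} client named
 * {@code glue} and a schema definition {@code String} named {@code schemaDefinitionJson}, and is
 * not part of the generated request class:
 * </p>
 *
 * <pre>
 * {@code
 * CheckSchemaVersionValidityRequest request = new CheckSchemaVersionValidityRequest()
 *         .withDataFormat(DataFormat.AVRO)
 *         .withSchemaDefinition(schemaDefinitionJson);
 * System.out.println(glue.checkSchemaVersionValidity(request));
 * }
 * </pre>
 *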
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/CheckSchemaVersionValidity" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CheckSchemaVersionValidityRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* </p>
*/
private String dataFormat;
/**
* <p>
* The definition of the schema that has to be validated.
* </p>
*/
private String schemaDefinition;
/**
* <p>
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* </p>
*
* @param dataFormat
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* @see DataFormat
*/
public void setDataFormat(String dataFormat) {
this.dataFormat = dataFormat;
}
/**
* <p>
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* </p>
*
* @return The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* @see DataFormat
*/
public String getDataFormat() {
return this.dataFormat;
}
/**
* <p>
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* </p>
*
* @param dataFormat
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DataFormat
*/
public CheckSchemaVersionValidityRequest withDataFormat(String dataFormat) {
setDataFormat(dataFormat);
return this;
}
/**
* <p>
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* </p>
*
* @param dataFormat
* The data format of the schema definition. Currently <code>AVRO</code>, <code>JSON</code> and
* <code>PROTOBUF</code> are supported.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DataFormat
*/
public CheckSchemaVersionValidityRequest withDataFormat(DataFormat dataFormat) {
this.dataFormat = dataFormat.toString();
return this;
}
/**
* <p>
* The definition of the schema that has to be validated.
* </p>
*
* @param schemaDefinition
* The definition of the schema that has to be validated.
*/
public void setSchemaDefinition(String schemaDefinition) {
this.schemaDefinition = schemaDefinition;
}
/**
* <p>
* The definition of the schema that has to be validated.
* </p>
*
* @return The definition of the schema that has to be validated.
*/
public String getSchemaDefinition() {
return this.schemaDefinition;
}
/**
* <p>
* The definition of the schema that has to be validated.
* </p>
*
* @param schemaDefinition
* The definition of the schema that has to be validated.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CheckSchemaVersionValidityRequest withSchemaDefinition(String schemaDefinition) {
setSchemaDefinition(schemaDefinition);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDataFormat() != null)
sb.append("DataFormat: ").append(getDataFormat()).append(",");
if (getSchemaDefinition() != null)
sb.append("SchemaDefinition: ").append(getSchemaDefinition());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CheckSchemaVersionValidityRequest == false)
return false;
CheckSchemaVersionValidityRequest other = (CheckSchemaVersionValidityRequest) obj;
if (other.getDataFormat() == null ^ this.getDataFormat() == null)
return false;
if (other.getDataFormat() != null && other.getDataFormat().equals(this.getDataFormat()) == false)
return false;
if (other.getSchemaDefinition() == null ^ this.getSchemaDefinition() == null)
return false;
if (other.getSchemaDefinition() != null && other.getSchemaDefinition().equals(this.getSchemaDefinition()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDataFormat() == null) ? 0 : getDataFormat().hashCode());
hashCode = prime * hashCode + ((getSchemaDefinition() == null) ? 0 : getSchemaDefinition().hashCode());
return hashCode;
}
@Override
public CheckSchemaVersionValidityRequest clone() {
return (CheckSchemaVersionValidityRequest) super.clone();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.management.CacheServerMXBean;
import org.apache.geode.management.ClientHealthStatus;
import org.apache.geode.management.ManagementService;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.LogWrapper;
import org.apache.geode.management.internal.cli.functions.ContunuousQueryFunction;
import org.apache.geode.management.internal.cli.functions.ContunuousQueryFunction.ClientInfo;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.CompositeResultData;
import org.apache.geode.management.internal.cli.result.CompositeResultData.SectionResultData;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.cli.result.TabularResultData;
import org.apache.geode.management.internal.cli.shell.Gfsh;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.management.ObjectName;
/**
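 * Gfsh commands for listing and describing cache clients.
 * <p>
 * Illustrative gfsh usage (option spelling follows {@link CliStrings#DESCRIBE_CLIENT__ID}):
 * </p>
 *
 * <pre>
 * gfsh&gt;list clients
 * gfsh&gt;describe client --clientID=&lt;client-membership-id&gt;
 * </pre>
 *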
* @since GemFire 8.0
*/
public class ClientCommands implements CommandMarker {
private Gfsh getGfsh() {
return Gfsh.getCurrentInstance();
}
@CliCommand(value = CliStrings.LIST_CLIENTS, help = CliStrings.LIST_CLIENT__HELP)
@CliMetaData(relatedTopic = {CliStrings.TOPIC_CLIENT})
@ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
public Result listClient() {
Result result = null;
try {
CompositeResultData compositeResultData = ResultBuilder.createCompositeResultData();
SectionResultData section = compositeResultData.addSection("section1");
TabularResultData resultTable = section.addTable("TableForClientList");
String headerText = "ClientList";
resultTable = resultTable.setHeader(headerText);
InternalCache cache = getCache();
ManagementService service = ManagementService.getExistingManagementService(cache);
ObjectName[] cacheServers = service.getDistributedSystemMXBean().listCacheServerObjectNames();
if (cacheServers.length == 0) {
return ResultBuilder.createGemFireErrorResult(
CliStrings.format(CliStrings.LIST_CLIENT_COULD_NOT_RETRIEVE_SERVER_LIST));
}
Map<String, List<String>> clientServerMap = new HashMap<String, List<String>>();
for (ObjectName objName : cacheServers) {
CacheServerMXBean serverMbean = service.getMBeanInstance(objName, CacheServerMXBean.class);
String[] listOfClient = serverMbean.getClientIds();
if (listOfClient == null || listOfClient.length == 0) {
continue;
}
      for (String clientName : listOfClient) {
        String serverDetails = "member=" + objName.getKeyProperty("member") + ",port="
            + objName.getKeyProperty("port");
        if (clientServerMap.containsKey(clientName)) {
          List<String> listServers = clientServerMap.get(clientName);
          listServers.add(serverDetails);
        } else {
          List<String> listServer = new ArrayList<String>();
          listServer.add(serverDetails);
          clientServerMap.put(clientName, listServer);
}
}
}
if (clientServerMap.size() == 0) {
return ResultBuilder.createGemFireErrorResult(
CliStrings.format(CliStrings.LIST_COULD_NOT_RETRIEVE_CLIENT_LIST));
}
String memberSeparator = "; ";
Iterator<Entry<String, List<String>>> it = clientServerMap.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, List<String>> pairs = (Map.Entry<String, List<String>>) it.next();
String client = (String) pairs.getKey();
List<String> servers = (List<String>) pairs.getValue();
StringBuilder serverListForClient = new StringBuilder();
int serversSize = servers.size();
int i = 0;
for (String server : servers) {
serverListForClient.append(server);
if (i < serversSize - 1) {
serverListForClient.append(memberSeparator);
}
i++;
}
resultTable.accumulate(CliStrings.LIST_CLIENT_COLUMN_Clients, client);
resultTable.accumulate(CliStrings.LIST_CLIENT_COLUMN_SERVERS,
serverListForClient.toString());
}
result = ResultBuilder.buildResult(compositeResultData);
} catch (Exception e) {
LogWrapper.getInstance()
.warning("Error in list clients. stack trace" + CliUtil.stackTraceAsString(e));
result = ResultBuilder.createGemFireErrorResult(CliStrings
.format(CliStrings.LIST_CLIENT_COULD_NOT_RETRIEVE_CLIENT_LIST_0, e.getMessage()));
}
LogWrapper.getInstance().info("list client result " + result);
return result;
}
private InternalCache getCache() {
return (InternalCache) CacheFactory.getAnyInstance();
}
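  /**
   * Describes a single client: its health statistics as reported by the hosting cache server's
   * {@link CacheServerMXBean}, whether it is durable, its primary and secondary servers, and
   * per-pool statistics.
   *
   * @param clientId the ID of the client to describe, as reported by
   *        {@link CacheServerMXBean#getClientIds()}; surrounding quotes and a trailing semicolon
   *        are stripped before the lookup
   */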
@CliCommand(value = CliStrings.DESCRIBE_CLIENT, help = CliStrings.DESCRIBE_CLIENT__HELP)
@CliMetaData(relatedTopic = {CliStrings.TOPIC_CLIENT})
@ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
public Result describeClient(@CliOption(key = CliStrings.DESCRIBE_CLIENT__ID, mandatory = true,
help = CliStrings.DESCRIBE_CLIENT__ID__HELP) String clientId) {
Result result = null;
    // Strip a leading quote and a trailing quote (optionally followed by a semicolon) that gfsh
    // may leave around the option value.
    if (clientId.startsWith("\"")) {
      clientId = clientId.substring(1);
    }
    if (clientId.endsWith("\";")) {
      clientId = clientId.substring(0, clientId.length() - 2);
    }
    if (clientId.endsWith("\"")) {
      clientId = clientId.substring(0, clientId.length() - 1);
    }
try {
CompositeResultData compositeResultData = ResultBuilder.createCompositeResultData();
SectionResultData sectionResult = compositeResultData.addSection("InfoSection");
InternalCache cache = getCache();
ManagementService service = ManagementService.getExistingManagementService(cache);
ObjectName[] cacheServers = service.getDistributedSystemMXBean().listCacheServerObjectNames();
if (cacheServers.length == 0) {
return ResultBuilder.createGemFireErrorResult(
CliStrings.format(CliStrings.DESCRIBE_CLIENT_COULD_NOT_RETRIEVE_SERVER_LIST));
}
ClientHealthStatus clientHealthStatus = null;
for (ObjectName objName : cacheServers) {
CacheServerMXBean serverMbean = service.getMBeanInstance(objName, CacheServerMXBean.class);
        List<String> listOfClient =
            new ArrayList<String>(Arrays.asList(serverMbean.getClientIds()));
if (listOfClient.contains(clientId)) {
if (clientHealthStatus == null) {
try {
clientHealthStatus = serverMbean.showClientStats(clientId);
if (clientHealthStatus == null) {
return ResultBuilder.createGemFireErrorResult(CliStrings.format(
CliStrings.DESCRIBE_CLIENT_COULD_NOT_RETRIEVE_STATS_FOR_CLIENT_0, clientId));
}
} catch (Exception eee) {
return ResultBuilder.createGemFireErrorResult(CliStrings.format(
CliStrings.DESCRIBE_CLIENT_COULD_NOT_RETRIEVE_STATS_FOR_CLIENT_0_REASON_1,
clientId, eee.getMessage()));
}
}
}
}
if (clientHealthStatus == null) {
return ResultBuilder.createGemFireErrorResult(
CliStrings.format(CliStrings.DESCRIBE_CLIENT__CLIENT__ID__NOT__FOUND__0, clientId));
}
Set<DistributedMember> dsMembers = CliUtil.getAllMembers(cache);
String isDurable = null;
List<String> primaryServers = new ArrayList<String>();
List<String> secondaryServers = new ArrayList<String>();
if (dsMembers.size() > 0) {
ContunuousQueryFunction contunuousQueryFunction = new ContunuousQueryFunction();
FunctionService.registerFunction(contunuousQueryFunction);
List<?> resultList = (List<?>) CliUtil
.executeFunction(contunuousQueryFunction, clientId, dsMembers).getResult();
for (int i = 0; i < resultList.size(); i++) {
try {
Object object = resultList.get(i);
if (object instanceof Throwable) {
LogWrapper.getInstance().warning(
"Exception in Describe Client " + ((Throwable) object).getMessage(),
((Throwable) object));
continue;
}
if (object != null) {
ClientInfo objectResult = (ClientInfo) object;
isDurable = objectResult.isDurable;
if (objectResult.primaryServer != null && objectResult.primaryServer.length() > 0) {
if (primaryServers.size() == 0) {
primaryServers.add(objectResult.primaryServer);
} else {
primaryServers.add(" ,");
primaryServers.add(objectResult.primaryServer);
}
}
if (objectResult.secondaryServer != null
&& objectResult.secondaryServer.length() > 0) {
if (secondaryServers.size() == 0) {
secondaryServers.add(objectResult.secondaryServer);
} else {
secondaryServers.add(" ,");
secondaryServers.add(objectResult.secondaryServer);
}
}
}
} catch (Exception e) {
LogWrapper.getInstance().info(CliStrings.DESCRIBE_CLIENT_ERROR_FETCHING_STATS_0 + " :: "
+ CliUtil.stackTraceAsString(e));
return ResultBuilder.createGemFireErrorResult(CliStrings
.format(CliStrings.DESCRIBE_CLIENT_ERROR_FETCHING_STATS_0, e.getMessage()));
}
}
buildTableResult(sectionResult, clientHealthStatus, isDurable, primaryServers,
secondaryServers);
result = ResultBuilder.buildResult(compositeResultData);
} else {
return ResultBuilder.createGemFireErrorResult(CliStrings.DESCRIBE_CLIENT_NO_MEMBERS);
}
} catch (Exception e) {
LogWrapper.getInstance()
.info("Error in decribe clients. stack trace" + CliUtil.stackTraceAsString(e));
result = ResultBuilder.createGemFireErrorResult(CliStrings
.format(CliStrings.DESCRIBE_CLIENT_COULD_NOT_RETRIEVE_CLIENT_0, e.getMessage()));
}
LogWrapper.getInstance().info("decribe client result " + result);
return result;
}
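  /**
   * Populates the given section with the fields of {@code clientHealthStatus} (primary/secondary
   * servers, CPU, cache listener calls, gets, misses, puts, threads, process CPU time, queue size,
   * up time, durability) and adds one table per connection pool with the min/max connections,
   * redundancy, and CQ count parsed from that pool's stats string.
   */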
private void buildTableResult(SectionResultData sectionResult,
ClientHealthStatus clientHealthStatus, String isDurable, List<String> primaryServers,
List<String> secondaryServers) {
StringBuilder primServers = new StringBuilder();
for (String primaryServer : primaryServers) {
primServers.append(primaryServer);
}
StringBuilder secondServers = new StringBuilder();
for (String secondServer : secondaryServers) {
secondServers.append(secondServer);
}
if (clientHealthStatus != null) {
sectionResult.addSeparator('-');
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_PRIMARY_SERVERS, primServers);
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_SECONDARY_SERVERS, secondServers);
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_CPU, clientHealthStatus.getCpus());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_LISTNER_CALLS,
clientHealthStatus.getNumOfCacheListenerCalls());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_GETS,
clientHealthStatus.getNumOfGets());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_MISSES,
clientHealthStatus.getNumOfMisses());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_PUTS,
clientHealthStatus.getNumOfPuts());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_THREADS,
clientHealthStatus.getNumOfThreads());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_PROCESS_CPU_TIME,
clientHealthStatus.getProcessCpuTime());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_QUEUE_SIZE,
clientHealthStatus.getQueueSize());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_UP_TIME,
clientHealthStatus.getUpTime());
sectionResult.addData(CliStrings.DESCRIBE_CLIENT_COLUMN_DURABLE, isDurable);
sectionResult.addSeparator('-');
Map<String, String> poolStats = clientHealthStatus.getPoolStats();
if (poolStats.size() > 0) {
Iterator<Entry<String, String>> it = poolStats.entrySet().iterator();
while (it.hasNext()) {
Entry<String, String> entry = it.next();
TabularResultData poolStatsResultTable =
sectionResult.addTable("Pool Stats For Pool Name = " + entry.getKey());
poolStatsResultTable.setHeader("Pool Stats For Pool Name = " + entry.getKey());
String poolStatsStr = entry.getValue();
          String[] str = poolStatsStr.split(";");
          LogWrapper.getInstance().info("describe client clientHealthStatus min conn = "
              + str[0].substring(str[0].indexOf("=") + 1));
          LogWrapper.getInstance().info("describe client clientHealthStatus max conn = "
              + str[1].substring(str[1].indexOf("=") + 1));
          LogWrapper.getInstance().info("describe client clientHealthStatus redundancy = "
              + str[2].substring(str[2].indexOf("=") + 1));
          LogWrapper.getInstance().info("describe client clientHealthStatus CQs = "
              + str[3].substring(str[3].indexOf("=") + 1));
poolStatsResultTable.accumulate(CliStrings.DESCRIBE_CLIENT_MIN_CONN,
str[0].substring(str[0].indexOf("=") + 1));
poolStatsResultTable.accumulate(CliStrings.DESCRIBE_CLIENT_MAX_CONN,
str[1].substring(str[1].indexOf("=") + 1));
poolStatsResultTable.accumulate(CliStrings.DESCRIBE_CLIENT_REDUDANCY,
str[2].substring(str[2].indexOf("=") + 1));
poolStatsResultTable.accumulate(CliStrings.DESCRIBE_CLIENT_CQs,
str[3].substring(str[3].indexOf("=") + 1));
}
}
}
}
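  /**
   * Availability indicator for the client commands: always available on a server, and available in
   * gfsh only when connected to a running system.
   */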
@CliAvailabilityIndicator({CliStrings.LIST_CLIENTS, CliStrings.DESCRIBE_CLIENT})
public boolean clientCommandsAvailable() {
boolean isAvailable = true; // always available on server
if (CliUtil.isGfshVM()) { // in gfsh check if connected
isAvailable = getGfsh() != null && getGfsh().isConnectedAndReady();
}
return isAvailable;
}
}
|
|
package org.batfish.datamodel.matchers;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import javax.annotation.Nonnull;
import org.batfish.common.Warning;
import org.batfish.common.Warnings;
import org.batfish.datamodel.DefinedStructureInfo;
import org.batfish.datamodel.answers.ConvertConfigurationAnswerElement;
import org.batfish.vendor.StructureType;
import org.batfish.vendor.StructureUsage;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeDiagnosingMatcher;
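/**
 * Hamcrest matcher implementations for making assertions about the contents of a
 * {@link ConvertConfigurationAnswerElement}: red-flag warnings, defined structures, structure
 * references, and undefined references.
 */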
final class ConvertConfigurationAnswerElementMatchers {
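  /**
   * Matches when the answer element records, for the given hostname, a red-flag warning whose text
   * satisfies the provided matcher.
   */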
static final class HasRedFlagWarning
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final @Nonnull Matcher<? super String> _subMatcher;
private final @Nonnull String _hostname;
HasRedFlagWarning(@Nonnull String hostname, @Nonnull Matcher<? super String> subMatcher) {
_hostname = hostname;
_subMatcher = subMatcher;
}
@Override
public void describeTo(Description description) {
description
.appendText("A ConvertConfigurationAnswerElement with a red-flag warning with text:")
.appendDescriptionOf(_subMatcher);
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
Warnings warnings = item.getWarnings().get(_hostname);
if (warnings == null) {
mismatchDescription.appendText(String.format("No warnings for host '%s'", _hostname));
return false;
}
if (warnings.getRedFlagWarnings().stream()
.map(Warning::getText)
.noneMatch(_subMatcher::matches)) {
mismatchDescription.appendText(
String.format("No red-flag warnings for host '%s' match", _hostname));
return false;
}
return true;
}
}
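  /** Matches when the given file defines a structure of the given type and name. */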
static final class HasDefinedStructure
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final String _filename;
private final String _structureName;
private final String _type;
HasDefinedStructure(
@Nonnull String filename, @Nonnull StructureType type, @Nonnull String structureName) {
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
}
@Override
public void describeTo(Description description) {
description.appendText(
String.format(
"A ConvertConfigurationAnswerElement for which file '%s' has a defined structure "
+ "of type '%s' named '%s'",
_filename, _type, _structureName));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, DefinedStructureInfo>>> byFile =
item.getDefinedStructures();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structures", _filename));
return false;
}
SortedMap<String, SortedMap<String, DefinedStructureInfo>> byType = byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structure of type '%s'", _filename, _type));
return false;
}
SortedMap<String, DefinedStructureInfo> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no defined structure of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
return true;
}
}
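  /**
   * Matches when the given file defines a structure of the given type and name whose definition
   * lines satisfy the provided matcher.
   */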
static final class HasDefinedStructureWithDefinitionLines
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final Matcher<? super Set<Integer>> _subMatcher;
private final String _filename;
private final String _structureName;
private final String _type;
HasDefinedStructureWithDefinitionLines(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
@Nonnull Matcher<? super Set<Integer>> subMatcher) {
_subMatcher = subMatcher;
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
}
@Override
public void describeTo(Description description) {
description.appendText(
String.format(
"A ConvertConfigurationAnswerElement for which file '%s' has a defined structure "
+ "of type '%s' named '%s' with definition lines '%s'",
_filename, _type, _structureName, _subMatcher));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, DefinedStructureInfo>>> byFile =
item.getDefinedStructures();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structures", _filename));
return false;
}
SortedMap<String, SortedMap<String, DefinedStructureInfo>> byType = byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structure of type '%s'", _filename, _type));
return false;
}
SortedMap<String, DefinedStructureInfo> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no defined structure of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
Set<Integer> definitionLines =
byStructureName.get(_structureName).getDefinitionLines().enumerate();
if (!_subMatcher.matches(definitionLines)) {
mismatchDescription.appendText(
String.format(
"File '%s' defined structure of type '%s' named '%s' definition lines were %s",
_filename, _type, _structureName, definitionLines));
return false;
}
return true;
}
}
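  /**
   * Matches when the given file contains an undefined reference to a structure of the given type
   * and name.
   */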
static final class HasUndefinedReference
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final String _filename;
private final String _structureName;
private final String _type;
HasUndefinedReference(
@Nonnull String filename, @Nonnull StructureType type, @Nonnull String structureName) {
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
}
@Override
public void describeTo(Description description) {
description.appendText(
String.format(
"A ConvertConfigurationAnswerElement for which file '%s' has an undefined reference "
+ "to a structure of type '%s' named '%s'",
_filename, _type, _structureName));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
byFile = item.getUndefinedReferences();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no undefined references", _filename));
return false;
}
SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>> byType =
byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s'",
_filename, _type));
return false;
}
SortedMap<String, SortedMap<String, SortedSet<Integer>>> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
return true;
}
}
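  /** Matches when the answer element contains no undefined references at all. */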
static final class HasNoUndefinedReferences
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
HasNoUndefinedReferences() {}
@Override
public void describeTo(Description description) {
description.appendText(
"A ConvertConfigurationAnswerElement which has no undefined references");
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
byFile = item.getUndefinedReferences();
if (byFile.isEmpty()) {
return true;
}
for (Entry<
String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
fileToTypeMap : byFile.entrySet()) {
for (Entry<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>
typeToStructNameMap : fileToTypeMap.getValue().entrySet()) {
for (Entry<String, SortedMap<String, SortedSet<Integer>>> structNameToUsageMap :
typeToStructNameMap.getValue().entrySet()) {
SortedMap<String, SortedSet<Integer>> structUsageMap = structNameToUsageMap.getValue();
if (!structUsageMap.isEmpty()) {
mismatchDescription.appendText(
String.format(
"ConvertConfigurationAnswerElement has undefined references, including for"
+ " file '%s', structure type '%s', named '%s', with usage '%s'",
fileToTypeMap.getKey(),
typeToStructNameMap.getKey(),
structNameToUsageMap.getKey(),
structUsageMap.firstKey()));
return false;
}
}
}
}
return true;
}
}
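  /**
   * Matches when the given file contains an undefined reference to a structure of the given type
   * and name with the given usage.
   */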
static final class HasUndefinedReferenceWithUsage
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final String _filename;
private final String _structureName;
private final String _type;
private final String _usage;
HasUndefinedReferenceWithUsage(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
@Nonnull StructureUsage usage) {
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
_usage = usage.getDescription();
}
@Override
public void describeTo(Description description) {
      description.appendText(
          String.format(
              "A ConvertConfigurationAnswerElement for which file '%s' has an undefined reference "
                  + "to a structure of type '%s' named '%s' with usage '%s'",
              _filename, _type, _structureName, _usage));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
byFile = item.getUndefinedReferences();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no undefined references", _filename));
return false;
}
SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>> byType =
byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s'",
_filename, _type));
return false;
}
SortedMap<String, SortedMap<String, SortedSet<Integer>>> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
SortedMap<String, SortedSet<Integer>> byUsage = byStructureName.get(_structureName);
if (!byUsage.containsKey(_usage)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s' named '%s' of "
+ "usage '%s'",
_filename, _type, _structureName, _usage));
return false;
}
return true;
}
}
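  /**
   * Matches when the given file contains a reference to a structure of the given type and name
   * with the given usage.
   */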
static final class HasReferenceWithUsage
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final String _filename;
private final String _structureName;
private final String _type;
private final String _usage;
HasReferenceWithUsage(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
@Nonnull StructureUsage usage) {
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
_usage = usage.getDescription();
}
@Override
public void describeTo(Description description) {
      description.appendText(
          String.format(
              "A ConvertConfigurationAnswerElement for which file '%s' has a reference "
                  + "to a structure of type '%s' named '%s' with usage '%s'",
              _filename, _type, _structureName, _usage));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
byFile = item.getReferencedStructures();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(String.format("File '%s' has no references", _filename));
return false;
}
SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>> byType =
byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no references to structures of type '%s'", _filename, _type));
return false;
}
SortedMap<String, SortedMap<String, SortedSet<Integer>>> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no references to structures of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
SortedMap<String, SortedSet<Integer>> byUsage = byStructureName.get(_structureName);
if (!byUsage.containsKey(_usage)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no references to structures of type '%s' named '%s' of "
+ "usage '%s'",
_filename, _type, _structureName, _usage));
return false;
}
return true;
}
}
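  /**
   * Matches when the given file contains an undefined reference to a structure of the given type,
   * name, and usage whose reference lines satisfy the provided matcher.
   */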
static final class HasUndefinedReferenceWithReferenceLines
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final Matcher<? super Set<Integer>> _subMatcher;
private final String _filename;
private final String _structureName;
private final String _type;
private final String _usage;
HasUndefinedReferenceWithReferenceLines(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
@Nonnull StructureUsage usage,
@Nonnull Matcher<? super Set<Integer>> subMatcher) {
_subMatcher = subMatcher;
_filename = filename;
_type = type.getDescription();
_structureName = structureName;
_usage = usage.getDescription();
}
@Override
public void describeTo(Description description) {
description.appendText(
String.format(
"A ConvertConfigurationAnswerElement for which file '%s' has an undefined reference "
+ "of type '%s' named '%s' with reference lines '%s'",
_filename, _type, _structureName, _subMatcher));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>>>
byFile = item.getUndefinedReferences();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no undefined references", _filename));
return false;
}
SortedMap<String, SortedMap<String, SortedMap<String, SortedSet<Integer>>>> byType =
byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format("File '%s' has no undefined reference of type '%s'", _filename, _type));
return false;
}
SortedMap<String, SortedMap<String, SortedSet<Integer>>> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined reference of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
SortedMap<String, SortedSet<Integer>> byUsage = byStructureName.get(_structureName);
if (!byUsage.containsKey(_usage)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined references to structures of type '%s' named '%s' of "
+ "usage '%s'",
_filename, _type, _structureName, _usage));
return false;
}
if (!_subMatcher.matches(byUsage.get(_usage))) {
mismatchDescription.appendText(
String.format(
"File '%s' has no undefined reference of type '%s' named '%s' of usage '%s'"
+ " matching reference lines '%s'",
_filename, _type, _structureName, _usage, _subMatcher));
return false;
}
return true;
}
}
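  /**
   * Matches when the given file defines a structure of the given type and name with exactly the
   * expected number of referrers.
   */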
static final class HasNumReferrers
extends TypeSafeDiagnosingMatcher<ConvertConfigurationAnswerElement> {
private final String _filename;
private final int _numReferrers;
private final String _structureName;
private final String _type;
HasNumReferrers(
@Nonnull String filename,
@Nonnull StructureType type,
@Nonnull String structureName,
int numReferrers) {
_filename = filename;
_numReferrers = numReferrers;
_type = type.getDescription();
_structureName = structureName;
}
@Override
public void describeTo(Description description) {
description.appendText(
String.format(
"A ConvertConfigurationAnswerElement for which file '%s' has defined structure of "
+ "type '%s' named '%s' with %d referrers",
_filename, _type, _structureName, _numReferrers));
}
@Override
protected boolean matchesSafely(
ConvertConfigurationAnswerElement item, Description mismatchDescription) {
SortedMap<String, SortedMap<String, SortedMap<String, DefinedStructureInfo>>> byFile =
item.getDefinedStructures();
if (!byFile.containsKey(_filename)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structures", _filename));
return false;
}
SortedMap<String, SortedMap<String, DefinedStructureInfo>> byType = byFile.get(_filename);
if (!byType.containsKey(_type)) {
mismatchDescription.appendText(
String.format("File '%s' has no defined structures of type '%s'", _filename, _type));
return false;
}
SortedMap<String, DefinedStructureInfo> byStructureName = byType.get(_type);
if (!byStructureName.containsKey(_structureName)) {
mismatchDescription.appendText(
String.format(
"File '%s' has no defined structures of type '%s' named '%s'",
_filename, _type, _structureName));
return false;
}
if (byStructureName.get(_structureName).getNumReferrers() != _numReferrers) {
mismatchDescription.appendText(
String.format(
"In file '%s', defined structure of type '%s' named '%s' has %d referrers",
_filename,
_type,
_structureName,
byStructureName.get(_structureName).getNumReferrers()));
return false;
}
return true;
}
}
private ConvertConfigurationAnswerElementMatchers() {}
}
|