code
stringlengths 10
749k
| repo_name
stringlengths 5
108
| path
stringlengths 7
333
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 10
749k
|
---|---|---|---|---|---|
package com.caozeal.practice;
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
/**
 * Placeholder test class: the Selenium smoke test below is entirely commented
 * out, presumably so the build does not require a local Firefox/WebDriver
 * installation — NOTE(review): confirm intent before deleting the dead code.
 * The commented body launched Firefox against baidu.com and quit the driver.
 */
public class TestTryTest {
// @OdevityMain2
// public void seleniumTest(){
// WebDriver driver = new FirefoxDriver();
// driver.get("http://www.baidu.com");
//// WebElement query = driver.findElement(By.name("search"));
//// query.sendKeys("傲然绝唳的测试");
////
//// WebElement goButton = driver.findElement(By.name("go"));
//// goButton.click();
////
//// assertThat(driver.getTitle()).startsWith("傲然绝唳的测试");
// driver.quit();
// }
}
|
caozeal/Utopia
|
Source/UtopiaLand/test/com/caozeal/someTry/TestTryTest.java
|
Java
|
apache-2.0
| 786 |
package org.javarosa.model.xform;
import org.javarosa.core.data.IDataPointer;
import org.javarosa.core.model.IAnswerDataSerializer;
import org.javarosa.core.model.instance.FormInstance;
import org.javarosa.core.model.instance.TreeElement;
import org.javarosa.core.model.instance.TreeReference;
import org.javarosa.core.model.utils.IInstanceSerializingVisitor;
import org.javarosa.core.services.transport.payload.ByteArrayPayload;
import org.javarosa.core.services.transport.payload.DataPointerPayload;
import org.javarosa.core.services.transport.payload.IDataPayload;
import org.javarosa.core.services.transport.payload.MultiMessagePayload;
import org.javarosa.xform.util.XFormAnswerDataSerializer;
import org.javarosa.xform.util.XFormSerializer;
import org.kxml2.kdom.Document;
import org.kxml2.kdom.Element;
import org.kxml2.kdom.Node;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Vector;
/**
* A visitor-esque class which walks a FormInstance and constructs an XML document
* containing its instance.
*
* The XML node elements are constructed in a depth-first manner, consistent with
* standard XML document parsing.
*
* @author Clayton Sims
*/
public class XFormSerializingVisitor implements IInstanceSerializingVisitor {
/**
* The XML document containing the instance that is to be returned
*/
Document theXmlDoc;
/**
* The serializer to be used in constructing XML for AnswerData elements
*/
IAnswerDataSerializer serializer;
/**
* The root of the xml document which should be included in the serialization *
*/
TreeReference rootRef;
/** External (binary) answer-data pointers collected while serializing; emitted as extra payloads. */
Vector<IDataPointer> dataPointers;
/** When true, nodes whose relevancy condition is false are omitted from the output. */
boolean respectRelevance = true;
public XFormSerializingVisitor() {
this(true);
}
/** @param respectRelevance whether non-relevant nodes should be skipped during serialization */
public XFormSerializingVisitor(boolean respectRelevance) {
this.respectRelevance = respectRelevance;
}
// Resets per-serialization state so a single visitor instance can be reused.
private void init() {
theXmlDoc = null;
dataPointers = new Vector<IDataPointer>();
}
/** Serializes the whole instance (rooted at "/") to UTF-8 XML bytes; null if no document was built. */
@Override
public byte[] serializeInstance(FormInstance model) throws IOException {
return serializeInstance(model, new XPathReference("/"));
}
/**
* Serializes the subtree of {@code model} rooted at {@code ref} to UTF-8 XML bytes.
*
* @return the serialized bytes, or null if visiting produced no document
*/
@Override
public byte[] serializeInstance(FormInstance model, XPathReference ref) throws IOException {
init();
rootRef = FormInstance.unpackReference(ref);
// Lazily fall back to the default XForm answer-data serializer.
if (this.serializer == null) {
this.setAnswerDataSerializer(new XFormAnswerDataSerializer());
}
model.accept(this);
if (theXmlDoc != null) {
return XFormSerializer.getUtfBytesFromDocument(theXmlDoc);
} else {
return null;
}
}
/** Serializes the whole instance (rooted at "/") into a transport payload. */
@Override
public IDataPayload createSerializedPayload(FormInstance model) throws IOException {
return createSerializedPayload(model, new XPathReference("/"));
}
/**
* Serializes the subtree rooted at {@code ref} into a transport payload.
* If any external data pointers were collected during serialization, the
* result is a multi-message payload (XML part + one payload per pointer);
* otherwise a plain XML byte payload. Returns null if no document was built.
*/
@Override
public IDataPayload createSerializedPayload(FormInstance model, XPathReference ref) throws IOException {
init();
rootRef = FormInstance.unpackReference(ref);
// Lazily fall back to the default XForm answer-data serializer.
if (this.serializer == null) {
this.setAnswerDataSerializer(new XFormAnswerDataSerializer());
}
model.accept(this);
if (theXmlDoc != null) {
//TODO: Did this strip necessary data?
byte[] form = XFormSerializer.getUtfBytesFromDocument(theXmlDoc);
if (dataPointers.size() == 0) {
return new ByteArrayPayload(form, null, IDataPayload.PAYLOAD_TYPE_XML);
}
// External data present: bundle the XML plus one payload per pointer.
MultiMessagePayload payload = new MultiMessagePayload();
payload.addPayload(new ByteArrayPayload(form, "xml_submission_file", IDataPayload.PAYLOAD_TYPE_XML));
Enumeration en = dataPointers.elements();
while (en.hasMoreElements()) {
IDataPointer pointer = (IDataPointer)en.nextElement();
payload.addPayload(new DataPointerPayload(pointer));
}
return payload;
} else {
return null;
}
}
/**
* Visitor entry point: builds {@link #theXmlDoc} from the tree, starting at
* {@link #rootRef}, then copies the tree's namespace prefixes (and schema
* namespace, if any) onto the top-level element.
*/
@Override
public void visit(FormInstance tree) {
theXmlDoc = new Document();
TreeElement root = tree.resolveReference(rootRef);
//For some reason resolveReference won't ever return the root, so we'll
//catch that case and just start at the root.
if (root == null) {
root = tree.getRoot();
}
if (root != null) {
theXmlDoc.addChild(Node.ELEMENT, serializeNode(root));
}
Element top = theXmlDoc.getElement(0);
String[] prefixes = tree.getNamespacePrefixes();
for (String prefix : prefixes) {
top.setPrefix(prefix, tree.getNamespaceURI(prefix));
}
// A schema namespace becomes the default ("") namespace of the document.
if (tree.schema != null) {
top.setNamespace(tree.schema);
top.setPrefix("", tree.schema);
}
}
/**
* Recursively serializes one instance node (depth-first) into a kxml2 Element.
* Leaf nodes with a value are serialized via {@link #serializer}; interior
* nodes recurse over children grouped by tag name. Returns null for template
* nodes and (when {@link #respectRelevance} is set) non-relevant nodes, so
* callers must handle a null child.
*/
private Element serializeNode(TreeElement instanceNode) {
Element e = new Element(); //don't set anything on this element yet, as it might get overwritten
//don't serialize template nodes or non-relevant nodes
if ((respectRelevance && !instanceNode.isRelevant()) || instanceNode.getMult() == TreeReference.INDEX_TEMPLATE) {
return null;
}
if (instanceNode.getValue() != null) {
Object serializedAnswer = serializer.serializeAnswerData(instanceNode.getValue(), instanceNode.getDataType());
// The serializer may hand back either a whole Element or a plain string.
if (serializedAnswer instanceof Element) {
e = (Element)serializedAnswer;
} else if (serializedAnswer instanceof String) {
e = new Element();
e.addChild(Node.TEXT, serializedAnswer);
} else {
throw new RuntimeException("Can't handle serialized output for" + instanceNode.getValue().toString() + ", " + serializedAnswer);
}
// Collect pointers to external (e.g. binary) data for payload assembly.
if (serializer.containsExternalData(instanceNode.getValue()).booleanValue()) {
IDataPointer[] pointers = serializer.retrieveExternalDataPointer(instanceNode.getValue());
for (IDataPointer pointer : pointers) {
dataPointers.addElement(pointer);
}
}
} else {
//make sure all children of the same tag name are written en bloc
Vector<String> childNames = new Vector<String>();
for (int i = 0; i < instanceNode.getNumChildren(); i++) {
String childName = instanceNode.getChildAt(i).getName();
if (!childNames.contains(childName))
childNames.addElement(childName);
}
for (int i = 0; i < childNames.size(); i++) {
String childName = childNames.elementAt(i);
int mult = instanceNode.getChildMultiplicity(childName);
for (int j = 0; j < mult; j++) {
Element child = serializeNode(instanceNode.getChild(childName, j));
// child is null for skipped (template / non-relevant) nodes.
if (child != null) {
e.addChild(Node.ELEMENT, child);
}
}
}
}
e.setName(instanceNode.getName());
// add hard-coded attributes
for (int i = 0; i < instanceNode.getAttributeCount(); i++) {
String namespace = instanceNode.getAttributeNamespace(i);
String name = instanceNode.getAttributeName(i);
String val = instanceNode.getAttributeValue(i);
// is it legal for getAttributeValue() to return null? playing it safe for now and assuming yes
if (val == null) {
val = "";
}
e.setAttribute(namespace, name, val);
}
if (instanceNode.getNamespace() != null) {
e.setNamespace(instanceNode.getNamespace());
}
return e;
}
/** Sets the serializer used for answer-data leaf nodes. */
@Override
public void setAnswerDataSerializer(IAnswerDataSerializer ads) {
this.serializer = ads;
}
/** Returns a fresh visitor sharing this one's answer-data serializer. */
@Override
public IInstanceSerializingVisitor newInstance() {
XFormSerializingVisitor modelSerializer = new XFormSerializingVisitor();
modelSerializer.setAnswerDataSerializer(this.serializer);
return modelSerializer;
}
}
|
dimagi/javarosa
|
javarosa/core/src/main/java/org/javarosa/model/xform/XFormSerializingVisitor.java
|
Java
|
apache-2.0
| 8,206 |
package io.silverspoon.bulldog.beagleboneblack.devicetree;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
public class DeviceTreeCompiler {
    private static final String FIRMWARE_PATH = "/lib/firmware/";
    private static final String OBJECT_FILE_PATTERN = "%s%s.dtbo";
    private static final String DEFINITION_FILE_PATTERN = "%s%s.dts";

    /**
     * Writes {@code overlay} to {@code /lib/firmware/<deviceName>.dts} and
     * compiles it with the device tree compiler ({@code dtc}) into
     * {@code /lib/firmware/<deviceName>.dtbo}.
     *
     * @param overlay    device tree overlay source text (.dts content)
     * @param deviceName base name used for both the .dts and .dtbo files
     * @throws IOException          if the overlay file cannot be written or dtc cannot be started
     * @throws InterruptedException if interrupted while waiting for dtc to finish
     * @throws RuntimeException     if dtc exits with a non-zero status
     */
    public static void compileOverlay(String overlay, String deviceName) throws IOException, InterruptedException {
        String objectFile = String.format(OBJECT_FILE_PATTERN, FIRMWARE_PATH, deviceName);
        String overlayFile = String.format(DEFINITION_FILE_PATTERN, FIRMWARE_PATH, deviceName);
        // Files.write reports I/O failures via IOException and always closes the
        // file; the original PrintWriter swallowed write errors and leaked the
        // stream if writing threw.
        Files.write(Paths.get(overlayFile), overlay.getBytes(StandardCharsets.UTF_8));
        // Discrete arguments avoid the naive whitespace splitting performed by
        // Runtime.exec(String) on the formatted command line.
        Process compile = new ProcessBuilder(
                "dtc", "-O", "dtb", "-o", objectFile, "-b", "0", "-@", overlayFile).start();
        int code = compile.waitFor();
        if (code > 0) {
            throw new RuntimeException("Device Tree Overlay compilation failed: " + overlayFile + " could not be compiled");
        }
    }
}
|
xjaros1/bulldog
|
bulldog-board-beagleboneblack/src/main/java/io/silverspoon/bulldog/beagleboneblack/devicetree/DeviceTreeCompiler.java
|
Java
|
apache-2.0
| 1,303 |
package com.xyp.sapidoc.idoc.enumeration;
import java.util.HashSet;
import java.util.Set;
/**
*
* @author Yunpeng_Xu
*/
public enum TagEnum {
    FIELDS("FIELDS"),
    RECORD_SECTION("RECORD_SECTION"),
    CONTROL_RECORD("CONTROL_RECORD"),
    DATA_RECORD("DATA_RECORD"),
    STATUS_RECORD("STATUS_RECORD"),
    SEGMENT_SECTION("SEGMENT_SECTION"),
    IDOC("IDOC"),
    SEGMENT("SEGMENT"),
    GROUP("GROUP"),
    ;

    /** Bare tag name without the BEGIN_/END_ prefix; immutable. */
    private final String tag;

    // Enum constructors are implicitly private; redundant modifier removed.
    TagEnum(String tag) {
        this.tag = tag;
    }

    /** @return the opening form of this tag, e.g. {@code "BEGIN_IDOC"} */
    public String getTagBegin() {
        return "BEGIN_" + tag;
    }

    /** @return the closing form of this tag, e.g. {@code "END_IDOC"} */
    public String getTagEnd() {
        return "END_" + tag;
    }

    /**
     * Collects the BEGIN_ and END_ forms of every tag.
     *
     * @return a mutable set containing both forms for all enum constants
     */
    public static Set<String> getAllTags() {
        Set<String> tags = new HashSet<String>();
        for (TagEnum tagEnum : values()) {
            tags.add(tagEnum.getTagBegin());
            tags.add(tagEnum.getTagEnd());
        }
        return tags;
    }
}
|
PeterXyp/sapidoc
|
src/main/java/com/xyp/sapidoc/idoc/enumeration/TagEnum.java
|
Java
|
apache-2.0
| 1,036 |
/*******************************************************************************
* Copyright 2016 Intuit
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.intuit.wasabi.auditlogobjects;
import com.intuit.wasabi.eventlog.events.BucketCreateEvent;
import com.intuit.wasabi.eventlog.events.BucketEvent;
import com.intuit.wasabi.eventlog.events.ChangeEvent;
import com.intuit.wasabi.eventlog.events.EventLogEvent;
import com.intuit.wasabi.eventlog.events.ExperimentChangeEvent;
import com.intuit.wasabi.eventlog.events.ExperimentCreateEvent;
import com.intuit.wasabi.eventlog.events.ExperimentEvent;
import com.intuit.wasabi.eventlog.events.SimpleEvent;
import com.intuit.wasabi.experimentobjects.Bucket;
import com.intuit.wasabi.experimentobjects.Experiment;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import java.lang.reflect.Field;
/**
* Tests for {@link AuditLogEntryFactory}.
*/
public class AuditLogEntryFactoryTest {
@Test
public void testCreateFromEvent() throws Exception {
new AuditLogEntryFactory();
EventLogEvent[] events = new EventLogEvent[]{
new SimpleEvent("SimpleEvent"),
new ExperimentChangeEvent(Mockito.mock(Experiment.class), "Property", "before", "after"),
new ExperimentCreateEvent(Mockito.mock(Experiment.class)),
new BucketCreateEvent(Mockito.mock(Experiment.class), Mockito.mock(Bucket.class))
};
Field[] fields = AuditLogEntry.class.getFields();
for (Field field : fields) {
field.setAccessible(true);
}
for (EventLogEvent event : events) {
AuditLogEntry aleFactory = AuditLogEntryFactory.createFromEvent(event);
AuditLogEntry aleManual = new AuditLogEntry(
event.getTime(), event.getUser(), AuditLogAction.getActionForEvent(event),
event instanceof ExperimentEvent ? ((ExperimentEvent) event).getExperiment() : null,
event instanceof BucketEvent ? ((BucketEvent) event).getBucket().getLabel() : null,
event instanceof ChangeEvent ? ((ChangeEvent) event).getPropertyName() : null,
event instanceof ChangeEvent ? ((ChangeEvent) event).getBefore() : null,
event instanceof ChangeEvent ? ((ChangeEvent) event).getAfter() : null
);
for (Field field : fields) {
Assert.assertEquals(field.get(aleManual), field.get(aleFactory));
}
}
}
}
|
intuit/wasabi
|
modules/auditlog-objects/src/test/java/com/intuit/wasabi/auditlogobjects/AuditLogEntryFactoryTest.java
|
Java
|
apache-2.0
| 3,157 |
package org.artifactory.ui.rest.service.admin.configuration.mail;
import org.artifactory.api.config.CentralConfigService;
import org.artifactory.descriptor.config.MutableCentralConfigDescriptor;
import org.artifactory.rest.common.service.ArtifactoryRestRequest;
import org.artifactory.rest.common.service.RestResponse;
import org.artifactory.rest.common.service.RestService;
import org.artifactory.ui.rest.model.admin.configuration.mail.MailServer;
import org.artifactory.ui.rest.service.utils.AolUtils;
import org.artifactory.util.HttpUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
/**
* @author Chen Keinan
*/
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class GetMailService implements RestService {

    @Autowired
    private CentralConfigService centralConfigService;

    /**
     * Returns the configured mail server to the UI. When no mail server has
     * been configured yet, an empty model pre-filled with the Artifactory
     * context URL is returned instead.
     */
    @Override
    public void execute(ArtifactoryRestRequest request, RestResponse response) {
        AolUtils.assertNotAol("GetMail");
        String servletContextUrl = HttpUtils.getServletContextUrl(request.getServletRequest());
        response.iModel(buildMailServerModel(servletContextUrl));
    }

    /**
     * Builds the mail server model from the central config descriptor,
     * falling back to a blank model carrying the given context URL when no
     * mail server is configured.
     *
     * @param contextUrl servlet context URL used to seed the fallback model
     * @return populated or fallback mail server model, never null
     */
    private MailServer buildMailServerModel(String contextUrl) {
        MutableCentralConfigDescriptor descriptor = centralConfigService.getMutableDescriptor();
        if (descriptor.getMailServer() == null) {
            MailServer emptyModel = new MailServer();
            emptyModel.setArtifactoryUrl(contextUrl);
            return emptyModel;
        }
        return new MailServer(descriptor.getMailServer());
    }
}
|
alancnet/artifactory
|
web/rest-ui/src/main/java/org/artifactory/ui/rest/service/admin/configuration/mail/GetMailService.java
|
Java
|
apache-2.0
| 2,025 |
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class WriteNotificationLogRequest implements org.apache.thrift.TBase<WriteNotificationLogRequest, WriteNotificationLogRequest._Fields>, java.io.Serializable, Cloneable, Comparable<WriteNotificationLogRequest> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WriteNotificationLogRequest");
private static final org.apache.thrift.protocol.TField TXN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("txnId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField WRITE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("writeId", org.apache.thrift.protocol.TType.I64, (short)2);
private static final org.apache.thrift.protocol.TField DB_FIELD_DESC = new org.apache.thrift.protocol.TField("db", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField TABLE_FIELD_DESC = new org.apache.thrift.protocol.TField("table", org.apache.thrift.protocol.TType.STRING, (short)4);
private static final org.apache.thrift.protocol.TField FILE_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("fileInfo", org.apache.thrift.protocol.TType.STRUCT, (short)5);
private static final org.apache.thrift.protocol.TField PARTITION_VALS_FIELD_DESC = new org.apache.thrift.protocol.TField("partitionVals", org.apache.thrift.protocol.TType.LIST, (short)6);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new WriteNotificationLogRequestStandardSchemeFactory());
schemes.put(TupleScheme.class, new WriteNotificationLogRequestTupleSchemeFactory());
}
private long txnId; // required
private long writeId; // required
private String db; // required
private String table; // required
private InsertEventRequestData fileInfo; // required
private List<String> partitionVals; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
TXN_ID((short)1, "txnId"),
WRITE_ID((short)2, "writeId"),
DB((short)3, "db"),
TABLE((short)4, "table"),
FILE_INFO((short)5, "fileInfo"),
PARTITION_VALS((short)6, "partitionVals");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // TXN_ID
return TXN_ID;
case 2: // WRITE_ID
return WRITE_ID;
case 3: // DB
return DB;
case 4: // TABLE
return TABLE;
case 5: // FILE_INFO
return FILE_INFO;
case 6: // PARTITION_VALS
return PARTITION_VALS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __TXNID_ISSET_ID = 0;
private static final int __WRITEID_ISSET_ID = 1;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.PARTITION_VALS};
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.TXN_ID, new org.apache.thrift.meta_data.FieldMetaData("txnId", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.WRITE_ID, new org.apache.thrift.meta_data.FieldMetaData("writeId", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.DB, new org.apache.thrift.meta_data.FieldMetaData("db", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TABLE, new org.apache.thrift.meta_data.FieldMetaData("table", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.FILE_INFO, new org.apache.thrift.meta_data.FieldMetaData("fileInfo", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, InsertEventRequestData.class)));
tmpMap.put(_Fields.PARTITION_VALS, new org.apache.thrift.meta_data.FieldMetaData("partitionVals", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WriteNotificationLogRequest.class, metaDataMap);
}
public WriteNotificationLogRequest() {
}
public WriteNotificationLogRequest(
long txnId,
long writeId,
String db,
String table,
InsertEventRequestData fileInfo)
{
this();
this.txnId = txnId;
setTxnIdIsSet(true);
this.writeId = writeId;
setWriteIdIsSet(true);
this.db = db;
this.table = table;
this.fileInfo = fileInfo;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public WriteNotificationLogRequest(WriteNotificationLogRequest other) {
__isset_bitfield = other.__isset_bitfield;
this.txnId = other.txnId;
this.writeId = other.writeId;
if (other.isSetDb()) {
this.db = other.db;
}
if (other.isSetTable()) {
this.table = other.table;
}
if (other.isSetFileInfo()) {
this.fileInfo = new InsertEventRequestData(other.fileInfo);
}
if (other.isSetPartitionVals()) {
List<String> __this__partitionVals = new ArrayList<String>(other.partitionVals);
this.partitionVals = __this__partitionVals;
}
}
public WriteNotificationLogRequest deepCopy() {
return new WriteNotificationLogRequest(this);
}
@Override
public void clear() {
setTxnIdIsSet(false);
this.txnId = 0;
setWriteIdIsSet(false);
this.writeId = 0;
this.db = null;
this.table = null;
this.fileInfo = null;
this.partitionVals = null;
}
public long getTxnId() {
return this.txnId;
}
public void setTxnId(long txnId) {
this.txnId = txnId;
setTxnIdIsSet(true);
}
public void unsetTxnId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __TXNID_ISSET_ID);
}
/** Returns true if field txnId is set (has been assigned a value) and false otherwise */
public boolean isSetTxnId() {
return EncodingUtils.testBit(__isset_bitfield, __TXNID_ISSET_ID);
}
public void setTxnIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TXNID_ISSET_ID, value);
}
public long getWriteId() {
return this.writeId;
}
public void setWriteId(long writeId) {
this.writeId = writeId;
setWriteIdIsSet(true);
}
public void unsetWriteId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __WRITEID_ISSET_ID);
}
/** Returns true if field writeId is set (has been assigned a value) and false otherwise */
public boolean isSetWriteId() {
return EncodingUtils.testBit(__isset_bitfield, __WRITEID_ISSET_ID);
}
public void setWriteIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __WRITEID_ISSET_ID, value);
}
public String getDb() {
return this.db;
}
public void setDb(String db) {
this.db = db;
}
public void unsetDb() {
this.db = null;
}
/** Returns true if field db is set (has been assigned a value) and false otherwise */
public boolean isSetDb() {
return this.db != null;
}
public void setDbIsSet(boolean value) {
if (!value) {
this.db = null;
}
}
public String getTable() {
return this.table;
}
public void setTable(String table) {
this.table = table;
}
public void unsetTable() {
this.table = null;
}
/** Returns true if field table is set (has been assigned a value) and false otherwise */
public boolean isSetTable() {
return this.table != null;
}
public void setTableIsSet(boolean value) {
if (!value) {
this.table = null;
}
}
public InsertEventRequestData getFileInfo() {
return this.fileInfo;
}
public void setFileInfo(InsertEventRequestData fileInfo) {
this.fileInfo = fileInfo;
}
public void unsetFileInfo() {
this.fileInfo = null;
}
/** Returns true if field fileInfo is set (has been assigned a value) and false otherwise */
public boolean isSetFileInfo() {
return this.fileInfo != null;
}
public void setFileInfoIsSet(boolean value) {
if (!value) {
this.fileInfo = null;
}
}
public int getPartitionValsSize() {
return (this.partitionVals == null) ? 0 : this.partitionVals.size();
}
public java.util.Iterator<String> getPartitionValsIterator() {
return (this.partitionVals == null) ? null : this.partitionVals.iterator();
}
public void addToPartitionVals(String elem) {
if (this.partitionVals == null) {
this.partitionVals = new ArrayList<String>();
}
this.partitionVals.add(elem);
}
public List<String> getPartitionVals() {
return this.partitionVals;
}
public void setPartitionVals(List<String> partitionVals) {
this.partitionVals = partitionVals;
}
public void unsetPartitionVals() {
this.partitionVals = null;
}
/** Returns true if field partitionVals is set (has been assigned a value) and false otherwise */
public boolean isSetPartitionVals() {
return this.partitionVals != null;
}
public void setPartitionValsIsSet(boolean value) {
if (!value) {
this.partitionVals = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case TXN_ID:
if (value == null) {
unsetTxnId();
} else {
setTxnId((Long)value);
}
break;
case WRITE_ID:
if (value == null) {
unsetWriteId();
} else {
setWriteId((Long)value);
}
break;
case DB:
if (value == null) {
unsetDb();
} else {
setDb((String)value);
}
break;
case TABLE:
if (value == null) {
unsetTable();
} else {
setTable((String)value);
}
break;
case FILE_INFO:
if (value == null) {
unsetFileInfo();
} else {
setFileInfo((InsertEventRequestData)value);
}
break;
case PARTITION_VALS:
if (value == null) {
unsetPartitionVals();
} else {
setPartitionVals((List<String>)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case TXN_ID:
return getTxnId();
case WRITE_ID:
return getWriteId();
case DB:
return getDb();
case TABLE:
return getTable();
case FILE_INFO:
return getFileInfo();
case PARTITION_VALS:
return getPartitionVals();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case TXN_ID:
return isSetTxnId();
case WRITE_ID:
return isSetWriteId();
case DB:
return isSetDb();
case TABLE:
return isSetTable();
case FILE_INFO:
return isSetFileInfo();
case PARTITION_VALS:
return isSetPartitionVals();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof WriteNotificationLogRequest)
return this.equals((WriteNotificationLogRequest)that);
return false;
}
public boolean equals(WriteNotificationLogRequest that) {
if (that == null)
return false;
boolean this_present_txnId = true;
boolean that_present_txnId = true;
if (this_present_txnId || that_present_txnId) {
if (!(this_present_txnId && that_present_txnId))
return false;
if (this.txnId != that.txnId)
return false;
}
boolean this_present_writeId = true;
boolean that_present_writeId = true;
if (this_present_writeId || that_present_writeId) {
if (!(this_present_writeId && that_present_writeId))
return false;
if (this.writeId != that.writeId)
return false;
}
boolean this_present_db = true && this.isSetDb();
boolean that_present_db = true && that.isSetDb();
if (this_present_db || that_present_db) {
if (!(this_present_db && that_present_db))
return false;
if (!this.db.equals(that.db))
return false;
}
boolean this_present_table = true && this.isSetTable();
boolean that_present_table = true && that.isSetTable();
if (this_present_table || that_present_table) {
if (!(this_present_table && that_present_table))
return false;
if (!this.table.equals(that.table))
return false;
}
boolean this_present_fileInfo = true && this.isSetFileInfo();
boolean that_present_fileInfo = true && that.isSetFileInfo();
if (this_present_fileInfo || that_present_fileInfo) {
if (!(this_present_fileInfo && that_present_fileInfo))
return false;
if (!this.fileInfo.equals(that.fileInfo))
return false;
}
boolean this_present_partitionVals = true && this.isSetPartitionVals();
boolean that_present_partitionVals = true && that.isSetPartitionVals();
if (this_present_partitionVals || that_present_partitionVals) {
if (!(this_present_partitionVals && that_present_partitionVals))
return false;
if (!this.partitionVals.equals(that.partitionVals))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_txnId = true;
list.add(present_txnId);
if (present_txnId)
list.add(txnId);
boolean present_writeId = true;
list.add(present_writeId);
if (present_writeId)
list.add(writeId);
boolean present_db = true && (isSetDb());
list.add(present_db);
if (present_db)
list.add(db);
boolean present_table = true && (isSetTable());
list.add(present_table);
if (present_table)
list.add(table);
boolean present_fileInfo = true && (isSetFileInfo());
list.add(present_fileInfo);
if (present_fileInfo)
list.add(fileInfo);
boolean present_partitionVals = true && (isSetPartitionVals());
list.add(present_partitionVals);
if (present_partitionVals)
list.add(partitionVals);
return list.hashCode();
}
@Override
public int compareTo(WriteNotificationLogRequest other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetTxnId()).compareTo(other.isSetTxnId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetTxnId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.txnId, other.txnId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetWriteId()).compareTo(other.isSetWriteId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetWriteId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.writeId, other.writeId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetDb()).compareTo(other.isSetDb());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDb()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.db, other.db);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTable()).compareTo(other.isSetTable());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetTable()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.table, other.table);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetFileInfo()).compareTo(other.isSetFileInfo());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetFileInfo()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fileInfo, other.fileInfo);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetPartitionVals()).compareTo(other.isSetPartitionVals());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetPartitionVals()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partitionVals, other.partitionVals);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
  // Maps a numeric Thrift field id to its _Fields enum constant
  // (returns null for unknown ids, per the generated _Fields contract).
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  // Deserializes this struct from the protocol, dispatching to the scheme
  // (standard field-tagged or compact tuple) registered for the protocol type.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  // Serializes this struct to the protocol, dispatching to the scheme
  // (standard field-tagged or compact tuple) registered for the protocol type.
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  // Generated human-readable rendering. Required fields are always printed;
  // the optional partitionVals field appears only when set. The `first` flag
  // is generated comma-placement boilerplate.
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("WriteNotificationLogRequest(");
    boolean first = true;
    sb.append("txnId:");
    sb.append(this.txnId);
    first = false;
    if (!first) sb.append(", ");
    sb.append("writeId:");
    sb.append(this.writeId);
    first = false;
    if (!first) sb.append(", ");
    sb.append("db:");
    if (this.db == null) {
      sb.append("null");
    } else {
      sb.append(this.db);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("table:");
    if (this.table == null) {
      sb.append("null");
    } else {
      sb.append(this.table);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("fileInfo:");
    if (this.fileInfo == null) {
      sb.append("null");
    } else {
      sb.append(this.fileInfo);
    }
    first = false;
    if (isSetPartitionVals()) {
      if (!first) sb.append(", ");
      sb.append("partitionVals:");
      if (this.partitionVals == null) {
        sb.append("null");
      } else {
        sb.append(this.partitionVals);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  // Throws TProtocolException if any required field (txnId, writeId, db,
  // table, fileInfo) is unset, and recursively validates the nested
  // fileInfo sub-struct.
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetTxnId()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'txnId' is unset! Struct:" + toString());
    }
    if (!isSetWriteId()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'writeId' is unset! Struct:" + toString());
    }
    if (!isSetDb()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'db' is unset! Struct:" + toString());
    }
    if (!isSetTable()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'table' is unset! Struct:" + toString());
    }
    if (!isSetFileInfo()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'fileInfo' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
    if (fileInfo != null) {
      fileInfo.validate();
    }
  }
  // Java-serialization hook: encodes the struct with Thrift's compact
  // protocol instead of default field-by-field serialization.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      // Wrap so the method honors ObjectOutputStream's IOException contract.
      throw new java.io.IOException(te);
    }
  }
  // Java-deserialization hook: decodes the struct with Thrift's compact
  // protocol, mirroring writeObject above.
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      // Wrap so the method honors ObjectInputStream's IOException contract.
      throw new java.io.IOException(te);
    }
  }
  // Factory registered for TProtocols that use the standard (field-tagged)
  // wire format.
  private static class WriteNotificationLogRequestStandardSchemeFactory implements SchemeFactory {
    public WriteNotificationLogRequestStandardScheme getScheme() {
      return new WriteNotificationLogRequestStandardScheme();
    }
  }
  // Standard (field-tagged) wire format: every field is preceded by an
  // id/type header, so readers tolerate unknown, reordered, or missing
  // fields by skipping them.
  private static class WriteNotificationLogRequestStandardScheme extends StandardScheme<WriteNotificationLogRequest> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, WriteNotificationLogRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Consume field headers until the STOP marker; unknown ids or
      // type mismatches are skipped rather than treated as errors.
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // TXN_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.txnId = iprot.readI64();
              struct.setTxnIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // WRITE_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.writeId = iprot.readI64();
              struct.setWriteIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // DB
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.db = iprot.readString();
              struct.setDbIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // TABLE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.table = iprot.readString();
              struct.setTableIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // FILE_INFO
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.fileInfo = new InsertEventRequestData();
              struct.fileInfo.read(iprot);
              struct.setFileInfoIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 6: // PARTITION_VALS
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list812 = iprot.readListBegin();
                struct.partitionVals = new ArrayList<String>(_list812.size);
                String _elem813;
                for (int _i814 = 0; _i814 < _list812.size; ++_i814)
                {
                  _elem813 = iprot.readString();
                  struct.partitionVals.add(_elem813);
                }
                iprot.readListEnd();
              }
              struct.setPartitionValsIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // Enforce required-field presence after the whole struct is read.
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, WriteNotificationLogRequest struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(TXN_ID_FIELD_DESC);
      oprot.writeI64(struct.txnId);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(WRITE_ID_FIELD_DESC);
      oprot.writeI64(struct.writeId);
      oprot.writeFieldEnd();
      if (struct.db != null) {
        oprot.writeFieldBegin(DB_FIELD_DESC);
        oprot.writeString(struct.db);
        oprot.writeFieldEnd();
      }
      if (struct.table != null) {
        oprot.writeFieldBegin(TABLE_FIELD_DESC);
        oprot.writeString(struct.table);
        oprot.writeFieldEnd();
      }
      if (struct.fileInfo != null) {
        oprot.writeFieldBegin(FILE_INFO_FIELD_DESC);
        struct.fileInfo.write(oprot);
        oprot.writeFieldEnd();
      }
      // Optional field: written only when both non-null and marked set.
      if (struct.partitionVals != null) {
        if (struct.isSetPartitionVals()) {
          oprot.writeFieldBegin(PARTITION_VALS_FIELD_DESC);
          {
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.partitionVals.size()));
            for (String _iter815 : struct.partitionVals)
            {
              oprot.writeString(_iter815);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  // Factory registered for TProtocols that use the compact tuple wire format.
  private static class WriteNotificationLogRequestTupleSchemeFactory implements SchemeFactory {
    public WriteNotificationLogRequestTupleScheme getScheme() {
      return new WriteNotificationLogRequestTupleScheme();
    }
  }
  // Tuple wire format: required fields are written positionally with no
  // per-field headers; optional fields are guarded by a bit set. Both sides
  // must agree on the exact field order, so this code must not be reordered.
  private static class WriteNotificationLogRequestTupleScheme extends TupleScheme<WriteNotificationLogRequest> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, WriteNotificationLogRequest struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      // Required fields, in declaration order (presence assumed; callers go
      // through the outer write() which validates first in the standard path —
      // NOTE(review): tuple write itself does not call validate()).
      oprot.writeI64(struct.txnId);
      oprot.writeI64(struct.writeId);
      oprot.writeString(struct.db);
      oprot.writeString(struct.table);
      struct.fileInfo.write(oprot);
      // One optionality bit: partitionVals.
      BitSet optionals = new BitSet();
      if (struct.isSetPartitionVals()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetPartitionVals()) {
        {
          oprot.writeI32(struct.partitionVals.size());
          for (String _iter816 : struct.partitionVals)
          {
            oprot.writeString(_iter816);
          }
        }
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, WriteNotificationLogRequest struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      // Required fields, read in the same positional order as write().
      struct.txnId = iprot.readI64();
      struct.setTxnIdIsSet(true);
      struct.writeId = iprot.readI64();
      struct.setWriteIdIsSet(true);
      struct.db = iprot.readString();
      struct.setDbIsSet(true);
      struct.table = iprot.readString();
      struct.setTableIsSet(true);
      struct.fileInfo = new InsertEventRequestData();
      struct.fileInfo.read(iprot);
      struct.setFileInfoIsSet(true);
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TList _list817 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
          struct.partitionVals = new ArrayList<String>(_list817.size);
          String _elem818;
          for (int _i819 = 0; _i819 < _list817.size; ++_i819)
          {
            _elem818 = iprot.readString();
            struct.partitionVals.add(_elem818);
          }
        }
        struct.setPartitionValsIsSet(true);
      }
    }
  }
}
|
alanfgates/hive
|
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WriteNotificationLogRequest.java
|
Java
|
apache-2.0
| 30,759 |
/*
* Copyright (c) 2009, Rickard Öberg. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.qi4j.bootstrap;
/**
* Base class for assembly visitors. Subclass and override
* the particular methods you are interested in.
*/
public class AssemblyVisitorAdapter<ThrowableType extends Throwable>
    implements AssemblyVisitor<ThrowableType>
{
    /** No-op; override to inspect the application assembly. */
    public void visitApplication( ApplicationAssembly assembly )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each layer assembly. */
    public void visitLayer( LayerAssembly assembly )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each module assembly. */
    public void visitModule( ModuleAssembly assembly )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each transient-composite declaration. */
    public void visitComposite( TransientDeclaration declaration )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each entity declaration. */
    public void visitEntity( EntityDeclaration declaration )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each service declaration. */
    public void visitService( ServiceDeclaration declaration )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each imported-service declaration. */
    public void visitImportedService( ImportedServiceDeclaration declaration )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each value declaration. */
    public void visitValue( ValueDeclaration declaration )
        throws ThrowableType
    {
    }

    /** No-op; override to inspect each object declaration. */
    public void visitObject( ObjectDeclaration declaration )
        throws ThrowableType
    {
    }
}
|
Qi4j/qi4j-core
|
bootstrap/src/main/java/org/qi4j/bootstrap/AssemblyVisitorAdapter.java
|
Java
|
apache-2.0
| 1,820 |
package com.ctrip.framework.cs.enterprise;
import com.ctrip.framework.cs.configuration.ConfigurationManager;
import com.ctrip.framework.cs.configuration.InitConfigurationException;
import com.ctrip.framework.cs.util.HttpUtil;
import com.ctrip.framework.cs.util.PomUtil;
import com.google.gson.Gson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Created by jiang.j on 2016/10/20.
*/
/**
 * {@link EnMaven} implementation backed by a Sonatype Nexus repository.
 * Artifacts are located through Nexus's lucene search REST API and their
 * .pom or -sources.jar content is streamed back to the caller.
 */
public class NexusEnMaven implements EnMaven {

    // Gson mapping types for the Nexus REST responses.
    class NexusPomInfo{
        String groupId;
        String artifactId;
        String version;
    }
    class RepoDetail{
        String repositoryURL;
        String repositoryKind;
    }
    class SearchResult{
        RepoDetail[] repoDetails;
        NexusPomInfo[] data;
    }
    class ResourceResult{
        ResourceInfo[] data;
    }
    class ResourceInfo{
        String text;
    }

    Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Searches Nexus for an artifact and opens a stream to its content.
     *
     * @param av       artifactId/version (and optionally groupId) triple; when
     *                 null it is derived from {@code fileName} and the
     *                 -sources.jar is fetched instead of the .pom
     * @param fileName jar file name used either to derive the coordinates or
     *                 as the explicit base name of the .pom to fetch
     * @return an open stream to the resolved content, or null when the
     *         artifact cannot be resolved (failures are logged, not thrown)
     * @throws InitConfigurationException if the repository URL config is unavailable
     */
    private InputStream getContentByName(String[] av,String fileName) throws InitConfigurationException {
        InputStream rtn = null;
        String endsWith = ".pom";
        if(av == null && fileName == null){
            return null;
        }else if(av==null) {
            // No explicit coordinates: fetch the sources jar for this file.
            endsWith = "-sources.jar";
            av = PomUtil.getArtifactIdAndVersion(fileName);
        }
        if(av == null){
            return null;
        }
        // NOTE(review): query parameters are not URL-encoded — assumes
        // artifactId/version/groupId contain no reserved characters.
        String searchUrl = (ConfigurationManager.getConfigInstance().getString("vi.maven.repository.url") +
                "/nexus/service/local/lucene/search?a=" + av[0] + "&v=" + av[1]);
        if(av.length >2){
            searchUrl += "&g="+av[2];
        }
        logger.debug(searchUrl);
        try {
            URL url = new URL(searchUrl);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setConnectTimeout(200);
            conn.setReadTimeout(500);
            conn.setRequestMethod("GET");
            conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
            conn.setRequestProperty("Accept", "application/json");
            try (Reader rd = new InputStreamReader(conn.getInputStream(), "UTF-8")) {
                Gson gson = new Gson();
                SearchResult results = gson.fromJson(rd, SearchResult.class);
                if(results.repoDetails!=null && results.data !=null && results.repoDetails.length>0 && results.data.length>0){
                    NexusPomInfo pomInfo = results.data[0];
                    // Prefer a "hosted" repository; fall back to the first hit.
                    String repositoryUrl = null;
                    if(results.repoDetails.length>1){
                        for(RepoDetail repoDetail:results.repoDetails){
                            if("hosted".equalsIgnoreCase(repoDetail.repositoryKind)){
                                repositoryUrl = repoDetail.repositoryURL;
                                break;
                            }
                        }
                    }
                    if(repositoryUrl == null)
                    {
                        repositoryUrl = results.repoDetails[0].repositoryURL;
                    }
                    String pomUrl = repositoryUrl +"/content/"+pomInfo.groupId.replace(".","/")+"/"+pomInfo.artifactId+"/"
                            +pomInfo.version+"/";
                    if(fileName == null){
                        // List the version directory and pick the lexicographically
                        // smallest entry with the wanted suffix (e.g. *-sources.jar).
                        ResourceResult resourceResult = HttpUtil.doGet(new URL(pomUrl), ResourceResult.class);
                        for(ResourceInfo rinfo:resourceResult.data){
                            if(rinfo.text.endsWith(endsWith) && (
                                    fileName == null || fileName.compareTo(rinfo.text)>0)){
                                fileName = rinfo.text;
                            }
                        }
                        pomUrl += fileName;
                    }else {
                        pomUrl += fileName + endsWith;
                    }
                    logger.debug(pomUrl);
                    HttpURLConnection pomConn = (HttpURLConnection) new URL(pomUrl).openConnection();
                    pomConn.setRequestMethod("GET");
                    // Caller is responsible for closing the returned stream.
                    rtn = pomConn.getInputStream();
                }
            }
        }catch (Throwable e){
            logger.warn("get pominfo by jar name["+av[0] + ' '+av[1]+"] failed",e);
        }
        return rtn;
    }

    /** {@inheritDoc} Returns null (and logs) on any failure. */
    @Override
    public InputStream getPomInfoByFileName(String[] av, String fileName) {
        try {
            return getContentByName(av, fileName);
        }catch (Throwable e){
            logger.warn("getPomInfoByFileName failed!",e);
            return null;
        }
    }

    /** {@inheritDoc} Returns null (and logs) on any failure. */
    @Override
    public InputStream getSourceJarByFileName(String fileName) {
        try {
            return getContentByName(null,fileName);
        }catch (Throwable e){
            // Bug fix: previously logged the wrong method name
            // ("getPomInfoByFileName failed!"), a copy-paste error that
            // misattributed failures in the logs.
            logger.warn("getSourceJarByFileName failed!",e);
            return null;
        }
    }
}
|
ctripcorp/cornerstone
|
cornerstone/src/main/java/com/ctrip/framework/cs/enterprise/NexusEnMaven.java
|
Java
|
apache-2.0
| 5,024 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.speech.v1.model;
/**
* Word-specific information for recognized words.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Speech-to-Text API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WordInfo extends com.google.api.client.json.GenericJson {
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float confidence;
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String endTime;
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer speakerTag;
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String startTime;
/**
* The word corresponding to this set of information.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String word;
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* @return value or {@code null} for none
*/
public java.lang.Float getConfidence() {
return confidence;
}
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* @param confidence confidence or {@code null} for none
*/
public WordInfo setConfidence(java.lang.Float confidence) {
this.confidence = confidence;
return this;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* @return value or {@code null} for none
*/
public String getEndTime() {
return endTime;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* @param endTime endTime or {@code null} for none
*/
public WordInfo setEndTime(String endTime) {
this.endTime = endTime;
return this;
}
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* @return value or {@code null} for none
*/
public java.lang.Integer getSpeakerTag() {
return speakerTag;
}
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* @param speakerTag speakerTag or {@code null} for none
*/
public WordInfo setSpeakerTag(java.lang.Integer speakerTag) {
this.speakerTag = speakerTag;
return this;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* @return value or {@code null} for none
*/
public String getStartTime() {
return startTime;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* @param startTime startTime or {@code null} for none
*/
public WordInfo setStartTime(String startTime) {
this.startTime = startTime;
return this;
}
/**
* The word corresponding to this set of information.
* @return value or {@code null} for none
*/
public java.lang.String getWord() {
return word;
}
/**
* The word corresponding to this set of information.
* @param word word or {@code null} for none
*/
public WordInfo setWord(java.lang.String word) {
this.word = word;
return this;
}
@Override
public WordInfo set(String fieldName, Object value) {
return (WordInfo) super.set(fieldName, value);
}
@Override
public WordInfo clone() {
return (WordInfo) super.clone();
}
}
|
googleapis/google-api-java-client-services
|
clients/google-api-services-speech/v1/1.31.0/com/google/api/services/speech/v1/model/WordInfo.java
|
Java
|
apache-2.0
| 7,866 |
/*
* PartitioningOperators.java Feb 3 2014, 03:44
*
* Copyright 2014 Drunken Dev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.drunkendev.lambdas;
import com.drunkendev.lambdas.domain.DomainService;
import com.drunkendev.lambdas.domain.Order;
import com.drunkendev.lambdas.helper.IndexHolder;
import com.drunkendev.lambdas.helper.MutableBoolean;
import java.util.ArrayList;
import java.util.Arrays;
/**
*
* @author Brett Ryan
*/
/**
 * Java-8 stream translations of LINQ-style partitioning operators
 * (Take / Skip / TakeWhile / SkipWhile samples 20-27).
 *
 * @author Brett Ryan
 */
public class PartitioningOperators {

    private final DomainService ds;

    /**
     * Creates a new {@code PartitioningOperators} instance.
     */
    public PartitioningOperators() {
        this.ds = new DomainService();
    }

    /** Runs every sample in order, printing results to stdout. */
    public static void main(String[] args) {
        PartitioningOperators po = new PartitioningOperators();
        po.lambda20();
        po.lambda21();
        po.lambda22();
        po.lambda23();
        po.lambda24();
        po.lambda25();
        po.lambda26();
        po.lambda27();
    }

    /** Take: prints the first 3 numbers via {@code limit(3)}. */
    public void lambda20() {
        System.out.println("\nFirst 3 numbers:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        Arrays.stream(numbers)
                .limit(3)
                .forEach(System.out::println);
    }

    /** Take over a joined sequence: first 3 (customer, order) pairs in WA. */
    public void lambda21() {
        System.out.println("\nFirst 3 orders in WA:");
        ds.getCustomerList().stream()
                .filter(c -> "WA".equalsIgnoreCase(c.getRegion()))
                .flatMap(c -> c.getOrders().stream()
                        .map(n -> new CustOrder(c.getCustomerId(), n))
                ).limit(3)
                .forEach(System.out::println);
    }

    /** Skip: prints all but the first 4 numbers via {@code skip(4)}. */
    public void lambda22() {
        System.out.println("\nAll but first 4 numbers:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        Arrays.stream(numbers)
                .skip(4)
                .forEach(System.out::println);
    }

    /** Skip over a joined sequence: all but the first 2 WA orders. */
    public void lambda23() {
        System.out.println("\nAll but first 2 orders in WA:");
        ds.getCustomerList().stream()
                .filter(c -> "WA".equalsIgnoreCase(c.getRegion()))
                .flatMap(c -> c.getOrders().stream()
                        .map(n -> new CustOrder(c.getCustomerId(), n))
                ).skip(2)
                .forEach(System.out::println);
    }

    /**
     * Unfortunately this method will not short circuit and will continue to
     * iterate until the end of the stream. I need to figure out a better way to
     * handle this.
     */
    // TakeWhile emulation: a MutableBoolean flag latches off at the first
    // element >= 6 and suppresses everything after it. The flag is external
    // mutable state, so this pipeline is only correct for sequential streams
    // (the addAll combiner would not preserve order/state in parallel).
    public void lambda24() {
        System.out.println("\nFirst numbers less than 6:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        MutableBoolean mb = new MutableBoolean(true);
        Arrays.stream(numbers)
                .collect(ArrayList<Integer>::new,
                         (output, v) -> {
                             if (mb.isTrue()) {
                                 if (v < 6) {
                                     output.add(v);
                                 } else {
                                     mb.flip();
                                 }
                             }
                         },
                         (c1, c2) -> c1.addAll(c2))
                .forEach(System.out::println);
    }

    // TakeWhile with index: IndexHolder supplies the element position;
    // collection stops at the first element <= its index. Sequential-only,
    // same caveat as lambda24.
    public void lambda25() {
        System.out.println("\nFirst numbers not less than their position:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        IndexHolder i = new IndexHolder();
        MutableBoolean mb = new MutableBoolean(true);
        Arrays.stream(numbers)
                .collect(ArrayList<Integer>::new,
                         (output, v) -> {
                             if (mb.isTrue()) {
                                 if (v > i.postIncrement()) {
                                     output.add(v);
                                 } else {
                                     mb.flip();
                                 }
                             }
                         },
                         (c1, c2) -> c1.addAll(c2))
                .forEach(System.out::println);
    }

    // SkipWhile emulation: the flag latches on at the first element divisible
    // by 3; everything from that element onward is kept. Sequential-only.
    public void lambda26() {
        System.out.println("\nAll elements starting from first element divisible by 3:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        MutableBoolean mb = new MutableBoolean(false);
        Arrays.stream(numbers)
                .collect(ArrayList<Integer>::new,
                         (output, v) -> {
                             if (mb.isTrue()) {
                                 output.add(v);
                             } else if (v % 3 == 0) {
                                 output.add(v);
                                 mb.flip();
                             }
                         },
                         (c1, c2) -> c1.addAll(c2))
                .forEach(System.out::println);
    }

    // SkipWhile with index: keeps everything from the first element that is
    // smaller than its position. Sequential-only.
    public void lambda27() {
        System.out.println("\nAll elements starting from first element less than its position:");
        int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0};
        IndexHolder i = new IndexHolder();
        MutableBoolean mb = new MutableBoolean(false);
        Arrays.stream(numbers)
                .collect(ArrayList<Integer>::new,
                         (output, v) -> {
                             if (mb.isTrue()) {
                                 output.add(v);
                             } else if (v < i.postIncrement()) {
                                 output.add(v);
                                 mb.flip();
                             }
                         },
                         (c1, c2) -> c1.addAll(c2)
                )
                .forEach(System.out::println);
    }

    // Immutable (customerId, order) pair used by the join samples above.
    private static class CustOrder {

        private final String customerId;
        private final Order order;

        public CustOrder(String customerId, Order order) {
            this.customerId = customerId;
            this.order = order;
        }

        public String getCustomerId() {
            return customerId;
        }

        public Order getOrder() {
            return order;
        }

        @Override
        public String toString() {
            return String.format("CustOrder[customerId=%s,orderId=%d,orderDate=%s]",
                                 customerId, order.getOrderId(), order.getOrderDate());
        }

    }

}
|
brettryan/jdk8-lambda-samples
|
src/main/java/com/drunkendev/lambdas/PartitioningOperators.java
|
Java
|
apache-2.0
| 6,846 |
package com.esri.mapred;
import com.esri.io.PointFeatureWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import java.io.IOException;
/**
*/
/**
 * Hadoop (mapred API) input format that reads point features from a
 * shapefile split, yielding one {@link PointFeatureWritable} per record.
 */
public class PointFeatureInputFormat
        extends AbstractInputFormat<PointFeatureWritable>
{
    private final class PointFeatureReader
            extends AbstractFeatureReader<PointFeatureWritable>
    {
        // Single mutable instance reused for every record (standard Hadoop
        // pattern to avoid per-record allocation); createValue() returns it.
        private final PointFeatureWritable m_pointFeatureWritable = new PointFeatureWritable();

        public PointFeatureReader(
                final InputSplit inputSplit,
                final JobConf jobConf) throws IOException
        {
            super(inputSplit, jobConf);
        }

        /** Returns the shared, reused value instance. */
        @Override
        public PointFeatureWritable createValue()
        {
            return m_pointFeatureWritable;
        }

        /** Reads the next point geometry and its attributes into the shared value. */
        @Override
        protected void next() throws IOException
        {
            m_shpReader.queryPoint(m_pointFeatureWritable.point);
            putAttributes(m_pointFeatureWritable.attributes);
        }
    }

    /** {@inheritDoc} Creates a reader over the given shapefile split. */
    @Override
    public RecordReader<LongWritable, PointFeatureWritable> getRecordReader(
            final InputSplit inputSplit,
            final JobConf jobConf,
            final Reporter reporter) throws IOException
    {
        return new PointFeatureReader(inputSplit, jobConf);
    }
}
|
syntelos/shapefile-java
|
src/com/esri/mapred/PointFeatureInputFormat.java
|
Java
|
apache-2.0
| 1,483 |
/* Copyright 2018 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.swarm.sqlserver.migration.common;
import java.util.HashMap;
/**
 * SQL Server column data types with stable integer code values, used when
 * serializing schema metadata. {@link #getInstanceFromCodeValue(int)} performs
 * the reverse lookup from a code value to its enum constant.
 */
public enum SqlDataType {
  VARCHAR(0),
  NVARCHAR(1),
  CHAR(2),
  NCHAR(3),
  TEXT(4),
  NTEXT(5),
  BIGINT(6),
  INT(7),
  TINYINT(8),
  SMALLINT(9),
  NUMERIC(10),
  DECIMAL(11),
  MONEY(12),
  SMALLMONEY(13),
  FLOAT(14),
  REAL(15),
  BIT(16),
  DATE(17),
  TIME(18),
  DATETIME(19),
  DATETIME2(20),
  DATETIMEOFFSET(21),
  SMALLDATETIME(22),
  BINARY(23),
  IMAGE(24),
  VARBINARY(25),
  UNIQUEIDENTIFIER(26),
  TIMESTAMP(27);

  /** Stable numeric code for this type; do not rely on ordinal(). */
  private final int codeValue;

  // Reverse-lookup table, populated once in the static initializer below
  // (which runs after all enum constants are constructed). Final and
  // presized to avoid rehashing; never mutated afterwards.
  private static final HashMap<Integer, SqlDataType> codeValueMap =
      new HashMap<Integer, SqlDataType>(64);

  private SqlDataType(int codeValue) {
    this.codeValue = codeValue;
  }

  static {
    for (SqlDataType type : SqlDataType.values()) {
      codeValueMap.put(type.codeValue, type);
    }
  }

  /**
   * Returns the enum constant for the given code value.
   *
   * @param codeValue numeric code as produced by {@link #getCodeValue()}
   * @return the matching constant, or {@code null} if the code is unknown
   */
  public static SqlDataType getInstanceFromCodeValue(int codeValue) {
    return codeValueMap.get(codeValue);
  }

  /** Returns this type's stable numeric code. */
  public int getCodeValue() {
    return codeValue;
  }
}
|
GoogleCloudPlatform/dlp-rdb-bq-import
|
src/main/java/com/google/swarm/sqlserver/migration/common/SqlDataType.java
|
Java
|
apache-2.0
| 1,596 |
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.bsentity;
import java.util.List;
import java.util.ArrayList;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.DomainEntity;
import org.docksidestage.mysql.dbflute.allcommon.DBMetaInstanceHandler;
import org.docksidestage.mysql.dbflute.exentity.*;
/**
* The entity of WHITE_IMPLICIT_REVERSE_FK_REF as TABLE. <br>
* <pre>
* [primary-key]
* WHITE_IMPLICIT_REVERSE_FK_REF_ID
*
* [column]
* WHITE_IMPLICIT_REVERSE_FK_REF_ID, WHITE_IMPLICIT_REVERSE_FK_ID, VALID_BEGIN_DATE, VALID_END_DATE
*
* [sequence]
*
*
* [identity]
* WHITE_IMPLICIT_REVERSE_FK_REF_ID
*
* [version-no]
*
*
* [foreign table]
*
*
* [referrer table]
*
*
* [foreign property]
*
*
* [referrer property]
*
*
* [get/set template]
* /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Integer whiteImplicitReverseFkRefId = entity.getWhiteImplicitReverseFkRefId();
* Integer whiteImplicitReverseFkId = entity.getWhiteImplicitReverseFkId();
* java.time.LocalDate validBeginDate = entity.getValidBeginDate();
* java.time.LocalDate validEndDate = entity.getValidEndDate();
* entity.setWhiteImplicitReverseFkRefId(whiteImplicitReverseFkRefId);
* entity.setWhiteImplicitReverseFkId(whiteImplicitReverseFkId);
* entity.setValidBeginDate(validBeginDate);
* entity.setValidEndDate(validEndDate);
* = = = = = = = = = =/
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsWhiteImplicitReverseFkRef extends AbstractEntity implements DomainEntity {

    // -----------------------------------------------------
    //                                            Definition
    //                                            ----------
    /** Serialization version marker (fixed at 1 for generated entities). */
    private static final long serialVersionUID = 1L;

    // -----------------------------------------------------
    //                                     Column Attributes
    //                                     -----------------
    /** WHITE_IMPLICIT_REVERSE_FK_REF_ID: {PK, ID, NotNull, INT(10)} */
    protected Integer _whiteImplicitReverseFkRefId;

    /** WHITE_IMPLICIT_REVERSE_FK_ID: {UQ+, NotNull, INT(10)} */
    protected Integer _whiteImplicitReverseFkId;

    /** VALID_BEGIN_DATE: {+UQ, NotNull, DATE(10)} */
    protected java.time.LocalDate _validBeginDate;

    /** VALID_END_DATE: {NotNull, DATE(10)} */
    protected java.time.LocalDate _validEndDate;

    // -----------------------------------------------------
    //                                               DB Meta
    //                                               -------
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        // the meta instance is resolved through the shared handler by table name
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }

    /** {@inheritDoc} */
    public String asTableDbName() {
        return "white_implicit_reverse_fk_ref";
    }

    // -----------------------------------------------------
    //                                          Key Handling
    //                                          ------------
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        return _whiteImplicitReverseFkRefId != null;
    }

    /**
     * Make this entity unique-driven by the compound unique key so that a
     * (non-batch) entity update can be issued by that key instead of the PK.
     * @param whiteImplicitReverseFkId : UQ+, NotNull, INT(10). (NotNull)
     * @param validBeginDate : +UQ, NotNull, DATE(10). (NotNull)
     */
    public void uniqueBy(Integer whiteImplicitReverseFkId, java.time.LocalDate validBeginDate) {
        __uniqueDrivenProperties.clear();
        __uniqueDrivenProperties.addPropertyName("whiteImplicitReverseFkId");
        __uniqueDrivenProperties.addPropertyName("validBeginDate");
        setWhiteImplicitReverseFkId(whiteImplicitReverseFkId);
        setValidBeginDate(validBeginDate);
    }

    // -----------------------------------------------------
    //                                     Referrer Property
    //                                     -----------------
    /** Creates the list instance used for referrer properties. */
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }

    // -----------------------------------------------------
    //                                        Basic Override
    //                                        --------------
    @Override
    protected boolean doEquals(Object obj) {
        if (!(obj instanceof BsWhiteImplicitReverseFkRef)) {
            return false;
        }
        BsWhiteImplicitReverseFkRef other = (BsWhiteImplicitReverseFkRef) obj;
        // equality is determined by the primary key only
        return xSV(_whiteImplicitReverseFkRefId, other._whiteImplicitReverseFkRefId);
    }

    @Override
    protected int doHashCode(int initial) {
        // hash on table name and primary key, mirroring doEquals()
        return xCH(xCH(initial, asTableDbName()), _whiteImplicitReverseFkRefId);
    }

    @Override
    protected String doBuildStringWithRelation(String li) {
        return ""; // this table has no relations to render
    }

    @Override
    protected String doBuildColumnString(String dm) {
        StringBuilder buf = new StringBuilder();
        buf.append(dm).append(xfND(_whiteImplicitReverseFkRefId));
        buf.append(dm).append(xfND(_whiteImplicitReverseFkId));
        buf.append(dm).append(xfND(_validBeginDate));
        buf.append(dm).append(xfND(_validEndDate));
        if (buf.length() > dm.length()) {
            buf.delete(0, dm.length()); // strip the leading delimiter
        }
        buf.insert(0, "{").append("}");
        return buf.toString();
    }

    @Override
    protected String doBuildRelationString(String dm) {
        return ""; // this table has no relations to render
    }

    @Override
    public WhiteImplicitReverseFkRef clone() {
        return (WhiteImplicitReverseFkRef) super.clone();
    }

    // -----------------------------------------------------
    //                                              Accessor
    //                                              --------
    /**
     * [get] WHITE_IMPLICIT_REVERSE_FK_REF_ID: {PK, ID, NotNull, INT(10)} <br>
     * @return The value of the column 'WHITE_IMPLICIT_REVERSE_FK_REF_ID'. (basically NotNull if selected: for the constraint)
     */
    public Integer getWhiteImplicitReverseFkRefId() {
        checkSpecifiedProperty("whiteImplicitReverseFkRefId");
        return _whiteImplicitReverseFkRefId;
    }

    /**
     * [set] WHITE_IMPLICIT_REVERSE_FK_REF_ID: {PK, ID, NotNull, INT(10)} <br>
     * @param whiteImplicitReverseFkRefId The value of the column 'WHITE_IMPLICIT_REVERSE_FK_REF_ID'. (basically NotNull if update: for the constraint)
     */
    public void setWhiteImplicitReverseFkRefId(Integer whiteImplicitReverseFkRefId) {
        registerModifiedProperty("whiteImplicitReverseFkRefId");
        _whiteImplicitReverseFkRefId = whiteImplicitReverseFkRefId;
    }

    /**
     * [get] WHITE_IMPLICIT_REVERSE_FK_ID: {UQ+, NotNull, INT(10)} <br>
     * @return The value of the column 'WHITE_IMPLICIT_REVERSE_FK_ID'. (basically NotNull if selected: for the constraint)
     */
    public Integer getWhiteImplicitReverseFkId() {
        checkSpecifiedProperty("whiteImplicitReverseFkId");
        return _whiteImplicitReverseFkId;
    }

    /**
     * [set] WHITE_IMPLICIT_REVERSE_FK_ID: {UQ+, NotNull, INT(10)} <br>
     * @param whiteImplicitReverseFkId The value of the column 'WHITE_IMPLICIT_REVERSE_FK_ID'. (basically NotNull if update: for the constraint)
     */
    public void setWhiteImplicitReverseFkId(Integer whiteImplicitReverseFkId) {
        registerModifiedProperty("whiteImplicitReverseFkId");
        _whiteImplicitReverseFkId = whiteImplicitReverseFkId;
    }

    /**
     * [get] VALID_BEGIN_DATE: {+UQ, NotNull, DATE(10)} <br>
     * @return The value of the column 'VALID_BEGIN_DATE'. (basically NotNull if selected: for the constraint)
     */
    public java.time.LocalDate getValidBeginDate() {
        checkSpecifiedProperty("validBeginDate");
        return _validBeginDate;
    }

    /**
     * [set] VALID_BEGIN_DATE: {+UQ, NotNull, DATE(10)} <br>
     * @param validBeginDate The value of the column 'VALID_BEGIN_DATE'. (basically NotNull if update: for the constraint)
     */
    public void setValidBeginDate(java.time.LocalDate validBeginDate) {
        registerModifiedProperty("validBeginDate");
        _validBeginDate = validBeginDate;
    }

    /**
     * [get] VALID_END_DATE: {NotNull, DATE(10)} <br>
     * @return The value of the column 'VALID_END_DATE'. (basically NotNull if selected: for the constraint)
     */
    public java.time.LocalDate getValidEndDate() {
        checkSpecifiedProperty("validEndDate");
        return _validEndDate;
    }

    /**
     * [set] VALID_END_DATE: {NotNull, DATE(10)} <br>
     * @param validEndDate The value of the column 'VALID_END_DATE'. (basically NotNull if update: for the constraint)
     */
    public void setValidEndDate(java.time.LocalDate validEndDate) {
        registerModifiedProperty("validEndDate");
        _validEndDate = validEndDate;
    }
}
|
dbflute-test/dbflute-test-dbms-mysql
|
src/main/java/org/docksidestage/mysql/dbflute/bsentity/BsWhiteImplicitReverseFkRef.java
|
Java
|
apache-2.0
| 10,962 |
/**
* Copyright (c) 2016 Lemur Consulting Ltd.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.flax.biosolr.elasticsearch;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.co.flax.biosolr.elasticsearch.mapper.ontology.ElasticOntologyHelperFactory;
import uk.co.flax.biosolr.elasticsearch.mapper.ontology.OntologySettings;
import uk.co.flax.biosolr.ontology.core.OntologyHelper;
import uk.co.flax.biosolr.ontology.core.OntologyHelperException;
import java.io.Closeable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by mlp on 09/02/16.
* @author mlp
*/
/**
 * Builds and caches {@link OntologyHelper} instances keyed by their ontology
 * source (ontology URI, or OLS base URL plus optional ontology name), and
 * schedules a periodic reaper that disposes of helpers which have not been
 * used within their configured check interval.
 * <p>
 * NOTE(review): instances register themselves in a static singleton slot via
 * the injected constructor; confirm only one instance is ever created by the
 * container, otherwise the last one constructed silently wins.
 */
public class OntologyHelperBuilder implements Closeable {

    private static final Logger LOGGER = LoggerFactory.getLogger(OntologyHelperBuilder.class);

    private ThreadPool threadPool;

    private static OntologyHelperBuilder instance;
    private Map<String, OntologyHelper> helpers = new ConcurrentHashMap<>();

    @Inject
    public OntologyHelperBuilder(ThreadPool threadPool) {
        this.threadPool = threadPool;
        setInstance(this);
    }

    private static void setInstance(OntologyHelperBuilder odb) {
        instance = odb;
    }

    public static OntologyHelperBuilder getInstance() {
        return instance;
    }

    /**
     * Returns the cached helper for the given settings, building (and
     * scheduling a disposal check for) a new one on first use.
     * <p>
     * Synchronized to fix a check-then-act race: two concurrent callers could
     * both see a missing key, each build an (expensive) helper, and leak one.
     *
     * @param settings the ontology settings identifying the helper.
     * @return the shared helper for those settings.
     * @throws OntologyHelperException if a new helper cannot be built.
     */
    private synchronized OntologyHelper getHelper(OntologySettings settings) throws OntologyHelperException {
        String helperKey = buildHelperKey(settings);
        OntologyHelper helper = helpers.get(helperKey);

        if (helper == null) {
            helper = new ElasticOntologyHelperFactory(settings).buildOntologyHelper();
            OntologyCheckRunnable checker = new OntologyCheckRunnable(helperKey, settings.getThreadCheckMs());
            threadPool.scheduleWithFixedDelay(checker, TimeValue.timeValueMillis(settings.getThreadCheckMs()));
            helpers.put(helperKey, helper);
        }

        // Record the access on every lookup, not just at creation time;
        // previously the timestamp was only set once, so the reaper could
        // dispose of a helper that was still in active use.
        helper.updateLastCallTime();

        return helper;
    }

    /**
     * Static convenience accessor delegating to the singleton instance.
     *
     * @param settings the ontology settings identifying the helper.
     * @return the shared helper for those settings.
     * @throws OntologyHelperException if a new helper cannot be built.
     */
    public static OntologyHelper getOntologyHelper(OntologySettings settings) throws OntologyHelperException {
        OntologyHelperBuilder builder = getInstance();
        return builder.getHelper(settings);
    }

    @Override
    public void close() {
        // Explicitly dispose of any remaining helpers
        for (Map.Entry<String, OntologyHelper> helperEntry : helpers.entrySet()) {
            if (helperEntry.getValue() != null) {
                LOGGER.info("Disposing of helper for {}", helperEntry.getKey());
                helperEntry.getValue().dispose();
            }
        }
        // Drop the (now disposed) helpers so they cannot be handed out again.
        helpers.clear();
    }

    /**
     * Builds the cache key for a helper: the ontology URI when set, otherwise
     * the OLS base URL optionally qualified by the OLS ontology name.
     */
    private static String buildHelperKey(OntologySettings settings) {
        String key;

        if (StringUtils.isNotBlank(settings.getOntologyUri())) {
            key = settings.getOntologyUri();
        } else {
            if (StringUtils.isNotBlank(settings.getOlsOntology())) {
                key = settings.getOlsBaseUrl() + "_" + settings.getOlsOntology();
            } else {
                key = settings.getOlsBaseUrl();
            }
        }

        return key;
    }

    /**
     * Periodic task that disposes of a helper once it has been idle for longer
     * than its configured check interval, freeing its memory.
     */
    private final class OntologyCheckRunnable implements Runnable {

        final String threadKey;
        final long deleteCheckMs;

        public OntologyCheckRunnable(String threadKey, long deleteCheckMs) {
            this.threadKey = threadKey;
            this.deleteCheckMs = deleteCheckMs;
        }

        @Override
        public void run() {
            OntologyHelper helper = helpers.get(threadKey);
            if (helper != null) {
                // Check if the last call time was longer ago than the maximum
                if (System.currentTimeMillis() - deleteCheckMs > helper.getLastCallTime()) {
                    // Assume helper is out of use - dispose of it to allow memory to be freed
                    helper.dispose();
                    helpers.remove(threadKey);
                }
            }
        }
    }
}
|
flaxsearch/BioSolr
|
ontology/ontology-annotator/elasticsearch-ontology-annotator/es-ontology-annotator-es2.2/src/main/java/uk/co/flax/biosolr/elasticsearch/OntologyHelperBuilder.java
|
Java
|
apache-2.0
| 4,116 |
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.immuhama.bsbhv.loader;
import java.util.List;
import org.dbflute.bhv.*;
import org.docksidestage.mysql.dbflute.immuhama.exbhv.*;
import org.docksidestage.mysql.dbflute.immuhama.exentity.*;
/**
* The referrer loader of (会員ログイン情報)MEMBER_LOGIN as TABLE. <br>
* <pre>
* [primary key]
* MEMBER_LOGIN_ID
*
* [column]
* MEMBER_LOGIN_ID, MEMBER_ID, LOGIN_DATETIME, MOBILE_LOGIN_FLG, LOGIN_MEMBER_STATUS_CODE
*
* [sequence]
*
*
* [identity]
* MEMBER_LOGIN_ID
*
* [version-no]
*
*
* [foreign table]
* MEMBER_STATUS, MEMBER
*
* [referrer table]
*
*
* [foreign property]
* memberStatus, member
*
* [referrer property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public class ImmuLoaderOfMemberLogin {

    // =================================================================================
    //                                                                         Attribute
    //                                                                         =========
    protected List<ImmuMemberLogin> _selectedList;
    protected BehaviorSelector _selector;
    protected ImmuMemberLoginBhv _myBhv; // resolved lazily through the selector

    // =================================================================================
    //                                                                 Ready for Loading
    //                                                                 =================
    /**
     * Prepares this loader with the selected entity list and behavior selector.
     * @param selectedList The list of selected entities. (NotNull)
     * @param selector The selector of behavior. (NotNull)
     * @return this. (NotNull)
     */
    public ImmuLoaderOfMemberLogin ready(List<ImmuMemberLogin> selectedList, BehaviorSelector selector) {
        _selectedList = selectedList;
        _selector = selector;
        return this;
    }

    /** Returns the behavior instance, selecting and caching it on first use. */
    protected ImmuMemberLoginBhv myBhv() {
        if (_myBhv == null) {
            _myBhv = _selector.select(ImmuMemberLoginBhv.class);
        }
        return _myBhv;
    }

    // =================================================================================
    //                                                                  Pull out Foreign
    //                                                                  ================
    protected ImmuLoaderOfMemberStatus _foreignMemberStatusLoader;

    /** Pulls out the loader for the foreign MEMBER_STATUS entities (created once). */
    public ImmuLoaderOfMemberStatus pulloutMemberStatus() {
        if (_foreignMemberStatusLoader == null) {
            _foreignMemberStatusLoader = new ImmuLoaderOfMemberStatus()
                    .ready(myBhv().pulloutMemberStatus(_selectedList), _selector);
        }
        return _foreignMemberStatusLoader;
    }

    protected ImmuLoaderOfMember _foreignMemberLoader;

    /** Pulls out the loader for the foreign MEMBER entities (created once). */
    public ImmuLoaderOfMember pulloutMember() {
        if (_foreignMemberLoader == null) {
            _foreignMemberLoader = new ImmuLoaderOfMember()
                    .ready(myBhv().pulloutMember(_selectedList), _selector);
        }
        return _foreignMemberLoader;
    }

    // =================================================================================
    //                                                                          Accessor
    //                                                                          ========
    public List<ImmuMemberLogin> getSelectedList() { return _selectedList; }
    public BehaviorSelector getSelector() { return _selector; }
}
|
dbflute-test/dbflute-test-dbms-mysql
|
src/main/java/org/docksidestage/mysql/dbflute/immuhama/bsbhv/loader/ImmuLoaderOfMemberLogin.java
|
Java
|
apache-2.0
| 3,938 |
/*
* Copyright © 2009 HotPads (admin@hotpads.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.webappinstance.storage.webappinstancelog;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.datarouter.model.databean.FieldlessIndexEntry;
import io.datarouter.scanner.Scanner;
import io.datarouter.storage.Datarouter;
import io.datarouter.storage.client.ClientId;
import io.datarouter.storage.dao.BaseDao;
import io.datarouter.storage.dao.BaseRedundantDaoParams;
import io.datarouter.storage.node.factory.IndexingNodeFactory;
import io.datarouter.storage.node.factory.NodeFactory;
import io.datarouter.storage.node.op.combo.IndexedSortedMapStorage.IndexedSortedMapStorageNode;
import io.datarouter.storage.node.op.index.IndexReader;
import io.datarouter.storage.tag.Tag;
import io.datarouter.util.tuple.Range;
import io.datarouter.virtualnode.redundant.RedundantIndexedSortedMapStorageNode;
import io.datarouter.webappinstance.storage.webappinstancelog.WebappInstanceLog.WebappInstanceLogFielder;
@Singleton
public class DatarouterWebappInstanceLogDao extends BaseDao{

	/** Client ids backing this dao; more than one yields a redundant node. */
	public static class DatarouterWebappInstanceLogDaoParams extends BaseRedundantDaoParams{

		public DatarouterWebappInstanceLogDaoParams(List<ClientId> clientIds){
			super(clientIds);
		}

	}

	/** Primary indexed storage node for webapp instance logs. */
	private final IndexedSortedMapStorageNode<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogFielder> node;
	/** Key-only managed secondary index ordered by build instant. */
	private final IndexReader<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogByBuildInstantKey,
			FieldlessIndexEntry<WebappInstanceLogByBuildInstantKey,WebappInstanceLogKey,WebappInstanceLog>>
			byBuildInstant;

	@Inject
	public DatarouterWebappInstanceLogDao(
			Datarouter datarouter,
			NodeFactory nodeFactory,
			IndexingNodeFactory indexingNodeFactory,
			DatarouterWebappInstanceLogDaoParams params){
		super(datarouter);
		// Build one storage node per configured client, then collapse the list
		// into a single redundant node when more than one client is present.
		node = Scanner.of(params.clientIds)
				.map(clientId -> {
					IndexedSortedMapStorageNode<WebappInstanceLogKey,WebappInstanceLog,WebappInstanceLogFielder>
							clientNode = nodeFactory
							.create(clientId, WebappInstanceLog::new, WebappInstanceLogFielder::new)
							.withTag(Tag.DATAROUTER)
							.build();
					return clientNode;
				})
				.listTo(RedundantIndexedSortedMapStorageNode::makeIfMulti);
		byBuildInstant = indexingNodeFactory.createKeyOnlyManagedIndex(WebappInstanceLogByBuildInstantKey::new, node)
				.build();
		datarouter.register(node);
	}

	/** Persists a single log record. */
	public void put(WebappInstanceLog log){
		node.put(log);
	}

	/** Scans every log record in primary-key order. */
	public Scanner<WebappInstanceLog> scan(){
		return node.scan();
	}

	/** Scans the log records sharing the given key prefix. */
	public Scanner<WebappInstanceLog> scanWithPrefix(WebappInstanceLogKey key){
		return node.scanWithPrefix(key);
	}

	/** Scans log records through the by-build-instant secondary index. */
	public Scanner<WebappInstanceLog> scanDatabeans(Range<WebappInstanceLogByBuildInstantKey> range){
		return byBuildInstant.scanDatabeans(range);
	}

}
|
hotpads/datarouter
|
datarouter-webapp-instance/src/main/java/io/datarouter/webappinstance/storage/webappinstancelog/DatarouterWebappInstanceLogDao.java
|
Java
|
apache-2.0
| 3,351 |
package nl.galesloot_ict.efjenergy.MeterReading;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
/**
 * A typed list of {@code MeterReading} entries that Jackson can create
 * directly from a JSON array.
 * <p>
 * Created by FlorisJan on 23-11-2014.
 */
public class MeterReadingsList extends ArrayList<MeterReading> {

    // A configured ObjectMapper is thread-safe and comparatively expensive to
    // construct, so share one instance instead of building a new mapper on
    // every call to Create().
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /**
     * Deserializes a JSON array of meter readings into a {@code MeterReadingsList}.
     *
     * @param jsonString the JSON document to parse (an array of readings)
     * @return the parsed list
     * @throws JsonParseException if the input is not well-formed JSON
     * @throws JsonMappingException if the JSON cannot be mapped to the element type
     * @throws IOException on low-level read errors
     */
    @JsonCreator
    public static MeterReadingsList Create(String jsonString) throws JsonParseException, JsonMappingException, IOException {
        return MAPPER.readValue(jsonString, MeterReadingsList.class);
    }
}
|
fjgalesloot/eFJenergy
|
Android/app/src/main/java/nl/galesloot_ict/efjenergy/MeterReading/MeterReadingsList.java
|
Java
|
apache-2.0
| 794 |
package com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.service;
import com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.entity.Person;
import com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.repository.PersonRepository;
import com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.repository.PersonRepositoryImp;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
/**
 * Thin service facade over {@link PersonRepository} CRUD operations.
 * <p>
 * Created by pzhong1 on 1/23/15.
 * <p>
 * NOTE(review): this class carries no {@code @Service}/{@code @Component}
 * annotation — presumably it is registered as a bean in Java/XML config;
 * confirm, otherwise the {@code @Autowired} field is never injected.
 */
public class PersonService {
    // Field-injected repository for Person documents.
    @Autowired
    private PersonRepository personRepository;

    /** Returns every person in the backing collection. */
    public List<Person> getAllPersons(){
        return personRepository.findAll();
    }

    /** Looks up one person by id; presumably returns null when absent (Spring Data findOne) — verify. */
    public Person searchPerson(String id){
        return personRepository.findOne(id);
    }

    // NOTE(review): the name suggests a specific fixture person is created,
    // but the method simply saves whatever entity it is given.
    public void insertPersonWithNameJohnAndRandomAge(Person person){
        personRepository.save(person);
    }

    /** Deletes every document in the person collection. */
    public void dropPersonCollection() {
        personRepository.deleteAll();
    }
}
|
ArthurZhong/SparkStormKafkaTest
|
src/main/java/com/walmart/labs/pcs/normalize/MongoDB/SpringBoot/service/PersonService.java
|
Java
|
apache-2.0
| 925 |
package edu.wsu.weather.agweathernet.helpers;
import java.io.Serializable;
/**
 * Serializable value holder describing a weather station: its identity and
 * location fields plus the latest readings shown on the detail screen.
 */
public class StationModel implements Serializable {

	private static final long serialVersionUID = 1L;

	// --- identity / location -------------------------------------------------
	private String unitId;
	private String name;
	private String county;
	private String city;
	private String state;
	private String installationDate;
	private String distance;
	private boolean isFavourite;

	// --- latest readings for the station detail view -------------------------
	private String airTemp;
	private String relHumid;
	private String windSpeed;
	private String precip;

	/** Creates an empty station model; populate via setters. */
	public StationModel() {
	}

	/** Creates a station model with the core identifying fields populated. */
	public StationModel(String unitId, String name, String county,
			String installationDate) {
		setUnitId(unitId);
		setName(name);
		setCounty(county);
		setInstallationDate(installationDate);
	}

	/** Human-readable label: station name followed by its county. */
	@Override
	public String toString() {
		return name + " " + county;
	}

	public String getUnitId() {
		return unitId;
	}

	public void setUnitId(String unitId) {
		this.unitId = unitId;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getCounty() {
		return county;
	}

	public void setCounty(String county) {
		this.county = county;
	}

	public String getCity() {
		return city;
	}

	public void setCity(String city) {
		this.city = city;
	}

	public String getState() {
		return state;
	}

	public void setState(String state) {
		this.state = state;
	}

	public String getInstallationDate() {
		return installationDate;
	}

	public void setInstallationDate(String installationDate) {
		this.installationDate = installationDate;
	}

	public String getDistance() {
		return distance;
	}

	public void setDistance(String distance) {
		this.distance = distance;
	}

	public boolean isFavourite() {
		return isFavourite;
	}

	public void setFavourite(boolean isFavourite) {
		this.isFavourite = isFavourite;
	}

	public String getAirTemp() {
		return airTemp;
	}

	public void setAirTemp(String airTemp) {
		this.airTemp = airTemp;
	}

	public String getRelHumid() {
		return relHumid;
	}

	public void setRelHumid(String relHumid) {
		this.relHumid = relHumid;
	}

	public String getWindSpeed() {
		return windSpeed;
	}

	public void setWindSpeed(String windSpeed) {
		this.windSpeed = windSpeed;
	}

	public String getPrecip() {
		return precip;
	}

	public void setPrecip(String precip) {
		this.precip = precip;
	}
}
|
levanlevi/AgWeatherNet
|
src/edu/wsu/weather/agweathernet/helpers/StationModel.java
|
Java
|
apache-2.0
| 2,366 |
package ca.uhn.fhir.jpa.term;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * A (system, code) pair identifying a terminology concept independent of any
 * code system version.
 * <p>
 * Mutable value holder; not thread-safe.
 */
public class VersionIndependentConcept {

	private String mySystem;
	private String myCode;

	/**
	 * Creates a concept reference.
	 *
	 * @param theSystem the code system identifier
	 * @param theCode the code within that system
	 */
	public VersionIndependentConcept(String theSystem, String theCode) {
		// Assign fields directly instead of calling the overridable setters:
		// invoking overridable methods from a constructor lets a subclass
		// override observe a partially constructed object.
		mySystem = theSystem;
		myCode = theCode;
	}

	/** Returns the code system identifier. */
	public String getSystem() {
		return mySystem;
	}

	/** Sets the code system identifier. */
	public void setSystem(String theSystem) {
		mySystem = theSystem;
	}

	/** Returns the code within the system. */
	public String getCode() {
		return myCode;
	}

	/** Sets the code within the system. */
	public void setCode(String theCode) {
		myCode = theCode;
	}

}
|
Gaduo/hapi-fhir
|
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/VersionIndependentConcept.java
|
Java
|
apache-2.0
| 1,149 |
/**
* Copyright 2015-2017 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.benchmarks;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import zipkin.Annotation;
import zipkin.BinaryAnnotation;
import zipkin.Constants;
import zipkin.Endpoint;
import zipkin.TraceKeys;
import zipkin2.Span;
import zipkin.internal.V2SpanConverter;
import zipkin.internal.Util;
@Measurement(iterations = 5, time = 1)
@Warmup(iterations = 10, time = 1)
@Fork(3)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Thread)
@Threads(1)
public class Span2ConverterBenchmarks {
  // Fixture endpoints reused by all spans below: a frontend at 127.0.0.1 and
  // a backend at 192.168.99.101:9000.
  Endpoint frontend = Endpoint.create("frontend", 127 << 24 | 1);
  Endpoint backend = Endpoint.builder()
      .serviceName("backend")
      .ipv4(192 << 24 | 168 << 16 | 99 << 8 | 101)
      .port(9000)
      .build();

  // A v1 span shared between client and server (annotations from BOTH hosts),
  // which fromSpan() must split into multiple v2 spans.
  zipkin.Span shared = zipkin.Span.builder()
      .traceIdHigh(Util.lowerHexToUnsignedLong("7180c278b62e8f6a"))
      .traceId(Util.lowerHexToUnsignedLong("216a2aea45d08fc9"))
      .parentId(Util.lowerHexToUnsignedLong("6b221d5bc9e6496c"))
      .id(Util.lowerHexToUnsignedLong("5b4185666d50f68b"))
      .name("get")
      .timestamp(1472470996199000L)
      .duration(207000L)
      .addAnnotation(Annotation.create(1472470996199000L, Constants.CLIENT_SEND, frontend))
      .addAnnotation(Annotation.create(1472470996238000L, Constants.WIRE_SEND, frontend))
      .addAnnotation(Annotation.create(1472470996250000L, Constants.SERVER_RECV, backend))
      .addAnnotation(Annotation.create(1472470996350000L, Constants.SERVER_SEND, backend))
      .addAnnotation(Annotation.create(1472470996403000L, Constants.WIRE_RECV, frontend))
      .addAnnotation(Annotation.create(1472470996406000L, Constants.CLIENT_RECV, frontend))
      .addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/api", frontend))
      .addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/backend", backend))
      .addBinaryAnnotation(BinaryAnnotation.create("clnt/finagle.version", "6.45.0", frontend))
      .addBinaryAnnotation(BinaryAnnotation.create("srv/finagle.version", "6.44.0", backend))
      .addBinaryAnnotation(BinaryAnnotation.address(Constants.CLIENT_ADDR, frontend))
      .addBinaryAnnotation(BinaryAnnotation.address(Constants.SERVER_ADDR, backend))
      .build();

  // A v1 span with only the server-side annotations of the same trace.
  zipkin.Span server = zipkin.Span.builder()
      .traceIdHigh(Util.lowerHexToUnsignedLong("7180c278b62e8f6a"))
      .traceId(Util.lowerHexToUnsignedLong("216a2aea45d08fc9"))
      .parentId(Util.lowerHexToUnsignedLong("6b221d5bc9e6496c"))
      .id(Util.lowerHexToUnsignedLong("5b4185666d50f68b"))
      .name("get")
      .addAnnotation(Annotation.create(1472470996250000L, Constants.SERVER_RECV, backend))
      .addAnnotation(Annotation.create(1472470996350000L, Constants.SERVER_SEND, backend))
      .addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/backend", backend))
      .addBinaryAnnotation(BinaryAnnotation.create("srv/finagle.version", "6.44.0", backend))
      .addBinaryAnnotation(BinaryAnnotation.address(Constants.CLIENT_ADDR, frontend))
      .build();

  // The v2 equivalent of the server-side span above, used for toSpan().
  Span server2 = Span.newBuilder()
      .traceId("7180c278b62e8f6a216a2aea45d08fc9")
      .parentId("6b221d5bc9e6496c")
      .id("5b4185666d50f68b")
      .name("get")
      .kind(Span.Kind.SERVER)
      .shared(true)
      .localEndpoint(backend.toV2())
      .remoteEndpoint(frontend.toV2())
      .timestamp(1472470996250000L)
      .duration(100000L)
      .putTag(TraceKeys.HTTP_PATH, "/backend")
      .putTag("srv/finagle.version", "6.44.0")
      .build();

  // Measures v1->v2 conversion when the input mixes client and server hosts.
  @Benchmark public List<Span> fromSpan_splitShared() {
    return V2SpanConverter.fromSpan(shared);
  }

  // Measures v1->v2 conversion of a single-host (server) span.
  @Benchmark public List<Span> fromSpan() {
    return V2SpanConverter.fromSpan(server);
  }

  // Measures v2->v1 conversion.
  @Benchmark public zipkin.Span toSpan() {
    return V2SpanConverter.toSpan(server2);
  }

  // Convenience main entry-point
  public static void main(String[] args) throws RunnerException {
    Options opt = new OptionsBuilder()
        .include(".*" + Span2ConverterBenchmarks.class.getSimpleName() + ".*")
        .build();

    new Runner(opt).run();
  }
}
|
soundcloud/zipkin
|
benchmarks/src/main/java/zipkin/benchmarks/Span2ConverterBenchmarks.java
|
Java
|
apache-2.0
| 5,211 |
/*
* Copyright 2015 Adobe Systems Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ${package}.core.servlets;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.servlets.HttpConstants;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.api.resource.ValueMap;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Component;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Servlet that writes some sample content into the response. It is mounted for
* all resources of a specific Sling resource type. The
* {@link SlingSafeMethodsServlet} shall be used for HTTP methods that are
* idempotent. For write operations use the {@link SlingAllMethodsServlet}.
*/
@Component(service=Servlet.class,
           property={
                   Constants.SERVICE_DESCRIPTION + "=Simple Demo Servlet",
                   "sling.servlet.methods=" + HttpConstants.METHOD_GET,
                   "sling.servlet.resourceTypes="+ "${appsFolderName}/components/structure/page",
                   "sling.servlet.extensions=" + "txt"
           })
public class SimpleServlet extends SlingSafeMethodsServlet {

    // Fixed: the field was previously spelled "serialVersionUid"; Java
    // serialization only recognizes the exact name "serialVersionUID", so the
    // old field was silently ignored.
    private static final long serialVersionUID = 1L;

    /**
     * Writes the page's jcr:title to the response as plain text.
     *
     * @param req the request whose resource is read
     * @param resp the response to write to
     * @throws IOException if writing the response fails
     */
    @Override
    protected void doGet(final SlingHttpServletRequest req,
            final SlingHttpServletResponse resp) throws ServletException, IOException {
        final Resource resource = req.getResource();

        resp.setContentType("text/plain");
        // Guard against adaptTo returning null (adaptTo is documented as
        // nullable) instead of throwing a NullPointerException mid-response.
        final ValueMap properties = resource.adaptTo(ValueMap.class);
        final Object title = (properties != null) ? properties.get("jcr:title") : null;
        resp.getWriter().write("Title = " + title);
    }
}
|
MyAccInt/aem-project-archetype
|
src/main/archetype/core/src/main/java/core/servlets/SimpleServlet.java
|
Java
|
apache-2.0
| 2,367 |
package com.comp.ninti.sportsmanager;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ListView;
import com.comp.ninti.adapter.LeaderBoardAdapter;
import com.comp.ninti.database.DbHandler;
import com.comp.ninti.general.core.Event;
/**
 * Activity showing the leader board for the event passed in via the launch
 * intent extra "com.comp.ninti.general.core.Event".
 */
public class LeaderBoard extends AppCompatActivity {
    private Event event;
    private DbHandler dbHandler;
    private LeaderBoardAdapter leaderBoardAdapter;
    private ListView listView;

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Treat the toolbar "up" button like the hardware back button.
        if (item.getItemId() == android.R.id.home) {
            super.onBackPressed();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        event = getIntent().getExtras().getParcelable("com.comp.ninti.general.core.Event");
        // Hand the event back to the caller even when the user cancels.
        Intent intent = new Intent();
        intent.putExtra("com.comp.ninti.general.core.Event", event);
        setResult(RESULT_CANCELED, intent);
        setContentView(R.layout.activity_leader_board);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        listView = (ListView) findViewById(R.id.lvLeaderBoard);
        listView.setTextFilterEnabled(true);
        displayItems();
    }

    /**
     * Reloads the leader board for the current event and binds it to the list.
     */
    private void displayItems() {
        dbHandler = new DbHandler(LeaderBoard.this, "", null, 1);
        new Handler().post(new Runnable() {
            @Override
            public void run() {
                leaderBoardAdapter = new LeaderBoardAdapter(
                        LeaderBoard.this,
                        dbHandler.getLeaderBoard(event.getId()),
                        0);
                listView.setAdapter(leaderBoardAdapter);
                // Fixed: close() used to run immediately after post(), i.e.
                // before this runnable executed, so getLeaderBoard() could hit
                // an already-closed handler. Close only after the query.
                dbHandler.close();
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Refresh when returning to the foreground so new scores appear.
        displayItems();
    }
}
|
Nintinugga/SportsManager
|
app/src/main/java/com/comp/ninti/sportsmanager/LeaderBoard.java
|
Java
|
apache-2.0
| 2,193 |
/**
* www.bplow.com
*/
package com.bplow.netconn.systemmng.domain;
/**
* @desc 角色
* @author wangxiaolei
* @date 2016年5月8日 下午4:30:39
*/
/**
 * Value holder for a role record: its id, owning user, name and description.
 *
 * @author wangxiaolei
 * @date 2016-05-08
 */
public class RoleDomain {

    private String roleId;   // role primary key
    private String userId;   // owning user's id
    private String roleName; // display name
    private String roleDesc; // free-form description

    public String getRoleId() {
        return roleId;
    }

    public void setRoleId(String roleId) {
        this.roleId = roleId;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getRoleName() {
        return roleName;
    }

    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    public String getRoleDesc() {
        return roleDesc;
    }

    public void setRoleDesc(String roleDesc) {
        this.roleDesc = roleDesc;
    }
}
|
ahwxl/ads
|
ads/src/main/java/com/bplow/netconn/systemmng/domain/RoleDomain.java
|
Java
|
apache-2.0
| 824 |
package it.breex.bus.impl.jms;
import it.breex.bus.event.AbstractResponseEvent;
import it.breex.bus.event.EventData;
import it.breex.bus.event.EventHandler;
import it.breex.bus.event.RequestEvent;
import it.breex.bus.impl.AbstractEventManager;
import java.util.UUID;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.Session;
/**
 * JMS-backed event manager. Requests are published on a single shared queue
 * and routed to handlers via the JMS correlation id (set to the event name);
 * responses travel back on a per-instance temporary queue.
 */
public class JmsEventManager extends AbstractEventManager {

    private static final String DEFAULT_REQUEST_QUEUE = "breexDefaulRequestQueue";

    /** Unique identifier of this node, reported via {@link #getLocalNodeId()}. */
    private final String nodeId = UUID.randomUUID().toString();
    private final boolean transacted = false;
    private final int acknowledgeMode = Session.AUTO_ACKNOWLEDGE;
    private final Connection jmsConnection;
    private final Session session;
    private final Queue requestQueue;
    private final MessageProducer requestMessageProducer;
    private final Queue responseQueue;
    private final MessageProducer responseMessageProducer;

    /**
     * Opens the JMS connection/session, creates the shared request queue and
     * this node's temporary response queue, and starts listening for responses.
     *
     * @param jmsConnectionFactory factory for the underlying JMS connection
     * @throws RuntimeException wrapping any {@link JMSException} raised during setup
     */
    public JmsEventManager(ConnectionFactory jmsConnectionFactory) {
        try {
            jmsConnection = jmsConnectionFactory.createConnection();
            jmsConnection.start();
            session = jmsConnection.createSession(transacted, acknowledgeMode);
            requestQueue = session.createQueue(DEFAULT_REQUEST_QUEUE);
            requestMessageProducer = session.createProducer(requestQueue);
            // Temporary queue is private to this connection: responses to our
            // requests come back here (see prepareRequest's JMSReplyTo).
            responseQueue = session.createTemporaryQueue();
            // Anonymous producer: the destination is supplied per send() call.
            responseMessageProducer = session.createProducer(null);
            session.createConsumer(responseQueue).setMessageListener(new MessageListener() {
                @Override
                public void onMessage(Message message) {
                    try {
                        EventData<?> eventData = (EventData<?>) ((ObjectMessage) message).getObject();
                        getLogger().debug("Event Response received. Event name: [{}], sender id: [{}]", eventData.getName(),
                                eventData.getSenderId());
                        AbstractResponseEvent responseEvent = new AbstractResponseEvent(eventData) {
                        };
                        processResponse(responseEvent, getResponseHandlers().remove(eventData.getId()));
                    } catch (JMSException e) {
                        // BUG FIX: exception was previously created but never thrown,
                        // silently swallowing delivery failures.
                        throw new RuntimeException(e);
                    }
                }
            });
        } catch (JMSException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public String getLocalNodeId() {
        return nodeId;
    }

    /**
     * Sends {@code responseEventData} back to the requester's reply queue,
     * which was stashed in the request's transport data by the sending node.
     */
    @Override
    protected <I, O> void prepareResponse(EventData<I> requestEventData, EventData<O> responseEventData) {
        try {
            Message responseMessage = session.createObjectMessage(responseEventData);
            responseMessageProducer.send((Destination) requestEventData.getTransportData(), responseMessage);
        } catch (JMSException e) {
            // BUG FIX: was "new RuntimeException(e);" without "throw".
            throw new RuntimeException(e);
        }
    }

    /**
     * Subscribes to requests for {@code eventName} by filtering the shared
     * request queue on the JMS correlation id.
     * <p>
     * NOTE(review): {@code eventHandler} is not used directly here —
     * presumably the base class dispatches to it from
     * {@code processRequest(...)}; confirm in {@code AbstractEventManager}.
     */
    @Override
    protected <I, O> void registerCallback(String eventName, EventHandler<RequestEvent<I, O>> eventHandler) {
        getLogger().debug("Registering event. Event name: [{}]", eventName);
        MessageConsumer eventConsumer;
        try {
            eventConsumer = session.createConsumer(requestQueue, "JMSCorrelationID='" + eventName + "'");
            eventConsumer.setMessageListener(new MessageListener() {
                @Override
                public void onMessage(Message message) {
                    EventData<I> requestEventData;
                    try {
                        requestEventData = (EventData<I>) ((ObjectMessage) message).getObject();
                        getLogger().debug("Received event. Event name: [{}] CorrelationID: [{}]", requestEventData.getName(),
                                message.getJMSCorrelationID());
                        processRequest(requestEventData);
                    } catch (JMSException e) {
                        // BUG FIX: was "new RuntimeException(e);" without "throw".
                        throw new RuntimeException(e);
                    }
                }
            });
        } catch (JMSException e) {
            // BUG FIX: was "new RuntimeException(e);" without "throw".
            throw new RuntimeException(e);
        }
    }

    /**
     * Publishes a request on the shared queue, tagging it with the event name
     * (correlation id) for handler-side filtering and with this node's
     * temporary queue so the response can find its way back.
     */
    @Override
    protected <I> void prepareRequest(EventData<I> eventData) {
        try {
            eventData.setTransportData(responseQueue);
            ObjectMessage message = session.createObjectMessage(eventData);
            message.setJMSCorrelationID(eventData.getName());
            message.setJMSReplyTo(responseQueue);
            requestMessageProducer.send(message);
        } catch (JMSException e) {
            // BUG FIX: was "new RuntimeException(e);" without "throw".
            throw new RuntimeException(e);
        }
    }
}
|
breex-it/breex-bus
|
breex-bus-jms/src/main/java/it/breex/bus/impl/jms/JmsEventManager.java
|
Java
|
apache-2.0
| 4,233 |
/**
* Copyright 2013 Oak Ridge National Laboratory
* Author: James Horey <horeyjl@ornl.gov>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package gov.ornl.paja.storage;
/**
* Java libs.
**/
import java.util.Iterator;
import java.nio.ByteBuffer;
/**
* A log message is the thing that gets written to the log.
*/
/**
 * A single entry written to the log: a unique sequence number, an
 * application-defined identification label and the message payload.
 */
public class LogMessage {
    private int logNum;   // Unique sequence number of this entry.
    private byte[] id;    // Application-defined identification label.
    private byte[] msg;   // The message payload itself.

    /**
     * @param logNum unique log number of this message
     * @param id application-defined identification label
     * @param msg the message payload
     */
    public LogMessage(int logNum, byte[] id, byte[] msg) {
        this.logNum = logNum;
        this.id = id;
        this.msg = msg;
    }

    /** Replaces the identification label. */
    public void setID(byte[] id) {
        this.id = id;
    }

    /** @return the identification label */
    public byte[] getID() {
        return this.id;
    }

    /** Replaces the message payload. */
    public void setMsg(byte[] msg) {
        this.msg = msg;
    }

    /** @return the message payload */
    public byte[] getMsg() {
        return this.msg;
    }

    /** Replaces the log number. */
    public void setNum(int i) {
        this.logNum = i;
    }

    /** @return the log number */
    public int getNum() {
        return this.logNum;
    }
}
|
jhorey/Paja
|
src/gov/ornl/paja/storage/LogMessage.java
|
Java
|
apache-2.0
| 1,764 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package michid.jsonjerk;
import michid.jsonjerk.JsonValue.JsonArray;
import michid.jsonjerk.JsonValue.JsonAtom;
import michid.jsonjerk.JsonValue.JsonObject;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Utility class for parsing JSON objects and arrays into {@link JsonObject}s
* and {@link JsonArray}s, respectively. In contrast to {@link FullJsonParser},
* this implementation resolves nested structures lazily. That, is it does a
* level order traverse of the JSON tree.
* <p/>
* The parser looks for 'hints' in the JSON text to speed up parsing: when it
* encounters an integer value with the key ":size" in an object, that value
* is used for the size of the entire object (including sub-objects).
*
* @see FullJsonParser
*/
public final class LevelOrderJsonParser {
    // Utility class: not instantiable.
    private LevelOrderJsonParser() { }
    /**
     * Parse a JSON object from {@code tokenizer}
     * @param tokenizer
     * @return a {@code JsonObject}
     * @throws ParseException
     */
    public static JsonObject parseObject(JsonTokenizer tokenizer) {
        ObjectHandler objectHandler = new ObjectHandler();
        new JsonParser(objectHandler).parseObject(tokenizer);
        return objectHandler.getObject();
    }
    /**
     * Parse a JSON array from {@code tokenizer}
     * @param tokenizer
     * @return a {@code JsonArray}
     * @throws ParseException
     */
    public static JsonArray parseArray(JsonTokenizer tokenizer) {
        ArrayHandler arrayHandler = new ArrayHandler();
        new JsonParser(arrayHandler).parseArray(tokenizer);
        return arrayHandler.getArray();
    }
    /**
     * This implementation of a {@code JsonHandler} builds up a {@code JsonObject}
     * from its constituents. Nested objects are not fully parsed though, but a
     * reference to the parser is kept which is only invoked when that nested object
     * is actually accessed.
     */
    public static class ObjectHandler extends JsonHandler {
        private final JsonObject object = new JsonObject(new LinkedHashMap<String, JsonValue>());
        @Override
        public void atom(Token key, Token value) {
            object.put(key.text(), new JsonAtom(value));
        }
        @Override
        public void object(JsonParser parser, Token key, JsonTokenizer tokenizer) {
            // Store a lazy placeholder holding a copy of the tokenizer at the
            // nested object's start, then skip the tokenizer past that object.
            object.put(key.text(), new DeferredObjectValue(tokenizer.copy()));
            tokenizer.setPos(getNextPairPos(tokenizer.copy()));
        }
        @Override
        public void array(JsonParser parser, Token key, JsonTokenizer tokenizer) {
            // Arrays are parsed eagerly (their elements may still defer).
            object.put(key.text(), parseArray(tokenizer));
        }
        public JsonObject getObject() {
            return object;
        }
    }
    /**
     * This implementation of a {@code JsonHandler} builds up a {@code JsonArray}
     * from its constituents. Nested objects are not fully parsed though, but a
     * reference to the parser is kept which is only invoked when that nested object
     * is actually accessed.
     */
    public static class ArrayHandler extends JsonHandler {
        private final JsonArray array = new JsonArray(new ArrayList<JsonValue>());
        @Override
        public void atom(Token key, Token value) {
            array.add(new JsonAtom(value));
        }
        @Override
        public void object(JsonParser parser, Token key, JsonTokenizer tokenizer) {
            // Same lazy scheme as ObjectHandler: defer nested objects, skip past them.
            array.add(new DeferredObjectValue(tokenizer.copy()));
            tokenizer.setPos(getNextPairPos(tokenizer.copy()));
        }
        @Override
        public void array(JsonParser parser, Token key, JsonTokenizer tokenizer) {
            array.add(parseArray(tokenizer));
        }
        public JsonArray getArray() {
            return array;
        }
    }
    //------------------------------------------< private >---
    // Control-flow exception used to abort parsing as soon as a ":size" hint
    // is found; a shared singleton instance avoids repeated construction.
    private static class BreakException extends RuntimeException{
        private static final BreakException BREAK = new BreakException();
    }
    /**
     * Returns the tokenizer position just past the object starting at the
     * current position. If the object carries a ":size" hint, the position is
     * computed directly from it (fast path via BreakException); otherwise the
     * whole object is parsed and the resulting position is used.
     */
    private static int getNextPairPos(JsonTokenizer tokenizer) {
        SkipObjectHandler skipObjectHandler = new SkipObjectHandler(tokenizer.pos());
        try {
            new JsonParser(skipObjectHandler).parseObject(tokenizer);
        }
        catch (BreakException e) {
            return skipObjectHandler.newPos;
        }
        return tokenizer.pos();
    }
    // Read-only placeholder for a nested object: parsing is re-run from the
    // captured tokenizer position every time value() is accessed.
    private static class DeferredObjectValue extends JsonObject {
        private final JsonTokenizer tokenizer;
        public DeferredObjectValue(JsonTokenizer tokenizer) {
            super(null);
            this.tokenizer = tokenizer;
        }
        @Override
        public void put(String key, JsonValue value) {
            throw new IllegalStateException("Cannot add value");
        }
        @Override
        public JsonValue get(String key) {
            return value().get(key);
        }
        @Override
        public Map<String, JsonValue> value() {
            // NOTE: re-parses on each call; no caching of the parsed object.
            return parseObject(tokenizer.copy()).value();
        }
        @Override
        public String toString() {
            return "<deferred>";
        }
    }
    // Scans an object for a ":size" number hint; when found, records the
    // position right after the object and aborts via BreakException.BREAK.
    private static class SkipObjectHandler extends JsonHandler {
        private final int startPos;
        private int newPos;
        public SkipObjectHandler(int startPos) {
            this.startPos = startPos;
        }
        @Override
        public void atom(Token key, Token value) {
            if (key != null && ":size".equals(key.text()) && Token.Type.NUMBER == value.type()) {
                newPos = startPos + Integer.parseInt(value.text());
                throw BreakException.BREAK;
            }
        }
    }
}
|
mduerig/json-jerk
|
src/main/java/michid/jsonjerk/LevelOrderJsonParser.java
|
Java
|
apache-2.0
| 6,473 |
/**
 * Package-level Javadoc for the {@code ru.job4j} test package.
 *
 * @author dgagarsky
 * @since 01.12.2016
 */
package ru.job4j;
|
degauhta/dgagarsky
|
chapter_001/src/test/java/ru/job4j/package-info.java
|
Java
|
apache-2.0
| 81 |
/*
* Copyright 2005-2007 Maarten Billemont
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lyndir.lhunath.opal.gui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.*;
/**
* <i>{@link ListenerAction} - [in short] (TODO).</i><br> <br> [description / usage].<br> <br>
*
* @author lhunath
*/
public class ListenerAction extends AbstractAction {
private final ActionListener listener;
/**
* Create a new {@link ListenerAction} instance.
*
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final ActionListener listener) {
this.listener = listener;
}
/**
* Create a new {@link ListenerAction} instance.
*
* @param name The name of the action.
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final String name, final ActionListener listener) {
super( name );
this.listener = listener;
}
/**
* Create a new {@link ListenerAction} instance.
*
* @param name The name of the action.
* @param command The string that will identify the action that must be taken.
* @param icon The icon of the action.
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final String name, final String command, final Icon icon, final ActionListener listener) {
super( name, icon );
this.listener = listener;
setActionCommand( command );
}
/**
* Specify an action command string for this action.
*
* @param command The string that will identify the action that must be taken.
*/
public void setActionCommand(final String command) {
putValue( ACTION_COMMAND_KEY, command );
}
/**
* Specify an action command string for this action.
*
* @return The string that will identify the action that must be taken.
*/
public String getActionCommand() {
return getValue( ACTION_COMMAND_KEY ) == null? null: getValue( ACTION_COMMAND_KEY ).toString();
}
/**
* {@inheritDoc}
*/
@Override
public void actionPerformed(final ActionEvent e) {
if (listener != null)
listener.actionPerformed( e );
}
}
|
Lyndir/Opal
|
discontinued/opal-geo/src/main/java/com/lyndir/lhunath/opal/gui/ListenerAction.java
|
Java
|
apache-2.0
| 2,900 |
/*
* Copyright 2008-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jpa.repository.config;
import static org.junit.Assert.*;
import org.springframework.test.context.ContextConfiguration;
/**
* Integration test to test {@link org.springframework.core.type.filter.TypeFilter} integration into namespace.
*
* @author Oliver Gierke
*/
@ContextConfiguration(locations = "classpath:config/namespace-autoconfig-typefilter-context.xml")
public class TypeFilterConfigTests extends AbstractRepositoryConfigTests {
    /*
     * (non-Javadoc)
     *
     * @see
     * org.springframework.data.jpa.repository.config.AbstractRepositoryConfigTests
     * #testContextCreation()
     */
    @Override
    public void testContextCreation() {
        // Repository fields are inherited from AbstractRepositoryConfigTests
        // and autowired from the XML context above. Repositories matched by
        // the configured type filters must be created...
        assertNotNull(userRepository);
        assertNotNull(roleRepository);
        // ...while the filtered-out auditable repository must NOT be created.
        assertNull(auditableUserRepository);
    }
}
|
sdw2330976/Research-spring-data-jpa
|
spring-data-jpa-1.7.1.RELEASE/src/test/java/org/springframework/data/jpa/repository/config/TypeFilterConfigTests.java
|
Java
|
apache-2.0
| 1,407 |
package org.drools.rule;
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Thrown when a package containing errors is added to a RuleBase.
 * Interrogate the package and its builder to discover the specific errors.
 *
 * @author Michael Neale
 */
public class InvalidRulePackage extends RuntimeException {

    private static final long serialVersionUID = 400L;

    /** @param summary a human-readable summary of the package's errors */
    public InvalidRulePackage(final String summary) {
        super( summary );
    }
}
|
bobmcwhirter/drools
|
drools-core/src/main/java/org/drools/rule/InvalidRulePackage.java
|
Java
|
apache-2.0
| 1,064 |
package com.twu.biblioteca.service.impl;
import com.twu.biblioteca.mapper.BookListMapper;
import com.twu.biblioteca.mapper.MyBatisUtil;
import com.twu.biblioteca.model.Book;
import com.twu.biblioteca.service.BookListService;
import org.apache.ibatis.session.SqlSession;
import java.util.ArrayList;
// Default implementation of BookListService backed by a MyBatis mapper.
public class BookListServiceImpl implements BookListService {
    // MyBatis session used to obtain the mapper; only set by the no-arg
    // constructor. NOTE(review): it is opened but never closed anywhere in
    // this class — confirm who owns its lifecycle.
    private SqlSession sqlSession;
    private BookListMapper bookListMapper;
    // Opens a new MyBatis session and obtains the mapper from it.
    public BookListServiceImpl() {
        this.sqlSession = MyBatisUtil.getSqlSessionFactory().openSession();
        this.bookListMapper = sqlSession.getMapper(BookListMapper.class);
    }
    // Uses the supplied mapper directly (e.g. a mock in tests).
    // NOTE(review): leaves sqlSession null — callers must not depend on it.
    public BookListServiceImpl(BookListMapper bookListMapper) {
        this.bookListMapper = bookListMapper;
    }
    // Delegates to the mapper to fetch all books.
    @Override
    public ArrayList<Book> getBookList() {
        return bookListMapper.getBookList();
    }
}
|
niuwanlu/twu-biblioteca-niuwanlu-tdd
|
src/main/java/com/twu/biblioteca/service/impl/BookListServiceImpl.java
|
Java
|
apache-2.0
| 863 |
/*
* Copyright 2015 Thomas Hoffmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.wifiAutoOff;
import android.app.IntentService;
import android.content.Intent;
import com.google.android.gms.location.FusedLocationProviderApi;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingEvent;
import com.google.android.gms.maps.model.LatLng;
/**
 * IntentService handling location callbacks from Google Play services:
 * either a raw location update or a geofence transition. When the device
 * enters one of the stored locations it broadcasts
 * {@link Receiver#LOCATION_ENTERED_ACTION} to {@link Receiver}.
 */
public class GeoFenceService extends IntentService {
    public GeoFenceService() {
        // Argument is only the worker thread's name (for debugging).
        super("WiFiAutomaticGeoFenceService");
    }
    @Override
    protected void onHandleIntent(final Intent intent) {
        if (intent == null) return;
        if (intent.hasExtra(FusedLocationProviderApi.KEY_LOCATION_CHANGED)) {
            // Plain location update: check it against all stored locations.
            android.location.Location loc = (android.location.Location) intent.getExtras()
                    .get(FusedLocationProviderApi.KEY_LOCATION_CHANGED);
            if (BuildConfig.DEBUG) Logger.log("Location update received " + loc);
            Database db = Database.getInstance(this);
            if (db.inRangeOfLocation(loc)) {
                sendBroadcast(new Intent(this, Receiver.class)
                        .setAction(Receiver.LOCATION_ENTERED_ACTION));
            }
            db.close();
        } else {
            // Otherwise assume a geofence transition event.
            GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent);
            // First check for errors
            if (geofencingEvent.hasError()) {
                // Get the error code with a static method
                // Log the error
                if (BuildConfig.DEBUG) Logger.log("Location Services error: " +
                        Integer.toString(geofencingEvent.getErrorCode()));
            } else {
                // Test that a valid transition was reported
                if (geofencingEvent.getGeofenceTransition() == Geofence.GEOFENCE_TRANSITION_ENTER) {
                    Database db = Database.getInstance(this);
                    for (Geofence gf : geofencingEvent.getTriggeringGeofences()) {
                        if (BuildConfig.DEBUG) Logger.log("geofence entered: " + gf.getRequestId());
                        // Geofence request ids are encoded as "lat@lng"; decode
                        // them back into coordinates to look up the location name.
                        String[] data = gf.getRequestId().split("@");
                        LatLng ll = new LatLng(Double.parseDouble(data[0]),
                                Double.parseDouble(data[1]));
                        String name = db.getNameForLocation(ll);
                        if (name != null) {
                            // Notify for the first matching named location only.
                            sendBroadcast(new Intent(this, Receiver.class)
                                    .setAction(Receiver.LOCATION_ENTERED_ACTION)
                                    .putExtra(Receiver.EXTRA_LOCATION_NAME, name));
                            break;
                        }
                    }
                    db.close();
                }
            }
        }
    }
}
|
j4velin/WiFi-Automatic
|
src/play/java/de/j4velin/wifiAutoOff/GeoFenceService.java
|
Java
|
apache-2.0
| 3,340 |
/*
* Copyright 2015-2020 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.analysis.individual.qc;
import org.junit.Test;
import org.opencb.biodata.models.clinical.qc.MendelianErrorReport;
import org.opencb.biodata.models.clinical.qc.RelatednessReport;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.avro.IssueEntry;
import org.opencb.biodata.models.variant.avro.IssueType;
import org.opencb.opencga.analysis.family.qc.IBDComputation;
import org.opencb.opencga.core.common.JacksonUtils;
import org.opencb.opencga.core.exceptions.ToolException;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Paths;
import java.util.*;
import static org.opencb.opencga.storage.core.variant.VariantStorageBaseTest.getResourceUri;
/**
 * Tests for individual QC parsing utilities: relatedness scores, mendelian
 * error aggregation and karyotypic sex thresholds. The tests mainly print
 * the parsed structures rather than asserting on them.
 */
public class IndividualQcUtilsTest {
    @Test
    public void buildRelatednessReport() throws ToolException, IOException {
        // Parse PLINK-style IBD output into relatedness scores and dump as JSON.
        URI resourceUri = getResourceUri("ibd.genome");
        File file = Paths.get(resourceUri.getPath()).toFile();
        List<RelatednessReport.RelatednessScore> relatednessReport = IBDComputation.parseRelatednessScores(file);
        System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(relatednessReport));
    }
    @Test
    public void parseMendelianError() throws IOException {
        // Load variants flagged with mendelian-error issues and aggregate them.
        URI resourceUri = getResourceUri("mendelian.error.variants.json");
        File file = Paths.get(resourceUri.getPath()).toFile();
        List<Variant> variants = Arrays.asList(JacksonUtils.getDefaultNonNullObjectMapper().readValue(file, Variant[].class));
        System.out.println(variants.size());
        MendelianErrorReport mendelianErrorReport = buildMendelianErrorReport(variants.iterator(), variants.size());
        System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(mendelianErrorReport));
        // List<Variant> variants = JacksonUtils.getDefaultNonNullObjectMapper().readerFor(Variant.class).readValue(path.toFile());
        // System.out.println(variants.size());
    }
    @Test
    public void parseKaryotypicSexThresholds() throws IOException {
        // Thresholds file maps karyotype labels to numeric cut-offs.
        URI resourceUri = getResourceUri("karyotypic_sex_thresholds.json");
        File file = Paths.get(resourceUri.getPath()).toFile();
        Map<String, Double> thresholds = JacksonUtils.getDefaultNonNullObjectMapper().readerFor(Map.class).readValue(file);
        System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(thresholds));
    }
    /**
     * Aggregates per-sample, per-chromosome mendelian-error counts from the
     * issues attached to each variant's first study.
     *
     * @param iterator    iterator over {@code Variant} objects
     * @param numVariants total number of variants (denominator for the ratio)
     * @return the aggregated report
     */
    private MendelianErrorReport buildMendelianErrorReport(Iterator iterator, long numVariants) {
        // Create auxiliary map
        // sample chrom error count
        Map<String, Map<String, Map<String, Integer>>> counter = new HashMap<>();
        int numErrors = 0;
        while (iterator.hasNext()) {
            Variant variant = (Variant) iterator.next();
            // Get sampleId and error code from variant issues
            boolean foundError = false;
            for (IssueEntry issue : variant.getStudies().get(0).getIssues()) {
                if (IssueType.MENDELIAN_ERROR == issue.getType() || IssueType.DE_NOVO == issue.getType()) {
                    foundError = true;
                    String sampleId = issue.getSample().getSampleId();
                    String errorCode = issue.getSample().getData().get(0);
                    if (!counter.containsKey(sampleId)) {
                        counter.put(sampleId, new HashMap<>());
                    }
                    if (!counter.get(sampleId).containsKey(variant.getChromosome())) {
                        counter.get(sampleId).put(variant.getChromosome(), new HashMap<>());
                    }
                    int val = 0;
                    if (counter.get(sampleId).get(variant.getChromosome()).containsKey(errorCode)) {
                        val = counter.get(sampleId).get(variant.getChromosome()).get(errorCode);
                    }
                    counter.get(sampleId).get(variant.getChromosome()).put(errorCode, val + 1);
                }
            }
            // A variant counts once toward numErrors even with multiple issues.
            if (foundError) {
                numErrors++;
            }
        }
        // Create mendelian error report from auxiliary map
        MendelianErrorReport meReport = new MendelianErrorReport();
        meReport.setNumErrors(numErrors);
        for (String sampleId : counter.keySet()) {
            MendelianErrorReport.SampleAggregation sampleAgg = new MendelianErrorReport.SampleAggregation();
            int numSampleErrors = 0;
            for (String chrom : counter.get(sampleId).keySet()) {
                // Sum all error-code counts for this chromosome.
                int numChromErrors = counter.get(sampleId).get(chrom).values().stream().mapToInt(Integer::intValue).sum();
                MendelianErrorReport.SampleAggregation.ChromosomeAggregation chromAgg = new MendelianErrorReport.SampleAggregation.ChromosomeAggregation();
                chromAgg.setChromosome(chrom);
                chromAgg.setNumErrors(numChromErrors);
                chromAgg.setErrorCodeAggregation(counter.get(sampleId).get(chrom));
                // Update sample aggregation
                sampleAgg.getChromAggregation().add(chromAgg);
                numSampleErrors += numChromErrors;
            }
            sampleAgg.setSample(sampleId);
            sampleAgg.setNumErrors(numSampleErrors);
            sampleAgg.setRatio(1.0d * numSampleErrors / numVariants);
            meReport.getSampleAggregation().add(sampleAgg);
        }
        return meReport;
    }
}
|
opencb/opencga
|
opencga-analysis/src/test/java/org/opencb/opencga/analysis/individual/qc/IndividualQcUtilsTest.java
|
Java
|
apache-2.0
| 6,198 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.redundancy;
import com.google.common.collect.ImmutableSet;
import com.intellij.codeInspection.ex.InspectionElementsMergerBase;
import com.intellij.util.ArrayUtilRt;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import java.util.Map;
import java.util.Set;
/**
 * Merges the settings of several legacy string-related inspections
 * (StringToString, SubstringZero, ConstantStringIntern, StringConstructor and
 * the earlier merged RedundantStringOperation tool) into the single
 * "StringOperationCanBeSimplified" inspection.
 */
public class RedundantStringOperationMerger extends InspectionElementsMergerBase {
  private static final String OLD_MERGER_NAME = "RedundantStringOperation";
  private static final Set<String> OLD_SOURCE_NAMES = ImmutableSet.of("StringToString", "SubstringZero", "ConstantStringIntern");

  @NotNull
  @Override
  public String getMergedToolName() {
    return "StringOperationCanBeSimplified";
  }

  @Override
  protected Element getSourceElement(@NotNull Map<String, Element> inspectionElements, @NotNull String sourceToolName) {
    if (inspectionElements.containsKey(sourceToolName)) {
      return inspectionElements.get(sourceToolName);
    }
    if (sourceToolName.equals(OLD_MERGER_NAME)) {//need to merge initial tools to get merged redundant string operations
      // Nested one-off merger: reproduce the OLD merged RedundantStringOperation
      // settings from its own original source tools.
      return new InspectionElementsMergerBase(){
        @NotNull
        @Override
        public String getMergedToolName() {
          return OLD_MERGER_NAME;
        }

        @Override
        public String @NotNull [] getSourceToolNames() {
          return ArrayUtilRt.toStringArray(OLD_SOURCE_NAMES);
        }

        @Override
        public Element merge(@NotNull Map<String, Element> inspectionElements) {
          return super.merge(inspectionElements);
        }

        @Override
        protected boolean writeMergedContent(@NotNull Element toolElement) {
          // Persist the intermediate merged element so it can be picked up.
          return true;
        }
      }.merge(inspectionElements);
    }
    else if (OLD_SOURCE_NAMES.contains(sourceToolName)) {
      Element merged = inspectionElements.get(OLD_MERGER_NAME);
      if (merged != null) { // RedundantStringOperation already replaced the content
        // Reuse the old merged settings for the individual source tool,
        // relabelled with that tool's class attribute.
        Element clone = merged.clone();
        clone.setAttribute("class", sourceToolName);
        return clone;
      }
    }
    return null;
  }

  @Override
  public String @NotNull [] getSourceToolNames() {
    return new String[] {
      "StringToString",
      "SubstringZero",
      "ConstantStringIntern",
      "StringConstructor",
      OLD_MERGER_NAME
    };
  }

  @Override
  public String @NotNull [] getSuppressIds() {
    // All historical suppression ids that must keep working after the merge.
    return new String[] {
      "StringToString", "RedundantStringToString",
      "SubstringZero", "ConstantStringIntern",
      "RedundantStringConstructorCall", "StringConstructor", OLD_MERGER_NAME
    };
  }
}
|
leafclick/intellij-community
|
plugins/InspectionGadgets/src/com/siyeh/ig/redundancy/RedundantStringOperationMerger.java
|
Java
|
apache-2.0
| 2,723 |
package com.app.annotation.aspect;
/**
* Created by baixiaokang on 17/1/31.
*/
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Permission {
    /**
     * The permission names required before the annotated method runs.
     * NOTE(review): presumably Android runtime permissions enforced by an
     * aspect in this package — confirm against the consuming aspect.
     */
    String[] value();
}
|
AndroidAdu/material-News
|
lib/src/main/java/com/app/annotation/aspect/Permission.java
|
Java
|
apache-2.0
| 363 |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.module.impl;
import com.intellij.configurationStore.RenameableStateStorageManager;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.ide.plugins.ContainerDescriptor;
import com.intellij.ide.plugins.IdeaPluginDescriptorImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.components.impl.stores.IComponentStore;
import com.intellij.openapi.components.impl.stores.ModuleStore;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleComponent;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.impl.scopes.ModuleScopeProviderImpl;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ex.ProjectEx;
import com.intellij.openapi.roots.ExternalProjectSystemRegistry;
import com.intellij.openapi.roots.ProjectModelElement;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.serviceContainer.ComponentManagerImpl;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import com.intellij.util.xmlb.annotations.Property;
import kotlin.Unit;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
/**
 * Default module implementation: a per-module component/service container that may be backed
 * by an .iml file ({@link #myImlFilePointer} non-null, "persistent") or by no file at all.
 * Delegates all search-scope queries to a {@link ModuleScopeProviderImpl}.
 */
public class ModuleImpl extends ComponentManagerImpl implements ModuleEx {
  private static final Logger LOG = Logger.getInstance(ModuleImpl.class);
  @NotNull private final Project myProject;
  // Pointer to the module's .iml file; null means the module is non-persistent (see isPersistent()).
  @Nullable protected VirtualFilePointer myImlFilePointer;
  // Set in moduleAdded(), cleared in dispose(); read by isLoaded().
  private volatile boolean isModuleAdded;
  private String myName;
  private final ModuleScopeProvider myModuleScopeProvider;
  /**
   * Creates a persistent module backed by the .iml file at {@code filePath}.
   * The registered pointer listener re-points the module store when the file's validity changes.
   */
  @ApiStatus.Internal
  public ModuleImpl(@NotNull String name, @NotNull Project project, @NotNull String filePath) {
    this(name, project);
    myImlFilePointer = VirtualFilePointerManager.getInstance().create(
      VfsUtilCore.pathToUrl(filePath), this,
      new VirtualFilePointerListener() {
        @Override
        public void validityChanged(@NotNull VirtualFilePointer @NotNull [] pointers) {
          if (myImlFilePointer == null) return;
          VirtualFile virtualFile = myImlFilePointer.getFile();
          if (virtualFile != null) {
            // Keep the module store pointed at the (possibly re-created) .iml file and
            // bump the module-manager modification counter so caches refresh.
            ((ModuleStore)getStore()).setPath(virtualFile.toNioPath(), virtualFile, false);
            ModuleManager.getInstance(myProject).incModificationCount();
          }
        }
      });
  }
  /** Creates a module backed by an already-existing pointer; {@code null} makes it non-persistent. */
  @ApiStatus.Internal
  public ModuleImpl(@NotNull String name, @NotNull Project project, @Nullable VirtualFilePointer virtualFilePointer) {
    this(name, project);
    myImlFilePointer = virtualFilePointer;
  }
  /** Base constructor: registers this instance as the {@link Module} service of its own container. */
  @ApiStatus.Internal
  public ModuleImpl(@NotNull String name, @NotNull Project project) {
    super((ComponentManagerImpl)project);
    registerServiceInstance(Module.class, this, ComponentManagerImpl.fakeCorePluginDescriptor);
    myProject = project;
    myModuleScopeProvider = new ModuleScopeProviderImpl(this);
    myName = name;
  }
  /**
   * Registers components/services and instantiates them. For non-persistent modules a
   * {@code NonPersistentModuleStore} is substituted so nothing is written to disk.
   */
  @Override
  public void init(@Nullable Runnable beforeComponentCreation) {
    // do not measure (activityNamePrefix method not overridden by this class)
    // because there are a lot of modules and no need to measure each one
    registerComponents();
    if (!isPersistent()) {
      registerService(IComponentStore.class,
                      NonPersistentModuleStore.class,
                      ComponentManagerImpl.fakeCorePluginDescriptor,
                      true, ServiceDescriptor.PreloadMode.FALSE);
    }
    if (beforeComponentCreation != null) {
      beforeComponentCreation.run();
    }
    createComponents(null);
  }
  /** A module is persistent iff it is backed by an .iml file pointer. */
  private boolean isPersistent() {
    return myImlFilePointer != null;
  }
  @Override
  protected void setProgressDuringInit(@NotNull ProgressIndicator indicator) {
    // Component loading progress is not reported for module, because at this stage minimal reporting unit it is the module itself.
    // Stage "Loading modules" progress reported for each loaded module and module component count doesn't matter.
  }
  @Override
  public final boolean isDisposed() {
    // in case of light project in tests when it's temporarily disposed, the module should be treated as disposed too.
    //noinspection TestOnlyProblems
    return super.isDisposed() || ((ProjectEx)myProject).isLight() && myProject.isDisposed();
  }
  /**
   * Accepts every component config, but logs (or fails in unit-test mode) when a component
   * declares options other than "workspace"/"overrides" — such components should be services.
   */
  @Override
  protected boolean isComponentSuitable(@NotNull ComponentConfig componentConfig) {
    if (!super.isComponentSuitable(componentConfig)) {
      return false;
    }
    Map<String, String> options = componentConfig.options;
    if (options == null || options.isEmpty()) {
      return true;
    }
    for (String optionName : options.keySet()) {
      if ("workspace".equals(optionName) || "overrides".equals(optionName)) {
        continue;
      }
      // we cannot filter using module options because at this moment module file data could be not loaded
      String message = "Don't specify " + optionName + " in the component registration, transform component to service and implement your logic in your getInstance() method";
      if (ApplicationManager.getApplication().isUnitTestMode()) {
        LOG.error(message);
      }
      else {
        LOG.warn(message);
      }
    }
    return true;
  }
  /** Returns the .iml virtual file, or null for non-persistent modules or invalid pointers. */
  @Override
  @Nullable
  public VirtualFile getModuleFile() {
    if (myImlFilePointer == null) {
      return null;
    }
    return myImlFilePointer.getFile();
  }
  /** Renames the module and, when requested, renames the backing .iml storage as well. */
  @Override
  public void rename(@NotNull String newName, boolean notifyStorage) {
    myName = newName;
    if (notifyStorage) {
      ((RenameableStateStorageManager)getStore().getStorageManager()).rename(newName + ModuleFileType.DOT_DEFAULT_EXTENSION);
    }
  }
  protected @NotNull IComponentStore getStore() {
    return Objects.requireNonNull(getService(IComponentStore.class));
  }
  @Override
  public boolean canStoreSettings() {
    return !(getStore() instanceof NonPersistentModuleStore);
  }
  /** Path of the .iml file; an empty path for non-persistent modules. */
  @Override
  @NotNull
  public Path getModuleNioFile() {
    if (!isPersistent()) {
      return Paths.get("");
    }
    return getStore().getStorageManager().expandMacro(StoragePathMacros.MODULE_FILE);
  }
  @Override
  public synchronized void dispose() {
    isModuleAdded = false;
    super.dispose();
  }
  @NotNull
  @Override
  protected ContainerDescriptor getContainerDescriptor(@NotNull IdeaPluginDescriptorImpl pluginDescriptor) {
    return pluginDescriptor.moduleContainerDescriptor;
  }
  /** Notifies every initialized (deprecated) ModuleComponent that the project was opened. */
  @Override
  public void projectOpened() {
    //noinspection deprecation
    processInitializedComponents(ModuleComponent.class, (component, __) -> {
      try {
        //noinspection deprecation
        component.projectOpened();
      }
      catch (Exception e) {
        LOG.error(e);
      }
      return Unit.INSTANCE;
    });
  }
  /** Notifies components of project close in reverse initialization order. */
  @Override
  public void projectClosed() {
    //noinspection deprecation
    List<ModuleComponent> components = new ArrayList<>();
    //noinspection deprecation
    processInitializedComponents(ModuleComponent.class, (component, __) -> {
      components.add(component);
      return Unit.INSTANCE;
    });
    for (int i = components.size() - 1; i >= 0; i--) {
      try {
        //noinspection deprecation
        components.get(i).projectClosed();
      }
      catch (Throwable e) {
        LOG.error(e);
      }
    }
  }
  @Override
  @NotNull
  public Project getProject() {
    return myProject;
  }
  @Override
  @NotNull
  public String getName() {
    return myName;
  }
  @Override
  public boolean isLoaded() {
    return isModuleAdded;
  }
  /** Marks the module loaded and notifies (deprecated) ModuleComponents. */
  @Override
  public void moduleAdded() {
    isModuleAdded = true;
    //noinspection deprecation
    processInitializedComponents(ModuleComponent.class, (component, __) -> {
      //noinspection deprecation
      component.moduleAdded();
      return Unit.INSTANCE;
    });
  }
  /**
   * Stores (or removes, when {@code value} is null) a deprecated module option; bumps the
   * option manager's modification counter only when the stored state actually changed.
   */
  @Override
  public void setOption(@NotNull String key, @Nullable String value) {
    DeprecatedModuleOptionManager manager = getOptionManager();
    if (value == null) {
      if (manager.state.options.remove(key) != null) {
        manager.incModificationCount();
      }
    }
    else if (!value.equals(manager.state.options.put(key, value))) {
      manager.incModificationCount();
    }
  }
  @NotNull
  private DeprecatedModuleOptionManager getOptionManager() {
    //noinspection ConstantConditions
    return ((Module)this).getService(DeprecatedModuleOptionManager.class);
  }
  @Override
  public String getOptionValue(@NotNull String key) {
    return getOptionManager().state.options.get(key);
  }
  // --- scope accessors: all delegate to myModuleScopeProvider ---
  @NotNull
  @Override
  public GlobalSearchScope getModuleScope() {
    return myModuleScopeProvider.getModuleScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleScope(boolean includeTests) {
    return myModuleScopeProvider.getModuleScope(includeTests);
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleWithLibrariesScope() {
    return myModuleScopeProvider.getModuleWithLibrariesScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleWithDependenciesScope() {
    return myModuleScopeProvider.getModuleWithDependenciesScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleContentScope() {
    return myModuleScopeProvider.getModuleContentScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleContentWithDependenciesScope() {
    return myModuleScopeProvider.getModuleContentWithDependenciesScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleWithDependenciesAndLibrariesScope(boolean includeTests) {
    return myModuleScopeProvider.getModuleWithDependenciesAndLibrariesScope(includeTests);
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleWithDependentsScope() {
    return myModuleScopeProvider.getModuleWithDependentsScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleTestsWithDependentsScope() {
    return myModuleScopeProvider.getModuleTestsWithDependentsScope();
  }
  @NotNull
  @Override
  public GlobalSearchScope getModuleRuntimeScope(boolean includeTests) {
    return myModuleScopeProvider.getModuleRuntimeScope(includeTests);
  }
  @Override
  public void clearScopesCache() {
    myModuleScopeProvider.clearCache();
  }
  @Override
  public String toString() {
    if (myName == null) return "Module (not initialized)";
    return "Module: '" + getName() + "'" + (isDisposed() ? " (disposed)" : "");
  }
  @Override
  public long getOptionsModificationCount() {
    return getOptionManager().getModificationCount();
  }
  /**
   * Persists the legacy key/value module options ({@link #setOption}/{@link #getOptionValue})
   * as {@code <option/>} entries in the module file.
   */
  @ApiStatus.Internal
  @State(name = "DeprecatedModuleOptionManager", useLoadedStateAsExisting = false /* doesn't make sense to check it */)
  public static class DeprecatedModuleOptionManager extends SimpleModificationTracker implements PersistentStateComponent<DeprecatedModuleOptionManager.State>,
                                                                                                 ProjectModelElement {
    private final Module module;
    DeprecatedModuleOptionManager(@NotNull Module module) {
      this.module = module;
    }
    @Override
    @Nullable
    public ProjectModelExternalSource getExternalSource() {
      if (state.options.size() > 1 || state.options.size() == 1 && !state.options.containsKey(Module.ELEMENT_TYPE) /* unrealistic case, but just to be sure */) {
        return null;
      }
      return ExternalProjectSystemRegistry.getInstance().getExternalSource(module);
    }
    static final class State {
      @Property(surroundWithTag = false)
      @MapAnnotation(surroundKeyWithTag = false, surroundValueWithTag = false, surroundWithTag = false, entryTagName = "option")
      public final Map<String, String> options = new HashMap<>();
    }
    private State state = new State();
    @Nullable
    @Override
    public State getState() {
      return state;
    }
    @Override
    public void loadState(@NotNull State state) {
      this.state = state;
    }
  }
}
|
jwren/intellij-community
|
platform/lang-impl/src/com/intellij/openapi/module/impl/ModuleImpl.java
|
Java
|
apache-2.0
| 12,736 |
/*
* Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.mirror.declaration;
import java.lang.annotation.Annotation;
import java.util.Collection;
import com.sun.mirror.type.*;
import com.sun.mirror.util.*;
/**
* Represents the declaration of a program element such as a package,
* class, or method. Each declaration represents a static, language-level
* construct (and not, for example, a runtime construct of the virtual
* machine), and typically corresponds one-to-one with a particular
* fragment of source code.
*
* <p> Declarations should be compared using the {@link #equals(Object)}
* method. There is no guarantee that any particular declaration will
* always be represented by the same object.
*
* @deprecated All components of this API have been superseded by the
* standardized annotation processing API. The replacement for the
* functionality of this interface is {@link
* javax.lang.model.element.Element}.
*
* @author Joseph D. Darcy
* @author Scott Seligman
*
* @see Declarations
* @see TypeMirror
* @since 1.5
*/
@Deprecated
@SuppressWarnings("deprecation")
public interface Declaration {
    /**
     * Tests whether an object represents the same declaration as this.
     *
     * @param obj the object to be compared with this declaration
     * @return <tt>true</tt> if the specified object represents the same
     *          declaration as this
     */
    boolean equals(Object obj);
    /**
     * Returns the text of the documentation ("javadoc") comment of
     * this declaration.
     *
     * @return the documentation comment of this declaration, or <tt>null</tt>
     *          if there is none
     */
    String getDocComment();
    /**
     * Returns the annotations that are directly present on this declaration.
     *
     * @return the annotations directly present on this declaration;
     *          an empty collection if there are none
     */
    Collection<AnnotationMirror> getAnnotationMirrors();
    /**
     * Returns the annotation of this declaration having the specified
     * type.  The annotation may be either inherited or directly
     * present on this declaration.
     *
     * <p> The annotation returned by this method could contain an element
     * whose value is of type <tt>Class</tt>.
     * This value cannot be returned directly:  information necessary to
     * locate and load a class (such as the class loader to use) is
     * not available, and the class might not be loadable at all.
     * Attempting to read a <tt>Class</tt> object by invoking the relevant
     * method on the returned annotation
     * will result in a {@link MirroredTypeException},
     * from which the corresponding {@link TypeMirror} may be extracted.
     * Similarly, attempting to read a <tt>Class[]</tt>-valued element
     * will result in a {@link MirroredTypesException}.
     *
     * <blockquote>
     * <i>Note:</i> This method is unlike
     * others in this and related interfaces.  It operates on run-time
     * reflective information -- representations of annotation types
     * currently loaded into the VM -- rather than on the mirrored
     * representations defined by and used throughout these
     * interfaces.  It is intended for callers that are written to
     * operate on a known, fixed set of annotation types.
     * </blockquote>
     *
     * @param <A>  the annotation type
     * @param annotationType  the <tt>Class</tt> object corresponding to
     *          the annotation type
     * @return the annotation of this declaration having the specified type,
     *          or <tt>null</tt> if no annotation of that type is present
     *
     * @see #getAnnotationMirrors()
     */
    <A extends Annotation> A getAnnotation(Class<A> annotationType);
    /**
     * Returns the modifiers of this declaration, excluding annotations.
     * Implicit modifiers, such as the <tt>public</tt> and <tt>static</tt>
     * modifiers of interface members, are included.
     *
     * @return the modifiers of this declaration in undefined order;
     *          an empty collection if there are none
     */
    Collection<Modifier> getModifiers();
    /**
     * Returns the simple (unqualified) name of this declaration.
     * The name of a generic type does not include any reference
     * to its formal type parameters.
     * For example, the simple name of the interface declaration
     * {@code java.util.Set<E>} is <tt>"Set"</tt>.
     * If this declaration represents the empty package, an empty
     * string is returned.
     * If it represents a constructor, the simple name of its
     * declaring class is returned.
     *
     * @return the simple name of this declaration
     */
    String getSimpleName();
    /**
     * Returns the source position of the beginning of this declaration.
     * Returns <tt>null</tt> if the position is unknown or not applicable.
     *
     * <p> This source position is intended for use in providing
     * diagnostics, and indicates only approximately where a declaration
     * begins.
     *
     * @return the source position of the beginning of this declaration,
     *          or null if the position is unknown or not applicable
     */
    SourcePosition getPosition();
    /**
     * Applies a visitor to this declaration.
     *
     * @param v the visitor operating on this declaration
     */
    void accept(DeclarationVisitor v);
}
|
haikuowuya/android_system_code
|
src/com/sun/mirror/declaration/Declaration.java
|
Java
|
apache-2.0
| 5,514 |
package com.xsolla.android.sdk.data.model;
import java.util.List;
/**
 * Response model for a hold-subscription API call: an overall status plus an
 * optional list of error messages and raw API info.
 */
public class XHoldSubscriptionStatus {
    private String status;
    private List<XMessage> errors;
    private XApi api;

    /** @return the subscription status string reported by the server */
    public String getStatus() {
        return status;
    }

    /** @return the error messages, or {@code null} if the response carried none */
    public List<XMessage> getErrors() {
        return errors;
    }

    /**
     * Joins all error messages into a single newline-separated string.
     * Returns an empty string when there are no errors (previously this threw
     * StringIndexOutOfBoundsException on an empty list and NPE on a null list).
     *
     * @return newline-separated error messages, or "" if there are none
     */
    public String getErrorMsg() {
        if (errors == null || errors.isEmpty()) {
            return "";
        }
        StringBuilder sb = new StringBuilder();
        for (XMessage message : errors) {
            sb.append(message.getMessage()).append("\n");
        }
        // Drop the trailing newline appended after the last message.
        sb.deleteCharAt(sb.length() - 1);
        return sb.toString();
    }

    @Override
    public String toString() {
        // Fixed: previously reported the wrong class name ("XHoldSubscription").
        return "XHoldSubscriptionStatus{" +
                "status='" + status + '\'' +
                ", errors=" + errors +
                ", api=" + api +
                '}';
    }
}
|
xsolla/xsolla-sdk-android
|
xsollasdk/src/main/java/com/xsolla/android/sdk/data/model/XHoldSubscriptionStatus.java
|
Java
|
apache-2.0
| 827 |
package br.eti.arthurgregorio.fulljeearch.domain.security;
/**
 * Security role names recognized by the application.
 *
 * <p>Interface fields are implicitly {@code public static final}, so the
 * redundant modifiers have been dropped.
 *
 * @author Arthur
 */
public interface ApplicationRoles {

    /** Role granted to regular users. */
    String USER = "Usuario";

    /** Role granted to administrators. */
    String ADMINISTRATOR = "Administrador";
}
|
arthurgregorio/exemplos
|
FullJeeArch/src/main/java/br/eti/arthurgregorio/fulljeearch/domain/security/ApplicationRoles.java
|
Java
|
apache-2.0
| 228 |
package com.almende.dialog.example.agent;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import com.almende.dialog.Settings;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.Question;
import com.almende.util.ParallelInit;
import com.almende.util.twigmongo.QueryResultIterator;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore.RootFindCommand;
import com.almende.util.twigmongo.annotations.Id;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * JAX-RS dialog agent that asks a yes/no availability question, records each
 * respondent's phone number into an answer-specific {@code Group} in the
 * datastore, and exposes the collected groups under {@code /yesno/result}.
 */
@Path("yesno")
public class YesNoAgent {
    static final ObjectMapper om =ParallelInit.getObjectMapper();
    private static final String URL = "http://"+Settings.HOST+"/yesno/";
    // Base URL for pre-recorded audio prompts used when the medium is audio.
    private static final String SOUNDURL = "http://ask4604.ask46.customers.luna.net/rest/";
    private static final Logger log = Logger
            .getLogger("DialogHandler");
    /**
     * Builds the closed yes/no question for the given question number.
     * For audio media the question/answer texts are replaced by sound-file URLs.
     *
     * @param question_no      which question to ask (see getQuestionText)
     * @param preferred_medium e.g. "audio/..." selects audio prompts; anything else uses text URLs
     * @param phonenumber      respondent id, threaded through the answer callback URLs
     */
    public Question getQuestion(int question_no, String preferred_medium, String phonenumber) {
        String questionURL = URL+"questions/"+question_no;
        String answerYesURL = URL+"answers/0";
        String answerNoURL = URL+"answers/1";
        if (preferred_medium != null && preferred_medium.startsWith("audio")){
            questionURL = this.getAudioFile(question_no);
            answerYesURL= SOUNDURL+"14.wav";
            answerNoURL= SOUNDURL+"14.wav";
        }
        Question question=new Question();
        question.setRequester(URL+"id/");
        question.setType("closed");
        question.setQuestion_text(questionURL);
        // Both answers call back into answerQuestion() with answer=yes/no encoded in the URL.
        question.setAnswers(new ArrayList<Answer>(Arrays.asList(
                new Answer(answerYesURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=yes"),
                new Answer(answerNoURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=no"))));
        return question;
    }
    /** Identity endpoint: returns this agent's URL and nickname as JSON. */
    @GET
    @Path("/id/")
    public Response getId(@QueryParam("preferred_language") String preferred_language){
        ObjectNode node= om.createObjectNode();
        node.put("url", URL);
        node.put("nickname", "YesNo");
        return Response.ok(node.toString()).build();
    }
    /**
     * Entry point of the dialog: picks question 1 for "live" requesters (or the
     * hard-coded number), question 0 otherwise, and returns it as JSON.
     */
    @GET
    @Produces("application/json")
    public Response firstQuestion(@QueryParam("preferred_medium") String preferred_medium, @QueryParam("remoteAddress") String responder, @QueryParam("requester") String requester){
        int questionNo=0;
        if(requester.contains("live") || requester.contains("0107421217")){
            questionNo=1;
        }
        try {
            responder = URLDecoder.decode(responder, "UTF-8");
        } catch (Exception ex) {
            log.severe(ex.getMessage());
        }
        Question question = getQuestion(questionNo, preferred_medium, responder);
        return Response.ok(question.toJSON()).build();
    }
    /**
     * Answer callback: stores the phone number in group "Group.&lt;question&gt;.&lt;answer&gt;"
     * and replies with the closing "thank you" comment (question 99).
     */
    @Path("/questions/{question_no}")
    @POST
    @Produces("application/json")
    @Consumes("*/*")
    public Response answerQuestion(@PathParam("question_no") String question_no, @QueryParam("preferred_medium") String preferred_medium,
                                                                @QueryParam("pn") String phonenumber, @QueryParam("answer") String answer){
        Group group = this.getGroup("Group."+question_no+"."+answer);
        group.addMember(phonenumber);
        TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
        datastore.store(group);
        int responseQuestion=99;
        String questionURL = URL+"questions/"+responseQuestion;
        if (preferred_medium != null && preferred_medium.startsWith("audio")){
            questionURL = this.getAudioFile(responseQuestion);
        }
        Question question=new Question();
        question.setRequester(URL+"id/");
        question.setType("comment");
        question.setQuestion_text(questionURL);
        return Response.ok( question.toJSON() ).build();
    }
    /** Returns the plain-text body of a question by number. */
    @Path("/questions/{question_no}")
    @GET
    @Produces("text/plain")
    @Consumes("*/*")
    public Response getQuestionText(@PathParam("question_no") String question_no ){
        Integer questionNo = Integer.parseInt(question_no);
        String result = "";
        // These messages are now static but should be loaded from the LifeRay Database.
        switch (questionNo){
            case 0: result="Press 1 if you are available, press 2 if you are unavailable."; break;
            case 1: result="Are you available?"; break;
            case 99: result="Thank you for your input"; break;
            default: result="Sorry, for some strange reason I don't have that question text available...";
        }
        return Response.ok(result).build();
    }
    /** Returns the plain-text label of an answer by number (0=Yes, 1=No). */
    @Path("/answers/{answer_no}")
    @GET
    @Produces("text/plain")
    @Consumes("*/*")
    public Response getAnswerText(@PathParam("answer_no") String answer_no, @QueryParam("preferred_medium") String prefered_mimeType){
        Integer answerNo = Integer.parseInt(answer_no);
        String result="";
        // These messages can be static, because they are always the same.
        switch (answerNo){
            case 0: result="Yes"; break;
            case 1: result="No"; break;
            default: result="Sorry, for some strange reason I don't have that answer text available...";
        }
        return Response.ok(result).build();
    }
    // This urls will present the results
    /** Serializes all stored groups to JSON; returns an empty body on serialization failure. */
    @Path("result")
    @GET
    public Response getResults() {
        String result="";
        ArrayList<Group> groups = (ArrayList<Group>) this.getAllGroups();
        try {
            result = om.writeValueAsString(groups);
        } catch(Exception ex) {
            ex.printStackTrace();
        }
        return Response.ok( result ).build();
    }
    // These functions should get their data from the liferay database.
    // These are the audio files linked to the questions
    public String getAudioFile(int question_no) {
        switch(question_no) {
            case 0: return SOUNDURL+"571.wav";
            case 1: return SOUNDURL+"572.wav";
            case 99: return SOUNDURL+"567.wav";
            default: return SOUNDURL+"529.wav";
        }
    }
    // These 2 functions are the group management
    /** Loads the group with the given id from the datastore, or creates a fresh (unstored) one. */
    public Group getGroup(String id) {
        TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
        Group group = datastore.load(Group.class, id);
        if(group!=null)
            return group;
        group = new Group();
        group.setId(id);
        return group;
    }
    /** Iterates the datastore query and collects every stored Group. */
    public List<Group> getAllGroups() {
        TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
        RootFindCommand<Group> command = datastore.find()
                .type(Group.class);
        QueryResultIterator<Group> it = command.now();
        List<Group> groups = new ArrayList<Group>();
        while (it.hasNext()) {
            groups.add(it.next());
        }
        return groups;
    }
}
/**
 * A poll group: the set of phone numbers that gave one particular answer to one
 * particular question. Persisted in the datastore under a composite id of the
 * form {@code "Group.<question_no>.<answer>"}.
 */
@SuppressWarnings("serial")
class Group implements Serializable {

    /** Datastore key, e.g. "Group.1.yes". */
    @Id private String id;

    /** Distinct phone numbers that gave this answer. */
    private Set<String> members = new HashSet<String>();

    public Group() {
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Set<String> getMembers() {
        return members;
    }

    public void addMember(String member) {
        members.add(member);
    }
}
|
almende/dialog
|
dialoghandler/src/main/java/com/almende/dialog/example/agent/YesNoAgent.java
|
Java
|
apache-2.0
| 7,159 |
package com.suscipio_solutions.consecro_mud.Commands;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMParms;
/**
 * The NOFOLLOW/NOFOL command. With no argument it toggles the caller's
 * {@code MOB.Attrib.NOFOLLOW} flag (accept / refuse new followers). With an
 * argument it detaches a named follower; the internal "UNFOLLOW" form detaches
 * all followers (optionally quietly). Helper methods such as {@code I}, {@code L},
 * {@code unfollow} and {@code nofollow} come from the {@code Follow} superclass /
 * command framework — presumably message-localization and follower bookkeeping;
 * confirm against the superclass.
 */
@SuppressWarnings("rawtypes")
public class NoFollow extends Follow
{
	public NoFollow(){}
	// Command words that invoke this command.
	private final String[] access=I(new String[]{"NOFOLLOW","NOFOL"});
	@Override public String[] getAccessWords(){return access;}
	/**
	 * Executes the command. Returns true only when a named follower was
	 * actually detached; all other paths (toggle, errors, UNFOLLOW) return false.
	 */
	@Override
	public boolean execute(MOB mob, Vector commands, int metaFlags)
		throws java.io.IOException
	{
		if((commands.size()>1)&&(commands.elementAt(0) instanceof String))
		{
			// Internal form: "UNFOLLOW [QUIETLY]" detaches everyone following mob.
			if(((String)commands.elementAt(0)).equalsIgnoreCase("UNFOLLOW"))
			{
				unfollow(mob,((commands.size()>1)&&(commands.elementAt(1) instanceof String)&&(((String)commands.elementAt(1)).equalsIgnoreCase("QUIETLY"))));
				return false;
			}
			// Named form: find the follower by name among mob's followers.
			MOB M=mob.fetchFollower(CMParms.combine(commands,1));
			if((M==null)&&(mob.location()!=null))
			{
				// Not a follower — check the room so we can give a precise error message.
				M=mob.location().fetchInhabitant(CMParms.combine(commands,1));
				if(M!=null)
					mob.tell(L("@x1 is not following you!",M.name(mob)));
				else
					mob.tell(L("There is noone here called '@x1' following you!",CMParms.combine(commands,1)));
				return false;
			}
			if((mob.location()!=null)&&(M!=null)&&(M.amFollowing()==mob))
			{
				nofollow(M,true,false);
				return true;
			}
			mob.tell(L("There is noone called '@x1' following you!",CMParms.combine(commands,1)));
			return false;
		}
		// No argument: toggle whether mob accepts new followers.
		if(!mob.isAttribute(MOB.Attrib.NOFOLLOW))
		{
			mob.setAttribute(MOB.Attrib.NOFOLLOW,true);
			//unfollow(mob,false);
			mob.tell(L("You are no longer accepting new followers."));
		}
		else
		{
			mob.setAttribute(MOB.Attrib.NOFOLLOW,false);
			mob.tell(L("You are now accepting new followers."));
		}
		return false;
	}
	@Override public boolean canBeOrdered(){return true;}
}
|
ConsecroMUD/ConsecroMUD
|
com/suscipio_solutions/consecro_mud/Commands/NoFollow.java
|
Java
|
apache-2.0
| 1,837 |
/**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.ir.researcher;
import edu.ur.ir.FileSystem;
import edu.ur.ir.FileSystemType;
import edu.ur.persistent.CommonPersistent;
/**
* This is a link in the researcher folder. This
* creates a link between a link and a researcher
* folder
*
* @author Sharmila Ranganathan
*
*/
public class ResearcherLink extends CommonPersistent implements FileSystem{

    /** Eclipse generated id */
    private static final long serialVersionUID = 3144484183634385274L;

    /** Link URL. */
    private String url;

    /** Researcher folder the link belongs to; null for a root-level link. */
    private ResearcherFolder parentFolder;

    /** Researcher the link belongs to. */
    private Researcher researcher;

    /** Represents the file system type for this researcher link. */
    private FileSystemType fileSystemType = FileSystemType.RESEARCHER_LINK;

    /**
     * Package protected constructor.
     */
    ResearcherLink(){}

    /**
     * Create a researcher link with a null researcher folder.  This means this
     * is a root researcher link.
     *
     * @param researcher owner of the link
     * @param link the URL; must not be null
     * @throws IllegalStateException if link is null (consistent with the folder constructor)
     */
    ResearcherLink(Researcher researcher, String link)
    {
        if(link == null)
        {
            throw new IllegalStateException("link cannot be null");
        }
        setResearcher(researcher);
        setUrl(link);
    }

    /**
     * Create a link between a folder and link.
     *
     * @param researcher owner of the link
     * @param parentFolder folder the link is in
     * @param link link to create a link with; must not be null
     * @throws IllegalStateException if link is null
     */
    ResearcherLink(Researcher researcher, ResearcherFolder parentFolder, String link)
    {
        if(link == null)
        {
            throw new IllegalStateException("link cannot be null");
        }
        setResearcher(researcher);
        setUrl(link);
        setParentFolder(parentFolder);
    }

    /**
     * Returns the path for this link: the parent folder's full path, or the
     * path separator alone when this is a root link.
     *
     * @return the path of the parent folder
     */
    public String getPath()
    {
        String path = null;
        if(parentFolder == null)
        {
            path = PATH_SEPERATOR;
        }
        else
        {
            path = parentFolder.getFullPath();
        }
        return path;
    }

    /**
     * Overridden to string method.
     *
     * @see java.lang.Object#toString()
     */
    public String toString()
    {
        // StringBuilder: no synchronization needed for a local builder.
        StringBuilder sb = new StringBuilder("[ id = ");
        sb.append(id);
        sb.append( " path = ");
        sb.append(getPath());
        sb.append( " parent Folder = ");
        sb.append(parentFolder);
        sb.append(" name = ");
        sb.append(name);
        sb.append(" link = ");
        sb.append(url);
        sb.append("]");
        return sb.toString();
    }

    /**
     * Get the full path of this link.  If there is
     * no parent folder the path is just the name of
     * the link.
     *
     * @return the full path.
     */
    public String getFullPath()
    {
        return getPath() + getName();
    }

    /**
     * Hash code for a researcher link, based on parent folder, name and researcher.
     *
     * @see java.lang.Object#hashCode()
     */
    public int hashCode()
    {
        int value = 0;
        value += parentFolder == null ? 0 : parentFolder.hashCode();
        value += getName() == null ? 0 : getName().hashCode();
        value += researcher == null ? 0 : researcher.hashCode();
        return value;
    }

    /**
     * Equals method for a researcher link: two links are equal when their
     * name, researcher and full path are all (null-safely) equal.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (!(o instanceof ResearcherLink)) return false;

        final ResearcherLink other = (ResearcherLink) o;

        if( (other.getName() != null && !other.getName().equals(getName())) ||
            (other.getName() == null && getName() != null ) ) return false;

        if( (other.getResearcher() != null && !other.getResearcher().equals(getResearcher())) ||
            (other.getResearcher() == null && getResearcher() != null ) ) return false;

        if( (other.getFullPath() != null && !other.getFullPath().equals(getFullPath())) ||
            (other.getFullPath() == null && getFullPath() != null ) ) return false;

        return true;
    }

    /**
     * Returns the name of the link.
     *
     * @see edu.ur.simple.type.NameAware#getName()
     */
    public String getName() {
        return name;
    }

    /**
     * Returns the description of the link.
     *
     * @see edu.ur.simple.type.DescriptionAware#getDescription()
     */
    public String getDescription() {
        return description;
    }

    /* (non-Javadoc)
     * @see edu.ur.ir.FileSystem#getFileSystemType()
     */
    public FileSystemType getFileSystemType() {
        return fileSystemType;
    }

    /** @return the link URL */
    public String getUrl() {
        return url;
    }

    /** @param url the link URL */
    public void setUrl(String url) {
        this.url = url;
    }

    /** @return the parent folder, or null for a root link */
    public ResearcherFolder getParentFolder() {
        return parentFolder;
    }

    public void setParentFolder(ResearcherFolder parentFolder) {
        this.parentFolder = parentFolder;
    }

    /** @return the researcher owning this link */
    public Researcher getResearcher() {
        return researcher;
    }

    public void setResearcher(Researcher researcher) {
        this.researcher = researcher;
    }
}
|
nate-rcl/irplus
|
ir_core/src/edu/ur/ir/researcher/ResearcherLink.java
|
Java
|
apache-2.0
| 5,358 |
package org.dbflute.erflute.db.impl.mysql;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dbflute.erflute.editor.model.dbimport.DBObject;
import org.dbflute.erflute.editor.model.dbimport.PreImportFromDBManager;
/**
 * MySQL-specific pre-import manager: lists database objects (tables, views, ...)
 * via JDBC metadata so the user can choose what to import.
 */
public class MySQLPreTableImportManager extends PreImportFromDBManager {

    /**
     * Queries {@link java.sql.DatabaseMetaData#getTables} for every schema in
     * {@code schemaList} and returns one {@link DBObject} per matching object.
     * For tables, objects whose metadata cannot be read (e.g. owned by another
     * user) are skipped. Uses try-with-resources so the ResultSet is always
     * closed, replacing the previous manual try/finally.
     *
     * @param types        JDBC table types to match (e.g. {"TABLE"})
     * @param dbObjectType the DBObject type constant to assign to each result
     * @throws SQLException if the metadata query itself fails
     */
    @Override
    protected List<DBObject> importObjects(String[] types, String dbObjectType) throws SQLException {
        final List<DBObject> list = new ArrayList<>();
        if (schemaList.isEmpty()) {
            // A null schema pattern means "all schemas" to JDBC.
            schemaList.add(null);
        }
        // Connector/J 8+ exposes MySQL databases as catalogs rather than schemas.
        final String catalog = (8 <= metaData.getDriverMajorVersion()) ? dbSetting.getDatabase() : null;
        for (final String schemaPattern : schemaList) {
            try (ResultSet resultSet = metaData.getTables(catalog, schemaPattern, null, types)) {
                while (resultSet.next()) {
                    final String schema = resultSet.getString("TABLE_SCHEM");
                    final String name = resultSet.getString("TABLE_NAME");
                    if (DBObject.TYPE_TABLE.equals(dbObjectType)) {
                        try {
                            getAutoIncrementColumnName(con, schema, name);
                        } catch (final SQLException e) {
                            e.printStackTrace();
                            // If the table's information cannot be read (for example it is
                            // owned by another user), do not use this table.
                            continue;
                        }
                    }
                    final DBObject dbObject = new DBObject(schema, name, dbObjectType);
                    list.add(dbObject);
                }
            }
        }
        return list;
    }
}
|
dbflute-session/erflute
|
src/org/dbflute/erflute/db/impl/mysql/MySQLPreTableImportManager.java
|
Java
|
apache-2.0
| 2,037 |
package org.gradle.test.performance.mediummonolithicjavaproject.p36;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Generated round-trip tests for {@code Production730}: each property setter is
 * exercised and the getter is expected to return the exact value written.
 */
public class Test730 {

    /** Shared sample value written through every setter. */
    private static final String VALUE = "value";

    Production730 objectUnderTest = new Production730();

    @Test
    public void testProperty0() {
        objectUnderTest.setProperty0(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty0());
    }

    @Test
    public void testProperty1() {
        objectUnderTest.setProperty1(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty1());
    }

    @Test
    public void testProperty2() {
        objectUnderTest.setProperty2(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty2());
    }

    @Test
    public void testProperty3() {
        objectUnderTest.setProperty3(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty3());
    }

    @Test
    public void testProperty4() {
        objectUnderTest.setProperty4(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty4());
    }

    @Test
    public void testProperty5() {
        objectUnderTest.setProperty5(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty5());
    }

    @Test
    public void testProperty6() {
        objectUnderTest.setProperty6(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty6());
    }

    @Test
    public void testProperty7() {
        objectUnderTest.setProperty7(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty7());
    }

    @Test
    public void testProperty8() {
        objectUnderTest.setProperty8(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty8());
    }

    @Test
    public void testProperty9() {
        objectUnderTest.setProperty9(VALUE);
        assertEquals(VALUE, objectUnderTest.getProperty9());
    }
}
|
oehme/analysing-gradle-performance
|
my-lib/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p36/Test730.java
|
Java
|
apache-2.0
| 2,107 |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.common.metrics.impl;
import com.yahoo.athenz.common.metrics.Metric;
/**
 * {@link Metric} implementation that records nothing: every operation is an
 * intentional no-op. Useful when metrics collection is disabled.
 */
public class NoOpMetric implements Metric {
    /**
     * Constructs a new NoOpMetric object in which all methods are stubs.
     * No metrics are recorded with this implementation.
     */
    public NoOpMetric() {
    }

    /** No-op: the counter increment is discarded. */
    @Override
    public void increment(String metric) {
    }

    /** No-op: the per-domain counter increment is discarded. */
    @Override
    public void increment(String metric, String domainName) {
    }

    /** No-op: the counted increment is discarded. */
    @Override
    public void increment(String metric, String domainName, int count) {
    }

    /** No-op: always returns {@code null} instead of a timer handle. */
    @Override
    public Object startTiming(String metric, String domainName) {
        return null;
    }

    /** No-op: the timer handle (always {@code null} here) is ignored. */
    @Override
    public void stopTiming(Object timerMetric) {
    }

    /** No-op: there is nothing buffered to flush. */
    @Override
    public void flush() {
    }

    /** No-op: no resources to release. */
    @Override
    public void quit() {
    }
}
|
tatyano/athenz
|
libs/java/server_common/src/main/java/com/yahoo/athenz/common/metrics/impl/NoOpMetric.java
|
Java
|
apache-2.0
| 1,436 |
package ru.job4j.polymorphism;
/**
* Created on 01.09.2017.
*
* @author Aleks Sidorenko (alek.sidorenko1979@gmail.com).
* @version $Id$.
* @since 0.1.
*/
/**
 * Scripted {@link Input} implementation for tests: every call to
 * {@link #ask(String)} replays the next pre-recorded answer, in order.
 *
 * @author Aleks Sidorenko (alek.sidorenko1979@gmail.com).
 * @version $Id$.
 * @since 0.1.
 */
public class StubInput implements Input {
    /** Pre-recorded answers, handed out one per call. */
    private String[] answers;
    /** Index of the next answer to return. */
    private int position = 0;

    /**
     * Creates a stub that will replay the given answers in order.
     * @param answers pre-recorded answers.
     */
    public StubInput(String[] answers) {
        this.answers = answers;
    }

    /**
     * Returns the next scripted answer; the question text is ignored.
     * @param question prompt (unused).
     * @return the next pre-recorded answer.
     */
    public String ask(String question) {
        String answer = this.answers[this.position];
        this.position++;
        return answer;
    }
}
|
AlSidorenko/Junior
|
chapter_002/src/main/java/ru/job4j/polymorphism/StubInput.java
|
Java
|
apache-2.0
| 747 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.attribute;
import io.undertow.server.HttpServerExchange;
/**
* The thread name
*
* @author Stuart Douglas
*/
/**
 * Exchange attribute resolving to the name of the thread handling the request.
 * Read-only: attempting to write it always fails.
 *
 * @author Stuart Douglas
 */
public class ThreadNameAttribute implements ExchangeAttribute {

    public static final String THREAD_NAME_SHORT = "%I";
    public static final String THREAD_NAME = "%{THREAD_NAME}";

    public static final ExchangeAttribute INSTANCE = new ThreadNameAttribute();

    private ThreadNameAttribute() {
    }

    @Override
    public String readAttribute(final HttpServerExchange exchange) {
        // Purely thread-local; the exchange itself is not consulted.
        return Thread.currentThread().getName();
    }

    @Override
    public void writeAttribute(final HttpServerExchange exchange, final String newValue) throws ReadOnlyAttributeException {
        throw new ReadOnlyAttributeException("Thread name", newValue);
    }

    /** Builder recognising both the short ({@code %I}) and long token form. */
    public static final class Builder implements ExchangeAttributeBuilder {

        @Override
        public String name() {
            return "Thread name";
        }

        @Override
        public ExchangeAttribute build(final String token) {
            return (token.equals(THREAD_NAME) || token.equals(THREAD_NAME_SHORT))
                    ? ThreadNameAttribute.INSTANCE
                    : null;
        }
    }
}
|
emag/codereading-undertow
|
core/src/main/java/io/undertow/attribute/ThreadNameAttribute.java
|
Java
|
apache-2.0
| 1,955 |
package org.techniche.technothlon.katana.tcd;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Looper;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.techniche.technothlon.katana.R;
import org.techniche.technothlon.katana.db.TCDDatabase;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* Helper class for providing sample content for user interfaces created by
* Android template wizards.
* <p/>
* TODO: Replace all uses of this class before publishing your app.
*/
public class TCDContent {
/**
* An array of sample (dummy) items.
*/
public static List<TCDQuestionMini> ITEMS = new ArrayList<TCDQuestionMini>();
/**
* A map of sample (dummy) items, by ID.
*/
public static Map<String, TCDQuestion> ITEM_MAP = new HashMap<String, TCDQuestion>();
private static String url = "http://localhost/technothlon/technocoupdoeil_app_gateway/android/?technocoupdoeil=fjalkfq2045rudacnavsofu0aswd988q29ra&lastFetchId=";
private static int download(Context context) {
SharedPreferences sharedPref = context.getSharedPreferences(
context.getString(R.string.preference_file_key), Context.MODE_PRIVATE);
long lastFetchID = sharedPref.getLong(context.getString(R.string.tcd_fetch_id), 0);
Log.d("Pref - log", lastFetchID + " from shared pref");
ConnectivityManager connMgr = (ConnectivityManager)
context.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
if (networkInfo != null && networkInfo.isConnected()) {
try {
JSONObject json = new JSONObject(downloadUrl(url + lastFetchID));
if (json.getString("status").equals("success")) {
TCDDatabase db = new TCDDatabase(context);
JSONArray questions = json.getJSONArray("questions");
lastFetchID = json.getLong("lastFetchId");
int count = json.getInt("questions_count"), lastID;
for (int i = 0; i < count; i++) {
JSONObject q = questions.getJSONObject(i);
JSONObject links = q.getJSONObject("links");
lastID = q.getInt("uniqueId");
db.insert(
lastID,
q.getString("id"),
q.getString("color"),
q.getString("title"),
q.getString("question"),
links.getString("facebook"),
links.getString("google"),
links.getString("tumblr"),
links.getString("answer"),
q.getString("by"),
q.getString("time"),
q.getString("answer")
);
Log.d("Database - log", lastID + " loaded in database");
}
db.close();
SharedPreferences.Editor edit = sharedPref.edit();
edit.putLong(context.getString(R.string.tcd_fetch_id), lastFetchID);
edit.commit();
} else if (json.getString("status").equals("reset")) {
TCDDatabase db = new TCDDatabase(context);
db.reset();
db.close();
SharedPreferences.Editor edit = sharedPref.edit();
edit.putLong(context.getString(R.string.tcd_fetch_id), 0);
edit.commit();
download(context);
}
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Completed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 0;
} catch (JSONException e) {
e.printStackTrace();
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 3;
} catch (IOException e) {
e.printStackTrace();
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 2;
}
} else {
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "No network connection available.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 1;
}
}
private static String downloadUrl(String myurl) throws IOException {
InputStream is = null;
// Only display the first 500 characters of the retrieved
// web page content.
try {
URL url = new URL(myurl);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setReadTimeout(10000 /* milliseconds */);
conn.setConnectTimeout(15000 /* milliseconds */);
conn.setRequestMethod("GET");
conn.setDoInput(true);
// Starts the query
conn.connect();
int response = conn.getResponseCode();
Log.d("TCD latest downloads", "The response is: " + response);
int size = conn.getContentLength();
Log.d("TCD latest downloads", "The content-length is: " + size);
is = conn.getInputStream();
// Convert the InputStream into a string
return readTextResponse(is);
// Makes sure that the InputStream is closed after the app is
// finished using it.
} finally {
if (is != null) {
is.close();
}
}
}
private static String readTextResponse(InputStream inputStream) throws IOException {
Reader in = new InputStreamReader(inputStream);
BufferedReader bufferedreader = new BufferedReader(in);
StringBuilder stringBuilder = new StringBuilder();
String stringReadLine;
while ((stringReadLine = bufferedreader.readLine()) != null) {
stringBuilder.append(stringReadLine);
}
return stringBuilder.toString();
}
public static void load(Context context) {
boolean update = ITEMS.isEmpty() ? false : true;
TCDDatabase helper = new TCDDatabase(context);
SQLiteDatabase db = helper.getReadableDatabase();
assert db != null;
Cursor c = db.rawQuery("SELECT * FROM " + TCDDatabase.Contracts.NAME + " ORDER BY " + TCDDatabase.Contracts.FIELD_TIME + " DESC, " + TCDDatabase.Contracts.FIELD_ID + " DESC", null);
Log.d("DB", c.getCount() + " object in database");
c.moveToFirst();
while (!c.isAfterLast()) {
addItem(new TCDQuestion(
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ID)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_DISPLAY_ID)),
c.getInt(c.getColumnIndex(TCDDatabase.Contracts.FIELD_COLOR)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TITLE)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_QUESTION)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_FACEBOOK)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_GOOGLE)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TUMBLR)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER_URL)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_BY)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TIME))
), update);
c.moveToNext();
}
c.close();
db.close();
}
private static void addItem(TCDQuestion item, boolean update) {
if (!ITEM_MAP.containsKey(item.uniqueId)) {
if (update) ITEMS.add(0, (new TCDQuestionMini(item.uniqueId)));
else ITEMS.add((new TCDQuestionMini(item.uniqueId)));
ITEM_MAP.put(item.uniqueId, item);
}
}
public abstract static class TCDLoader extends AsyncTask<Object, Integer, Integer> {
@Override
protected Integer doInBackground(Object[] params) {
int d = 4;
try {
d = download((Context) params[0]);
} catch (Exception e) {
e.printStackTrace();
} finally {
load((Context) params[0]);
}
return d;
}
@Override
protected void onPostExecute(Integer o) {
finished(o);
}
public abstract void finished(int result);
}
/**
* A dummy item representing a piece of content.
*/
public static class TCDQuestion {
public String id;
public String question;
public String facebook;
public String google;
public String tumblr;
public String answer_url;
public String by;
public String answer;
public String title;
public java.util.Date date = null;
public String dateString = "";
public int color = R.drawable.tcd_background_1;
public String uniqueId;
private String status;
private boolean ret = false;
public TCDQuestion(String uniqueId, String id, int color, String title, String question, String facebook, String google, String tumblr,
String answer_url, String by, String answer, String status) {
this.uniqueId = uniqueId;
this.id = id;
this.title = title;
this.question = question;
this.facebook = facebook;
this.google = google;
this.tumblr = tumblr;
this.answer_url = answer_url;
this.by = by;
this.color = getBackground(color);
this.answer = answer;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
try {
this.date = sdf.parse(status);
} catch (ParseException e) {
e.printStackTrace();
}
sdf = new SimpleDateFormat("yyyy-MM-dd");
assert this.date != null;
this.dateString = sdf.format(this.date);
this.status = getStatus();
}
private int getBackground(int color) {
switch (color) {
case 10:
return R.drawable.tcd_background_2;
case 20:
return R.drawable.tcd_background_3;
case 30:
return R.drawable.tcd_background_4;
case 40:
return R.drawable.tcd_background_5;
case 50:
return R.drawable.tcd_background_6;
default:
return R.drawable.tcd_background_1;
}
}
public String getStatus() {
if (ret) return status;
long seconds = Math.abs(((new Date()).getTime() - date.getTime()) / 1000);
if (seconds < 60) status = "about " + seconds + " seconds ago";
else if (seconds < 3600) status = "about " + (seconds / 60) + " minutes ago";
else if (seconds < 86400) status = "about " + (seconds / 3600) + " hours ago";
else if (seconds < 172800) status = "yesterday";
else if (seconds < 345600) status = (seconds / 86400) + " days ago";
else {
ret = true;
status = dateString;
}
return status;
}
}
public static class TCDHolder {
public TextView id, title, question, status;
}
public static class TCDQuestionMini {
public String id;
public TCDQuestionMini(String id) {
this.id = id;
}
}
}
|
znck/technothlon-android-app
|
katana/src/main/java/org/techniche/technothlon/katana/tcd/TCDContent.java
|
Java
|
apache-2.0
| 13,524 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.scan;
import java.lang.annotation.Annotation;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashSet;
import java.util.Set;
import org.apache.camel.impl.DefaultPackageScanClassResolver;
import org.apache.camel.spring.scan.a.ScanTargetOne;
import org.apache.camel.spring.scan.b.ScanTargetTwo;
import org.apache.camel.spring.scan.c.ScanTargetThree;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link DefaultPackageScanClassResolver}: annotation scanning,
 * implementation scanning, include/exclude filters, and scanning packages
 * that live inside jar URLs (including jars with '+' in their file name).
 */
public class DefaultPackageScanClassResolverTest extends ScanTestSupport {

    private DefaultPackageScanClassResolver resolver;
    private Set<Class<? extends Annotation>> annotations = new HashSet<>();
    private String scanPackage = "org.apache.camel.spring.scan";

    @Before
    public void setUp() throws Exception {
        super.setUp();
        resolver = new DefaultPackageScanClassResolver();
        annotations.add(ScannableOne.class);
        annotations.add(ScannableTwo.class);
    }

    @Test
    public void testAccepableSchema() {
        assertFalse("We should not accept the test by default!", resolver.isAcceptableScheme("test://test"));
        resolver.setAcceptableSchemes("test:;test2:");
        assertTrue("We should accept the test:!", resolver.isAcceptableScheme("test://test"));
        assertTrue("We should accept the test2:!", resolver.isAcceptableScheme("test2://test"));
    }

    @Test
    public void testFindByAnnotationWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findAnnotated(ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);
        scanned = resolver.findAnnotated(ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetThree.class);
    }

    @Test
    public void testFindByAnnotationsWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findAnnotated(annotations, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
    }

    @Test
    public void testFindImplementationsWithoutExtraFilters() {
        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);
    }

    @Test
    public void testFindByAnnotationWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findAnnotated(ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);
        scanned = resolver.findAnnotated(ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned);
    }

    @Test
    public void testFindByAnnotationsWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        filter.addIncludePattern(scanPackage + ".c.*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findAnnotated(annotations, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
    }

    @Test
    public void testFindByAnnotationWithExcludePackageFilter() {
        filter.addExcludePattern(scanPackage + ".b.*");
        filter.addExcludePattern(scanPackage + ".c.*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findAnnotated(ScannableOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class);
        scanned = resolver.findAnnotated(ScannableTwo.class, scanPackage);
        validateMatchingSetContains(scanned);
    }

    @Test
    public void testFindByAnnotationsWithExcludePackageFilter() {
        filter.addExcludePattern(scanPackage + ".a.*");
        Set<Class<?>> scanned = resolver.findAnnotated(annotations, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
    }

    @Test
    public void testFindByFilterWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".**.ScanTarget*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findByFilter(filter, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
    }

    @Test
    public void testFindImplementationsWithIncludePackageFilter() {
        filter.addIncludePattern(scanPackage + ".b.*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);
    }

    @Test
    public void testFindImplementationsWithExcludePackageFilter() {
        filter.addExcludePattern(scanPackage + ".a.*");
        resolver.addFilter(filter);
        Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
        validateMatchingSetContains(scanned, ScanTargetTwo.class);
    }

    @Test
    // Need to run the mvn clean install to create the jar file when running it from IDE
    public void testFindByFilterPackageInJarUrl() throws Exception {
        assertFindsTestClassInJar("/package_scan_test.jar");
    }

    @Test
    // Need to run the mvn clean install to create the test jar file when running it from IDE
    public void testFindByFilterPackageInJarUrlWithPlusChars() throws Exception {
        assertFindsTestClassInJar("/package+scan+test.jar");
    }

    /**
     * Scans package a.b.c inside the given classpath jar resource and asserts
     * that exactly the class a.b.c.Test is found. The original context class
     * loader is restored afterwards.
     */
    private void assertFindsTestClassInJar(String jarResource) throws Exception {
        ClassLoader savedClassLoader = null;
        try {
            savedClassLoader = Thread.currentThread().getContextClassLoader();
            // build a mock URLClassLoader around the jar resource
            URL url = getClass().getResource(jarResource);
            URL[] urls = {new URL("jar:" + url.toString() + "!/")};
            URLClassLoader classLoader = new URLClassLoader(urls, savedClassLoader);
            Thread.currentThread().setContextClassLoader(classLoader);
            // recreate resolver since we mess with the context class loader
            resolver = new DefaultPackageScanClassResolver();
            filter.addIncludePattern("a.*.c.*");
            resolver.addFilter(filter);
            Set<Class<?>> scanned = resolver.findByFilter(filter, "a.b.c");
            assertEquals(1, scanned.size());
            assertEquals("class a.b.c.Test", scanned.iterator().next().toString());
        } finally {
            if (savedClassLoader != null) {
                Thread.currentThread().setContextClassLoader(savedClassLoader);
            }
        }
    }
}
|
punkhorn/camel-upstream
|
components/camel-spring/src/test/java/org/apache/camel/spring/scan/DefaultPackageScanClassResolverTest.java
|
Java
|
apache-2.0
| 8,636 |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.snmp.ctl;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfiguration;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfigurationFactory;
import com.btisystems.pronx.ems.core.snmp.ISnmpSession;
import com.btisystems.pronx.ems.core.snmp.ISnmpSessionFactory;
import com.google.common.collect.Maps;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.alarm.Alarm;
import org.onosproject.alarm.AlarmId;
import org.onosproject.alarm.DefaultAlarm;
import java.io.IOException;
import static org.junit.Assert.*;
/**
* DefaultSnmpController test class.
*/
/**
 * Unit tests for {@link DefaultSnmpController}: lifecycle, device registry,
 * session caching and walk-failure alarm construction.
 */
public class DefaultSnmpControllerTest {

    ISnmpSessionFactory mockSnmpSessionFactory = new MockISnmpSessionFactory();

    DefaultSnmpController snmpController = new DefaultSnmpController();

    DefaultSnmpDevice device = new DefaultSnmpDevice("1.1.1.1", 1, "test", "test");

    ISnmpSession snmpSession = new ISnmpSessionAdapter();

    long time = System.currentTimeMillis();

    // Alarm the controller is expected to build when an SNMP walk fails.
    DefaultAlarm alarm = new DefaultAlarm.Builder(
            AlarmId.alarmId(device.deviceId(), Long.toString(time)),
            device.deviceId(), "SNMP alarm retrieval failed",
            Alarm.SeverityLevel.CRITICAL,
            time).build();

    @Before
    public void setUp() {
        snmpController.factoryMap = Maps.newHashMap();
        snmpController.factoryMap.put(1, mockSnmpSessionFactory);
    }

    @Test
    public void testActivate() {
        snmpController.activate(null);
        assertTrue("Snmp session factory map should contain at least one factory object",
                snmpController.factoryMap.size() > 0);
    }

    @Test
    public void testDeactivate() {
        snmpController.deactivate();
        assertEquals("Device map should be clear", 0, snmpController.getDevices().size());
        assertEquals("Session map should be clear", 0, snmpController.sessionMap.size());
    }

    @Test
    public void addDevice() {
        snmpController.addDevice(device);
        assertEquals("Controller should contain device", device, snmpController.getDevice(device.deviceId()));
    }

    /**
     * Tests session creation and, once present, retrieval from the cache map.
     */
    @Test
    public void getNotExistingSession() throws Exception {
        addDevice();
        assertEquals("Session should be created", snmpSession, snmpController.getSession(device.deviceId()));
        assertEquals("Map should contain session", 1, snmpController.snmpDeviceMap.size());
        assertEquals("Session should be fetched from map", snmpSession, snmpController.getSession(device.deviceId()));
    }

    @Test
    public void removeDevice() {
        addDevice();
        snmpController.removeDevice(device.deviceId());
        assertNull("Device should not be present", snmpController.getDevice(device.deviceId()));
    }

    @Test
    public void walkFailedAlarm() {
        assertEquals("Alarms should be equal", alarm, snmpController.buildWalkFailedAlarm(device.deviceId()));
    }

    /** Session factory stub that always hands back the shared test session. */
    public class MockISnmpSessionFactory implements ISnmpSessionFactory {

        @Override
        public ISnmpSession createSession(ISnmpConfiguration configuration, String ipAddress) throws IOException {
            return snmpSession;
        }

        @Override
        public ISnmpSession createSession(String ipAddress, String community)
                throws IOException {
            return snmpSession;
        }

        @Override
        public ISnmpSession createSession(String ipAddress, String community,
                                          String factoryName,
                                          ISnmpConfigurationFactory.AccessType accessType)
                throws IOException {
            return snmpSession;
        }
    }
}
|
opennetworkinglab/onos
|
protocols/snmp/ctl/src/test/java/org/onosproject/snmp/ctl/DefaultSnmpControllerTest.java
|
Java
|
apache-2.0
| 4,414 |
package com.zaaach.citypicker.db;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Environment;
import com.zaaach.citypicker.model.City;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* author Bro0cL on 2016/1/26.
*/
/**
 * Ships the bundled city database from assets into the app's databases folder
 * and provides read-only queries over it.
 *
 * author Bro0cL on 2016/1/26.
 */
public class DBManager {
    private static final String ASSETS_NAME = "china_cities.db";
    private static final String DB_NAME = "china_cities.db";
    private static final String TABLE_NAME = "city";
    private static final String NAME = "name";
    private static final String PINYIN = "pinyin";
    private static final int BUFFER_SIZE = 1024;
    // Absolute directory that holds the copied database file.
    private String DB_PATH;
    private Context mContext;

    public DBManager(Context context) {
        this.mContext = context;
        // NOTE(review): "/data" + getDataDirectory() ("/data") resolves to
        // /data/data/<pkg>/databases on stock Android, but only by coincidence
        // of the two segments -- confirm before relying on this on other devices.
        DB_PATH = File.separator + "data"
                + Environment.getDataDirectory().getAbsolutePath() + File.separator
                + context.getPackageName() + File.separator + "databases" + File.separator;
    }

    /**
     * Copies the bundled database out of assets on first use. Does nothing if
     * the target file already exists. I/O failures are logged and swallowed.
     */
    @SuppressWarnings("ResultOfMethodCallIgnored")
    public void copyDBFile() {
        File dir = new File(DB_PATH);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        File dbFile = new File(DB_PATH + DB_NAME);
        if (!dbFile.exists()) {
            // try-with-resources closes both streams even if the copy fails midway.
            try (InputStream is = mContext.getResources().getAssets().open(ASSETS_NAME);
                 OutputStream os = new FileOutputStream(dbFile)) {
                byte[] buffer = new byte[BUFFER_SIZE];
                int length;
                while ((length = is.read(buffer, 0, buffer.length)) > 0) {
                    os.write(buffer, 0, length);
                }
                os.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * @return all cities, sorted a-z by the first letter of their pinyin.
     */
    public List<City> getAllCities() {
        return queryCities("select * from " + TABLE_NAME, null);
    }

    /**
     * Searches cities whose name or pinyin contains the keyword. The keyword is
     * bound as a query parameter so user input cannot alter the SQL statement.
     *
     * @return matching cities, sorted a-z by the first letter of their pinyin.
     */
    public List<City> searchCity(final String keyword) {
        String pattern = "%" + keyword + "%";
        return queryCities("select * from " + TABLE_NAME + " where name like ? or pinyin like ?",
                new String[]{pattern, pattern});
    }

    /**
     * Runs the query and materializes the rows as {@link City} objects, closing
     * cursor and database even on failure, then sorts by pinyin initial.
     */
    private List<City> queryCities(String sql, String[] selectionArgs) {
        SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + DB_NAME, null);
        List<City> result = new ArrayList<>();
        Cursor cursor = db.rawQuery(sql, selectionArgs);
        try {
            while (cursor.moveToNext()) {
                String name = cursor.getString(cursor.getColumnIndex(NAME));
                String pinyin = cursor.getString(cursor.getColumnIndex(PINYIN));
                result.add(new City(name, pinyin));
            }
        } finally {
            cursor.close();
            db.close();
        }
        Collections.sort(result, new CityComparator());
        return result;
    }

    /**
     * Orders cities a-z, comparing only the first character of the pinyin.
     */
    private class CityComparator implements Comparator<City> {
        @Override
        public int compare(City lhs, City rhs) {
            String a = lhs.getPinyin().substring(0, 1);
            String b = rhs.getPinyin().substring(0, 1);
            return a.compareTo(b);
        }
    }
}
|
weiwenqiang/GitHub
|
SelectWidget/city/CityPicker/citypicker/src/main/java/com/zaaach/citypicker/db/DBManager.java
|
Java
|
apache-2.0
| 3,876 |
package com.huawei.esdk.sms.north.http.common;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import com.huawei.esdk.platform.common.utils.ESDKIOUtils;
import com.huawei.esdk.platform.common.utils.help.DocumentBuilderFactories;
import com.huawei.esdk.sms.north.http.bean.PlaceHolderBean;
/**
 * Template for XML processors: parses an XML document (from the classpath or a
 * raw string), walks the DOM depth-first and hands every leaf element to
 * {@link #processElement(Element)}, collecting the resulting
 * {@link PlaceHolderBean}s.
 */
public abstract class AbstractXMLProcessor implements IXMLProcessor
{
    private static final Logger LOGGER = Logger.getLogger(AbstractXMLProcessor.class);

    /**
     * Parses the named classpath XML file and processes its leaf elements.
     */
    @Override
    public List<PlaceHolderBean> processClasspathXMLFile(String fileName)
        throws ParserConfigurationException, SAXException, IOException
    {
        String xmlContent = ESDKIOUtils.getClasspathFileContent(fileName);
        return parseXML(xmlContent);
    }

    /**
     * Parses XML supplied as a string and processes its leaf elements.
     */
    @Override
    public List<PlaceHolderBean> processXML(String xmlContent)
        throws ParserConfigurationException, SAXException, IOException
    {
        return parseXML(xmlContent);
    }

    /**
     * Builds a DOM from the string (UTF-8 bytes, security-hardened factory)
     * and walks it starting at the document element.
     */
    protected List<PlaceHolderBean> parseXML(String xmlAsString)
        throws ParserConfigurationException, SAXException, IOException
    {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactories.newSecurityInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new InputSource(new ByteArrayInputStream(xmlAsString.getBytes("utf-8"))));
        doc.getDocumentElement().normalize();
        Element rootElement = doc.getDocumentElement();
        List<PlaceHolderBean> result = new ArrayList<PlaceHolderBean>();
        return parseNode(rootElement, result);
    }

    /**
     * Depth-first walk: elements with child elements are recursed into; leaf
     * elements are passed to {@link #processElement(Element)} and every
     * non-null result is added to the accumulator, which is also returned.
     */
    protected List<PlaceHolderBean> parseNode(Node nNode, List<PlaceHolderBean> placerHolders)
    {
        StringBuilder sb = new StringBuilder();
        if (LOGGER.isDebugEnabled())
        {
            sb.append("Current Node :").append(nNode.getNodeName());
            sb.append("|Node Type:").append(nNode.getNodeType());
            sb.append("|Node Value:").append(nNode.getNodeValue());
            sb.append("|Text Value:" + nNode.getTextContent());
            LOGGER.debug(sb.toString());
        }
        if (nNode.getNodeType() == Node.ELEMENT_NODE)
        {
            Element eElement = (Element)nNode;
            if (hasSubElement(nNode))
            {
                NodeList nList = nNode.getChildNodes();
                Node nodeItem;
                for (int temp = 0; temp < nList.getLength(); temp++)
                {
                    nodeItem = nList.item(temp);
                    parseNode(nodeItem, placerHolders);
                }
            }
            else
            {
                if (LOGGER.isDebugEnabled())
                {
                    sb.delete(0, sb.length());
                    sb.append("Tag Name:").append(eElement.getTagName());
                    sb.append("|Node Name:").append(eElement.getNodeName());
                    sb.append("|Node Value:").append(eElement.getNodeValue());
                    sb.append("|Text Content:").append(eElement.getTextContent());
                    LOGGER.debug(sb.toString());
                }
                //It's the element which hasn't child element and should be processed
                PlaceHolderBean placeHolder = processElement(eElement);
                if (null != placeHolder)
                {
                    placerHolders.add(placeHolder);
                }
            }
        }
        return placerHolders;
    }

    /**
     * @return true when the node is an element with at least one element child.
     */
    private boolean hasSubElement(Node node)
    {
        if (null == node || Node.ELEMENT_NODE != node.getNodeType())
        {
            return false;
        }
        NodeList nList = node.getChildNodes();
        Node nodeItem;
        for (int temp = 0; temp < nList.getLength(); temp++)
        {
            nodeItem = nList.item(temp);
            if (Node.ELEMENT_NODE == nodeItem.getNodeType())
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Turns one leaf element into a placeholder; return null to skip it.
     */
    protected abstract PlaceHolderBean processElement(Element element);
}
|
eSDK/esdk_sms
|
source/esdk_sms_neadp_http/src/main/java/com/huawei/esdk/sms/north/http/common/AbstractXMLProcessor.java
|
Java
|
apache-2.0
| 4,493 |
package com.example;
/**
* Created by Nish on 2/21/15.
*/
/**
 * Contract for objects that can be moved horizontally.
 */
public interface Movable {
    /** Moves this object one step to the left. */
    void moveLeft();

    /** Moves this object one step to the right. */
    void moveRight();
}
|
nishtahir/Mektory-BeginnersAndroid
|
Week2/mygame/src/main/java/com/example/Movable.java
|
Java
|
apache-2.0
| 147 |
package ch.unibe.scg.regex;
import static java.util.Collections.singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import ch.unibe.scg.regex.ParserProvider.Node;
import ch.unibe.scg.regex.ParserProvider.Node.Basic;
import ch.unibe.scg.regex.ParserProvider.Node.Group;
import ch.unibe.scg.regex.ParserProvider.Node.NonGreedyStar;
import ch.unibe.scg.regex.ParserProvider.Node.Optional;
import ch.unibe.scg.regex.ParserProvider.Node.Plus;
import ch.unibe.scg.regex.ParserProvider.Node.PositiveSet;
import ch.unibe.scg.regex.ParserProvider.Node.SetItem;
import ch.unibe.scg.regex.ParserProvider.Node.Simple;
import ch.unibe.scg.regex.ParserProvider.Node.Star;
import ch.unibe.scg.regex.ParserProvider.Node.Union;
import ch.unibe.scg.regex.TNFA.Builder;
import ch.unibe.scg.regex.Transition.Priority;
/**
* Not thread-safe! Use only from one thread at a time!
*
* @author nes
*/
/**
 * Compiles a parsed regular-expression AST ({@link Node}) into a tagged NFA
 * ({@link TNFA}) whose transitions carry capture-group tags.
 *
 * <p>Not thread-safe! Use only from one thread at a time!
 *
 * @author nes
 */
class RegexToNFA {
  final InputRangeCleanup inputRangeCleanup = new InputRangeCleanup();

  /**
   * Builds the automaton for {@code node}, wrapped in the start/end tag
   * transitions of the entire-match capture group, and marks the final
   * state as accepting.
   */
  TNFA convert(final Node node) {
    Collection<InputRange> allInputRanges = new ArrayList<>();
    allInputRanges.add(InputRange.ANY); // All regexes contain this implicitly.
    findRanges(node, allInputRanges);
    final Builder builder = Builder.make(allInputRanges);

    builder.registerCaptureGroup(builder.captureGroupMaker.entireMatch);

    final MiniAutomaton m =
        makeInitialMiniAutomaton(builder, builder.captureGroupMaker.entireMatch);

    final MiniAutomaton a = make(m, builder, node, builder.captureGroupMaker.entireMatch);

    final State endTagger = builder.makeState();
    builder.addEndTagTransition(a.finishing, endTagger, builder.captureGroupMaker.entireMatch,
        Priority.NORMAL);

    builder.setAsAccepting(endTagger);
    return builder.build();
  }

  /** Recursively collects every input range mentioned in the AST into {@code out}. */
  private void findRanges(Node n, Collection<InputRange> out) {
    if (n instanceof Node.SetItem) {
      out.add(((SetItem) n).inputRange);
    }
    for (Node c : n.getChildren()) {
      findRanges(c, out);
    }
  }

  /** A partial automaton: a set of entry states plus a set of exit states. */
  static class MiniAutomaton {
    final Collection<State> finishing;
    final Collection<State> initial;

    MiniAutomaton(final Collection<State> initial, final Collection<State> finishing) {
      if (initial.iterator().next() == null) {
        assert false;
      }
      this.initial = initial;
      this.finishing = finishing;
    }

    MiniAutomaton(final Collection<State> initial, final State finishing) {
      this(initial, singleton(finishing));
    }

    @Override
    public String toString() {
      return "" + initial + " -> " + finishing;
    }
  }

  /**
   * Dispatches on the concrete node type and appends the matching
   * sub-automaton after {@code last}.
   *
   * <p>Note: a duplicate, unreachable {@code Node.Char} branch that followed
   * the {@code Node.Eos} case has been removed; the first
   * {@code instanceof Node.Char} check always matched before it.
   */
  MiniAutomaton make(final MiniAutomaton last, final Builder builder, final Node node,
      CaptureGroup captureGroup) {
    MiniAutomaton ret;
    if (node instanceof Node.Any) {
      ret = makeAny(last, builder);
    } else if (node instanceof Node.Char) {
      ret = makeChar(last, builder, (Node.Char) node);
    } else if (node instanceof Node.Simple) {
      ret = makeSimple(last, builder, (Node.Simple) node, captureGroup);
    } else if (node instanceof Node.Optional) {
      ret = makeOptional(last, builder, (Node.Optional) node, captureGroup);
    } else if (node instanceof Node.NonGreedyStar) {
      ret = makeNonGreedyStar(last, builder, (Node.NonGreedyStar) node, captureGroup);
    } else if (node instanceof Node.Star) {
      ret = makeStar(last, builder, (Star) node, captureGroup);
    } else if (node instanceof Node.Plus) {
      ret = makePlus(last, builder, (Node.Plus) node, captureGroup);
    } else if (node instanceof Node.Group) {
      ret = makeGroup(last, builder, (Node.Group) node, captureGroup);
    } else if (node instanceof Node.Eos) {
      ret = makeEos(last, builder);
    } else if (node instanceof Node.PositiveSet) {
      ret = makePositiveSet(last, builder, (Node.PositiveSet) node);
    } else if (node instanceof Node.Union) {
      ret = makeUnion(last, builder, (Node.Union) node, captureGroup);
    } else {
      throw new AssertionError("Unknown node type: " + node);
    }

    assert !ret.initial.contains(null);
    assert !ret.finishing.contains(null);
    return ret;
  }

  /** `.` — consumes any single input symbol. */
  MiniAutomaton makeAny(final MiniAutomaton last, final Builder builder) {
    final State a = builder.makeState();

    builder.addUntaggedTransition(InputRange.ANY, last.finishing, a);

    return new MiniAutomaton(last.finishing, a);
  }

  /** A literal character (or character range). */
  MiniAutomaton makeChar(final MiniAutomaton last, final Builder b, final Node.Char character) {
    final State a = b.makeState();
    final MiniAutomaton ret = new MiniAutomaton(last.finishing, a);

    b.addUntaggedTransition(character.inputRange, ret.initial, a);

    return ret;
  }

  /** `$` — matches the end-of-stream pseudo-symbol. */
  MiniAutomaton makeEos(final MiniAutomaton last, final Builder builder) {
    final State a = builder.makeState();
    builder.addUntaggedTransition(InputRange.EOS, last.finishing, a);
    return new MiniAutomaton(last.finishing, a);
  }

  /** `( body )` — wraps the body between start/end tags of a fresh capture group. */
  MiniAutomaton makeGroup(final MiniAutomaton last, final Builder builder, final Group group,
      CaptureGroup parentCaptureGroup) {
    final CaptureGroup cg = builder.makeCaptureGroup(parentCaptureGroup);
    builder.registerCaptureGroup(cg);
    final State startGroup = builder.makeState();
    builder.addStartTagTransition(last.finishing, startGroup, cg, Priority.NORMAL);
    final MiniAutomaton startGroupAutomaton = new MiniAutomaton(singleton(startGroup), singleton(startGroup));
    final MiniAutomaton body = make(startGroupAutomaton, builder, group.body, cg);

    final State endTag = builder.makeState();
    builder.addEndTagTransition(body.finishing, endTag, cg, Priority.NORMAL);

    return new MiniAutomaton(last.finishing, endTag);
  }

  /** The machine head: initial state followed by the entire-match start tag. */
  MiniAutomaton makeInitialMiniAutomaton(final Builder builder, CaptureGroup entireMatch) {
    final State init = builder.makeInitialState();

    final State startTagger = builder.makeState();
    builder.addStartTagTransition(singleton(init), startTagger, entireMatch, Priority.NORMAL);

    return new MiniAutomaton(singleton(init), singleton(startTagger));
  }

  /** `e?` — the body may be skipped, so both its start and end states finish. */
  MiniAutomaton makeOptional(final MiniAutomaton last, final Builder builder,
      final Optional optional, CaptureGroup captureGroup) {
    final MiniAutomaton ma = make(last, builder, optional.elementary, captureGroup);

    final List<State> f = new ArrayList<>(last.finishing);
    f.addAll(ma.finishing);

    return new MiniAutomaton(last.finishing, f);
  }

  /** `e+` — one pass through the body, then a greedy (NORMAL-priority) loop back. */
  MiniAutomaton makePlus(final MiniAutomaton last, final Builder builder, final Plus plus,
      CaptureGroup captureGroup) {
    final MiniAutomaton inner = make(last, builder, plus.elementary, captureGroup);

    Collection<State> out = singleton(builder.makeState());
    builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, out, Priority.LOW);

    final MiniAutomaton ret = new MiniAutomaton(last.finishing, out);

    builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing,
        inner.initial, Priority.NORMAL);
    return ret;
  }

  /** `l|r` — left alternative takes priority (NORMAL) over right (LOW). */
  MiniAutomaton makeUnion(MiniAutomaton last, Builder builder, Union union,
      CaptureGroup captureGroup) {
    MiniAutomaton left = make(last, builder, union.left, captureGroup);
    MiniAutomaton right = make(last, builder, union.right, captureGroup);

    Collection<State> out = singleton(builder.makeState());
    builder.makeUntaggedEpsilonTransitionFromTo(left.finishing, out, Priority.NORMAL);
    builder.makeUntaggedEpsilonTransitionFromTo(right.finishing, out, Priority.LOW);

    return new MiniAutomaton(last.finishing, out);
  }

  /** `[...]` — one transition per cleaned-up, de-overlapped input range. */
  MiniAutomaton makePositiveSet(final MiniAutomaton last, final Builder builder,
      final PositiveSet set) {
    final List<SetItem> is = set.items;
    final SortedSet<InputRange> ranges = new TreeSet<>();
    for (final SetItem i : is) {
      ranges.add(i.inputRange);
    }
    final List<InputRange> rangesList = new ArrayList<>(ranges);
    final List<InputRange> cleanedRanges = inputRangeCleanup.cleanUp(rangesList);
    final State a = builder.makeState();
    for (InputRange range : cleanedRanges) {
      builder.addUntaggedTransition(range, last.finishing, a);
    }
    return new MiniAutomaton(last.finishing, a);
  }

  /** A concatenation: chain the basics left to right. */
  MiniAutomaton makeSimple(final MiniAutomaton last, final Builder b, final Simple simple,
      CaptureGroup captureGroup) {
    final List<? extends Basic> bs = simple.basics;

    MiniAutomaton lm = last;
    for (final Basic e : bs) {
      lm = make(lm, b, e, captureGroup);
    }

    return new MiniAutomaton(last.finishing, lm.finishing);
  }

  /** `e*?` — like {@link #makeStar} but leaving the loop has priority over re-entering. */
  MiniAutomaton makeNonGreedyStar(MiniAutomaton last, Builder builder, NonGreedyStar nonGreedyStar,
      CaptureGroup captureGroup) {
    // Make start state and connect.
    State start = builder.makeState();
    builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);

    // Make inner machine.
    MiniAutomaton innerLast = new MiniAutomaton(last.finishing, start);
    final MiniAutomaton inner = make(innerLast, builder, nonGreedyStar.elementary, captureGroup);

    // Connect inner machine back to start (LOW: repeating is dispreferred).
    builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.LOW);

    // Make and connect `out` state.
    State out = builder.makeState();
    builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.NORMAL);

    return new MiniAutomaton(last.finishing, out);
  }

  /** `e*` — greedy loop: repeating has priority over leaving. */
  MiniAutomaton makeStar(final MiniAutomaton last, final Builder builder, final Star star,
      CaptureGroup captureGroup) {
    // Make start state and connect.
    State start = builder.makeState();
    builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);

    // Make inner machine.
    MiniAutomaton innerLast = new MiniAutomaton(singleton(start), start);
    final MiniAutomaton inner = make(innerLast, builder, star.elementary, captureGroup);

    // Connect inner machine back to start (NORMAL: repeating is preferred).
    builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.NORMAL);

    // Make and connect `out` state.
    State out = builder.makeState();
    builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.LOW);

    return new MiniAutomaton(last.finishing, out);
  }
}
|
nes1983/tree-regex
|
src/ch/unibe/scg/regex/RegexToNFA.java
|
Java
|
apache-2.0
| 10,402 |
/*
* Copyright (c) 2015 TextGlass
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
/**
 * {@link Transformer} that validates its input is a parseable number and
 * passes it through unchanged.
 */
public class TransformerIsNumber implements Transformer {

    /**
     * Returns {@code input} unchanged if it parses as a double.
     *
     * @param input candidate numeric string
     * @return the unchanged input
     * @throws Exception if the input is not a parseable number; the original
     *         {@link NumberFormatException} is kept as the cause so the full
     *         stack trace is preserved (the message is identical to the old
     *         {@code nfe.toString()} behavior)
     */
    @Override
    public String transform(String input) throws Exception {
        try {
            Double.parseDouble(input);
        } catch (NumberFormatException nfe) {
            // Chain the cause instead of flattening it to a string.
            throw new Exception(nfe);
        }

        return input;
    }

    @Override
    public String toString() {
        return "TransformerIsNumber";
    }
}
|
TextGlass/reference
|
client/src/TransformerIsNumber.java
|
Java
|
apache-2.0
| 972 |
/*
* Copyright (c) 2017 Martin Pfeffer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pepperonas.materialdialog.adapter;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ResolveInfo;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.pepperonas.materialdialog.R;
import com.pepperonas.materialdialog.utils.Utils;
import java.util.List;
/**
* @author Martin Pfeffer (pepperonas)
*/
/**
 * List adapter that shows all activities able to handle a plain-text
 * ACTION_SEND intent (i.e. share targets), with their launcher icon and label.
 *
 * @author Martin Pfeffer (pepperonas)
 */
public class ShareAdapter extends BaseAdapter {

    // Resolved share targets; each element is a ResolveInfo.
    private Object[] items;

    private LayoutInflater mInflater;
    private Context mCtx;
    // Optional custom typeface for the label; null means default.
    private Typeface mTypeface;

    /**
     * Creates the adapter with the default typeface.
     *
     * @param context context used for inflation and package queries
     */
    public ShareAdapter(@NonNull Context context) {
        this(context, null);
    }

    /**
     * Creates the adapter with a custom label typeface.
     *
     * @param context  context used for inflation and package queries
     * @param typeface typeface applied to each label, or null for default
     */
    public ShareAdapter(@NonNull Context context, Typeface typeface) {
        this.mInflater = LayoutInflater.from(context);

        Intent sendIntent = new Intent(android.content.Intent.ACTION_SEND);
        sendIntent.setType("text/plain");
        // Query once up front; the result set is static for the adapter's lifetime.
        List<ResolveInfo> activities = context.getPackageManager().queryIntentActivities(sendIntent, 0);
        items = activities.toArray();
        mCtx = context;
        mTypeface = typeface;
    }

    public int getCount() {
        return items.length;
    }

    public Object getItem(int position) {
        return items[position];
    }

    public long getItemId(int position) {
        return position;
    }

    public View getView(int position, View convertView, ViewGroup parent) {
        ViewHolder holder;

        if (convertView == null) {
            convertView = mInflater.inflate(R.layout.custom_list_item_share_app, null);
            holder = new ViewHolder();
            holder.logo = (ImageView) convertView.findViewById(R.id.iv_simple_list_item_share_app);
            holder.name = (TextView) convertView.findViewById(R.id.tv_simple_list_item_share_app);
            if (mTypeface != null) {
                holder.name.setTypeface(mTypeface);
            }
            convertView.setTag(holder);
        } else {
            // Recycled row: reuse the cached view references.
            holder = (ViewHolder) convertView.getTag();
        }

        ResolveInfo info = (ResolveInfo) items[position];
        holder.name.setText(info.activityInfo
                .applicationInfo.loadLabel(mCtx.getPackageManager()).toString());
        holder.logo.setImageDrawable(info.activityInfo
                .applicationInfo.loadIcon(mCtx.getPackageManager()));

        LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT);
        layoutParams.setMargins(
                Utils.dp2px(mCtx, 16),
                Utils.dp2px(mCtx, 4),
                Utils.dp2px(mCtx, 4),
                Utils.dp2px(mCtx, 4));
        holder.logo.setLayoutParams(layoutParams);

        return convertView;
    }

    /** View cache to avoid repeated findViewById on recycled rows. */
    static class ViewHolder {

        TextView name;
        ImageView logo;
    }
}
|
pepperonas/MaterialDialog
|
library/src/main/java/com/pepperonas/materialdialog/adapter/ShareAdapter.java
|
Java
|
apache-2.0
| 3,990 |
package com.siqisoft.stone.admin.dict.controller;
import java.util.List;
import org.siqisource.stone.dict.model.Dict;
import org.siqisource.stone.dict.service.DictService;
import org.siqisource.stone.orm.condition.Condition;
import org.siqisource.stone.ui.AjaxResponse;
import org.siqisource.stone.ui.Notify;
import org.siqisource.stone.ui.easyui.PagedRows;
import org.siqisource.stone.ui.easyui.Paging;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.siqisoft.stone.admin.dict.service.DictConditionBuilder;
/**
 * MVC controller for browsing and maintaining dictionary ({@link Dict}) entries.
 */
@Controller
public class DictController {

    @Autowired
    DictService service;

    /** Shows the dictionary list page. */
    @RequestMapping("/dict/DictList.do")
    public String list(Model model) {
        return "dict/DictList";
    }

    /**
     * Returns one page of dictionary rows for the list grid.
     *
     * @param dictQueryForm filter criteria from the list page
     * @param paging        requested page/size
     * @return total row count plus the rows of the requested page
     */
    @RequestMapping("/dict/dictListData.do")
    @ResponseBody
    public PagedRows<Dict> listData(DictQueryForm dictQueryForm, Paging paging) {
        Condition condition = DictConditionBuilder.listCondition(dictQueryForm);
        int count = service.count(condition);
        List<Dict> dictList = service.list(condition, paging.getRowBounds());
        return new PagedRows<Dict>(count, dictList);
    }

    /** Shows the read-only detail page for one entry. */
    @RequestMapping("/dict/DictRead.do")
    public String read(String code, Model model) {
        Dict dict = service.read(code);
        model.addAttribute("dict", dict);
        return "dict/DictRead";
    }

    /** Shows the empty "add entry" form. */
    @RequestMapping("/dict/DictAddInit.do")
    public String addInit(Dict dict, Model model) {
        return "dict/DictAdd";
    }

    /** Persists a new entry and shows its detail page. */
    @RequestMapping("/dict/DictAdd.do")
    public String add(Dict dict, Model model) {
        service.insert(dict);
        return this.read(dict.getCode(), model);
    }

    /**
     * Deletes the given entries in one batch.
     *
     * <p>Bug fix: the previous version null-checked {@code codeList} but then
     * unconditionally read {@code codeList.length} when building the message,
     * throwing a NullPointerException for an empty request.
     *
     * @param codeList codes to delete; may be null or empty (deletes nothing)
     * @return a notification stating how many records were deleted
     */
    @RequestMapping("/dict/dictDelete.do")
    @ResponseBody
    public AjaxResponse delete(String[] codeList, Model model) {
        // TODO: check whether any of these entries are still referenced before deleting.
        int deleted = (codeList == null) ? 0 : codeList.length;
        if (deleted > 0) {
            service.deleteBatch(codeList);
        }
        return new Notify("成功删除" + deleted + "条记录");
    }

    /** Shows the edit form pre-filled with the entry's current values. */
    @RequestMapping("/dict/DictEditInit.do")
    public String editInit(String code, Model model) {
        Dict dict = service.read(code);
        model.addAttribute("dict", dict);
        return "dict/DictEdit";
    }

    /** Applies the edit and shows the updated detail page. */
    @RequestMapping("/dict/DictEdit.do")
    public String edit(Dict dict, Model model) {
        service.update(dict);
        return this.read(dict.getCode(), model);
    }
}
|
ylyxf/stone-sdk
|
src/main/java/com/siqisoft/stone/admin/dict/controller/DictController.java
|
Java
|
apache-2.0
| 2,471 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.listeners;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiElement;
/**
* Refactorings invoke {@link #getListener(com.intellij.psi.PsiElement)} of registered
* {@linkplain RefactoringElementListenerProvider} before particular element is subjected to refactoring.
* @author dsl
*/
public interface RefactoringElementListenerProvider {

  /** Extension point through which plugins register their listener providers. */
  ExtensionPointName<RefactoringElementListenerProvider> EP_NAME = ExtensionPointName.create("com.intellij.refactoring.elementListenerProvider");

  /**
   * Returns a listener to be notified when the given element is refactored,
   * or {@code null} if this provider is not interested in the element.
   *
   * <p>Invoked in a read action, before the refactoring touches the element.
   *
   * @param element the element about to be subjected to a refactoring
   * @return a listener for this element, or {@code null}
   */
  @javax.annotation.Nullable
  RefactoringElementListener getListener(PsiElement element);
}
|
consulo/consulo
|
modules/base/lang-api/src/main/java/com/intellij/refactoring/listeners/RefactoringElementListenerProvider.java
|
Java
|
apache-2.0
| 1,347 |
/*
* Copyright 2016 Shredder121.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.shredder121.gh_event_api.handler.pull_request;
/**
* The handler interface for receiving {@code pull_request} events.
*
* @author Shredder121
*/
@FunctionalInterface
public interface PullRequestHandler {

    /**
     * Invoked for every received {@code pull_request} event.
     *
     * @param payload the deserialized event payload
     */
    void handle(PullRequestPayload payload);
}
|
johnktims/gh-event-api
|
src/main/java/com/github/shredder121/gh_event_api/handler/pull_request/PullRequestHandler.java
|
Java
|
apache-2.0
| 873 |
package com.nguyenmanhtuan.benhandientu;
import android.app.Activity;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.HashMap;
import java.util.Locale;
import com.nguyenmanhtuan.utils.DatabaseHandler;
/**
 * Screen shown after successful registration: displays the locally stored
 * registration details and offers a button that leads to the login screen.
 */
public class RegisteredActivity extends Activity {

    // Locale applied by setLocale(); kept as a field for the activity's lifetime.
    private Locale myLocale;

    /**
     * Called when the activity is first created. Reads the cached user record
     * from the local database and fills the detail text views.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_registered);

        DatabaseHandler db = new DatabaseHandler(getApplicationContext());
        // Fix: previously a fresh HashMap was allocated and immediately
        // overwritten by this call; assign the query result directly.
        HashMap<String, String> user = db.getUserDetails();

        /**
         * Displays the registration details in Text view
         **/
        final TextView fname = (TextView) findViewById(R.id.fname);
        final TextView lname = (TextView) findViewById(R.id.lname);
        final TextView uname = (TextView) findViewById(R.id.uname);
        final TextView email = (TextView) findViewById(R.id.email);
        final TextView address = (TextView) findViewById(R.id.tvadd);
        final TextView phonenumber = (TextView) findViewById(R.id.tvphone);
        final TextView birthyear = (TextView) findViewById(R.id.tvBirthyear);
        final TextView created_at = (TextView) findViewById(R.id.regat);

        fname.setText(user.get("fname"));
        lname.setText(user.get("lname"));
        uname.setText(user.get("uname"));
        email.setText(user.get("email"));
        address.setText(user.get("address"));
        phonenumber.setText(user.get("phonenumber"));
        birthyear.setText(user.get("birthyear"));
        created_at.setText(user.get("created_at"));

        Button login = (Button) findViewById(R.id.login);
        login.setOnClickListener(new View.OnClickListener() {
            public void onClick(View view) {
                Intent myIntent = new Intent(view.getContext(), LoginActivity.class);
                startActivityForResult(myIntent, 0);
                finish();
            }
        });
    }

    /**
     * Switches the app's locale and restarts this activity so the new
     * resources take effect.
     *
     * <p>NOTE(review): {@code Configuration.locale} direct assignment and
     * {@code Resources.updateConfiguration} are deprecated on newer API
     * levels — consider {@code Configuration.setLocale} / context recreation
     * when the minimum SDK allows. Kept as-is to preserve behavior.
     *
     * @param lang ISO language code, e.g. "en" or "vi"
     */
    public void setLocale(String lang) {
        myLocale = new Locale(lang);
        Resources res = getResources();
        DisplayMetrics dm = res.getDisplayMetrics();
        Configuration conf = res.getConfiguration();
        conf.locale = myLocale;
        res.updateConfiguration(conf, dm);
        Intent refresh = new Intent(this, RegisteredActivity.class);
        startActivity(refresh);
    }
}
|
techmaster-prj/BenhAnDienTu
|
BenhAnDienTu/src/com/nguyenmanhtuan/benhandientu/RegisteredActivity.java
|
Java
|
apache-2.0
| 2,788 |
package com.vertabelo.mobileorm.myplaces.orm.gen;
public class AddressViewDAOImpl
extends com.vertabelo.mobileorm.myplaces.orm.runtime.dao.BaseDAO<AddressView>
implements AddressViewDAO {
public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource) {
super(dataSource);
}
public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource,
com.vertabelo.mobileorm.myplaces.orm.runtime.util.DAOMonitor daoMonitor) {
super(dataSource, daoMonitor);
}
@Override
public Class<AddressView> getPojoClass() {
return POJO_CLASS;
}
@Override
public com.vertabelo.mobileorm.myplaces.orm.runtime.query.TableExpression getTableExpression() {
return TABLE_EXPRESSION;
}
@Override
public com.vertabelo.mobileorm.myplaces.orm.runtime.util.ResultSetHandler getResultSetHandler() {
return RESULT_SET_HANDLER;
}
@Override
public java.util.List<AddressView> getAddressViewList() {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.orderBy(orderBy);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection asc) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.orderBy(orderBy, asc);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
query.orderBy(orderBy);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection asc) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
query.orderBy(orderBy, asc);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public Long getCount() {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT",
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK));
java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList();
if (list.size() > 1) {
throw new RuntimeException("More than one object returned");
} else if (list.size() == 1) {
return list.get(0);
} else {
throw new RuntimeException("Cannot retrieve count() method result");
}
}
@Override
public Long getCount(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT",
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK));
query.setWhere(where);
java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList();
if (list.size() > 1) {
throw new RuntimeException("More than one object returned");
} else if (list.size() == 1) {
return list.get(0);
} else {
throw new RuntimeException("Cannot retrieve count() method result");
}
}
}
|
Vertabelo/mobiorm-demo-android
|
app/src/main/java/com/vertabelo/mobileorm/myplaces/orm/gen/AddressViewDAOImpl.java
|
Java
|
apache-2.0
| 6,557 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.operator;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.SubEvent;
import org.apache.flink.cep.nfa.NFA;
import org.apache.flink.cep.nfa.compiler.NFACompiler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OperatorSnapshotUtil;
import org.apache.flink.streaming.util.migration.MigrationTestUtil;
import org.apache.flink.streaming.util.migration.MigrationVersion;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;
import static org.apache.flink.cep.operator.CepOperatorTestUtilities.getKeyedCepOpearator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests for checking whether CEP operator can restore from snapshots that were done
* using previous Flink versions.
*
* <p>For regenerating the binary snapshot file of previous versions you have to run the
* {@code write*()} method on the corresponding Flink release-* branch.
*/
@RunWith(Parameterized.class)
public class CEPMigrationTest {
/**
* TODO change this to the corresponding savepoint version to be written (e.g. {@link MigrationVersion#v1_3} for 1.3)
* TODO and remove all @Ignore annotations on write*Snapshot() methods to generate savepoints
*/
private final MigrationVersion flinkGenerateSavepointVersion = null;
private final MigrationVersion migrateVersion;
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
return Arrays.asList(MigrationVersion.v1_3, MigrationVersion.v1_4, MigrationVersion.v1_5);
}
public CEPMigrationTest(MigrationVersion migrateVersion) {
this.migrateVersion = migrateVersion;
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeAfterBranchingPatternSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<Event>(startEvent, 1));
harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
harness
.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
harness.processElement(new StreamRecord<Event>(middleEvent2, 3));
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-after-branching-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
/**
 * Restores the savepoint written by {@link #writeAfterBranchingPatternSnapshot()},
 * completes both branching partial matches with an end event, and then verifies that
 * a fresh snapshot/restore cycle with the current serializers still works.
 */
@Test
public void testRestoreAfterBranchingPattern() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // Same events as in the snapshot-writing method; both restored partial matches
    // are expected to complete once the end event arrives.
    final Event startEvent = new Event(42, "start", 1.0);
    final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
    final Event endEvent = new Event(42, "end", 1.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new NFAFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        MigrationTestUtil.restoreFromSnapshot(
                harness,
                OperatorSnapshotUtil.getResourceFilename("cep-migration-after-branching-flink" + migrateVersion + "-snapshot"),
                migrateVersion);
        harness.open();
        harness.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4));
        harness.processElement(new StreamRecord<>(endEvent, 5));
        harness.processWatermark(new Watermark(20));
        ConcurrentLinkedQueue<Object> result = harness.getOutput();
        // watermark and 2 results
        assertEquals(3, result.size());
        Object resultObject1 = result.poll();
        assertTrue(resultObject1 instanceof StreamRecord);
        StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
        assertTrue(resultRecord1.getValue() instanceof Map);
        Object resultObject2 = result.poll();
        assertTrue(resultObject2 instanceof StreamRecord);
        StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
        assertTrue(resultRecord2.getValue() instanceof Map);
        // First match uses middleEvent1, second uses middleEvent2 (one per branch).
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap1 =
                (Map<String, List<Event>>) resultRecord1.getValue();
        assertEquals(startEvent, patternMap1.get("start").get(0));
        assertEquals(middleEvent1, patternMap1.get("middle").get(0));
        assertEquals(endEvent, patternMap1.get("end").get(0));
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap2 =
                (Map<String, List<Event>>) resultRecord2.getValue();
        assertEquals(startEvent, patternMap2.get("start").get(0));
        assertEquals(middleEvent2, patternMap2.get("middle").get(0));
        assertEquals(endEvent, patternMap2.get("end").get(0));
        // and now go for a checkpoint with the new serializers
        final Event startEvent1 = new Event(42, "start", 2.0);
        final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
        final Event endEvent1 = new Event(42, "end", 2.0);
        harness.processElement(new StreamRecord<Event>(startEvent1, 21));
        harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
        // simulate snapshot/restore with some elements in internal sorting queue
        OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
        harness.close();
        harness = new KeyedOneInputStreamOperatorTestHarness<>(
                getKeyedCepOpearator(false, new NFAFactory()),
                keySelector,
                BasicTypeInfo.INT_TYPE_INFO);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(endEvent1, 25));
        harness.processWatermark(new Watermark(50));
        result = harness.getOutput();
        // watermark and the result
        assertEquals(2, result.size());
        Object resultObject3 = result.poll();
        assertTrue(resultObject3 instanceof StreamRecord);
        StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
        assertTrue(resultRecord3.getValue() instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap3 =
                (Map<String, List<Event>>) resultRecord3.getValue();
        assertEquals(startEvent1, patternMap3.get("start").get(0));
        assertEquals(middleEvent3, patternMap3.get("middle").get(0));
        assertEquals(endEvent1, patternMap3.get("end").get(0));
    } finally {
        harness.close();
    }
}
/**
 * Manually run this to write binary snapshot data.
 *
 * <p>Feeds one started pattern (start + one middle event) into the CEP operator and
 * snapshots it into the versioned resource file restored by
 * {@link #testRestoreStartingNewPatternAfterMigration()}.
 */
@Ignore
@Test
public void writeStartingNewPatternAfterMigrationSnapshot() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    final Event startEvent1 = new Event(42, "start", 1.0);
    final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new NFAFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        harness.open();
        // Start the pattern and advance it by one matching middle event; the other
        // two events do not match and are ignored by the NFA.
        harness.processElement(new StreamRecord<Event>(startEvent1, 1));
        harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
        harness
                .processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
        harness.processWatermark(new Watermark(5));
        // do snapshot and save to file
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        OperatorSnapshotUtil.writeStateHandle(snapshot,
                "src/test/resources/cep-migration-starting-new-pattern-flink" + flinkGenerateSavepointVersion + "-snapshot");
    } finally {
        harness.close();
    }
}
/**
 * Restores the savepoint written by {@link #writeStartingNewPatternAfterMigrationSnapshot()},
 * starts a second overlapping pattern after the restore, and expects three completed
 * matches (two from the restored partial state, one entirely post-restore), followed by
 * another snapshot/restore round-trip with the current serializers.
 */
@Test
public void testRestoreStartingNewPatternAfterMigration() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // startEvent1/middleEvent1 were written into the savepoint; the rest are new.
    final Event startEvent1 = new Event(42, "start", 1.0);
    final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    final Event startEvent2 = new Event(42, "start", 5.0);
    final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
    final Event endEvent = new Event(42, "end", 1.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new NFAFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        MigrationTestUtil.restoreFromSnapshot(
                harness,
                OperatorSnapshotUtil.getResourceFilename("cep-migration-starting-new-pattern-flink" + migrateVersion + "-snapshot"),
                migrateVersion);
        harness.open();
        harness.processElement(new StreamRecord<>(startEvent2, 5));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 6));
        harness.processElement(new StreamRecord<>(endEvent, 7));
        harness.processWatermark(new Watermark(20));
        ConcurrentLinkedQueue<Object> result = harness.getOutput();
        // watermark and 3 results
        assertEquals(4, result.size());
        Object resultObject1 = result.poll();
        assertTrue(resultObject1 instanceof StreamRecord);
        StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
        assertTrue(resultRecord1.getValue() instanceof Map);
        Object resultObject2 = result.poll();
        assertTrue(resultObject2 instanceof StreamRecord);
        StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
        assertTrue(resultRecord2.getValue() instanceof Map);
        Object resultObject3 = result.poll();
        assertTrue(resultObject3 instanceof StreamRecord);
        StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
        assertTrue(resultRecord3.getValue() instanceof Map);
        // Matches 1 and 2 begin with the restored startEvent1 (with either middle
        // event); match 3 is the fully post-restore pattern starting at startEvent2.
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap1 =
                (Map<String, List<Event>>) resultRecord1.getValue();
        assertEquals(startEvent1, patternMap1.get("start").get(0));
        assertEquals(middleEvent1, patternMap1.get("middle").get(0));
        assertEquals(endEvent, patternMap1.get("end").get(0));
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap2 =
                (Map<String, List<Event>>) resultRecord2.getValue();
        assertEquals(startEvent1, patternMap2.get("start").get(0));
        assertEquals(middleEvent2, patternMap2.get("middle").get(0));
        assertEquals(endEvent, patternMap2.get("end").get(0));
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap3 =
                (Map<String, List<Event>>) resultRecord3.getValue();
        assertEquals(startEvent2, patternMap3.get("start").get(0));
        assertEquals(middleEvent2, patternMap3.get("middle").get(0));
        assertEquals(endEvent, patternMap3.get("end").get(0));
        // and now go for a checkpoint with the new serializers
        final Event startEvent3 = new Event(42, "start", 2.0);
        final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
        final Event endEvent1 = new Event(42, "end", 2.0);
        harness.processElement(new StreamRecord<Event>(startEvent3, 21));
        harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
        // simulate snapshot/restore with some elements in internal sorting queue
        OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
        harness.close();
        harness = new KeyedOneInputStreamOperatorTestHarness<>(
                getKeyedCepOpearator(false, new NFAFactory()),
                keySelector,
                BasicTypeInfo.INT_TYPE_INFO);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(endEvent1, 25));
        harness.processWatermark(new Watermark(50));
        result = harness.getOutput();
        // watermark and the result
        assertEquals(2, result.size());
        Object resultObject4 = result.poll();
        assertTrue(resultObject4 instanceof StreamRecord);
        StreamRecord<?> resultRecord4 = (StreamRecord<?>) resultObject4;
        assertTrue(resultRecord4.getValue() instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap4 =
                (Map<String, List<Event>>) resultRecord4.getValue();
        assertEquals(startEvent3, patternMap4.get("start").get(0));
        assertEquals(middleEvent3, patternMap4.get("middle").get(0));
        assertEquals(endEvent1, patternMap4.get("end").get(0));
    } finally {
        harness.close();
    }
}
/**
 * Manually run this to write binary snapshot data.
 *
 * <p>Snapshots an operator built from {@link SinglePatternNFAFactory} with no buffered
 * events (only a watermark) into the versioned resource file restored by
 * {@link #testSinglePatternAfterMigration()}.
 */
@Ignore
@Test
public void writeSinglePatternAfterMigrationSnapshot() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // Declared for symmetry with the restore test; not fed into the harness here.
    final Event startEvent1 = new Event(42, "start", 1.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        harness.open();
        harness.processWatermark(new Watermark(5));
        // do snapshot and save to file
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        OperatorSnapshotUtil.writeStateHandle(snapshot,
                "src/test/resources/cep-migration-single-pattern-afterwards-flink" + flinkGenerateSavepointVersion + "-snapshot");
    } finally {
        harness.close();
    }
}
/**
 * Restores the (empty-state) savepoint written by
 * {@link #writeSinglePatternAfterMigrationSnapshot()} and verifies that a
 * single-element pattern processed after migration produces exactly one match.
 */
@Test
public void testSinglePatternAfterMigration() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    final Event startEvent1 = new Event(42, "start", 1.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        MigrationTestUtil.restoreFromSnapshot(
                harness,
                OperatorSnapshotUtil.getResourceFilename("cep-migration-single-pattern-afterwards-flink" + migrateVersion + "-snapshot"),
                migrateVersion);
        harness.open();
        harness.processElement(new StreamRecord<>(startEvent1, 5));
        harness.processWatermark(new Watermark(20));
        ConcurrentLinkedQueue<Object> result = harness.getOutput();
        // watermark and the result
        assertEquals(2, result.size());
        Object resultObject = result.poll();
        assertTrue(resultObject instanceof StreamRecord);
        StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
        assertTrue(resultRecord.getValue() instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap =
                (Map<String, List<Event>>) resultRecord.getValue();
        assertEquals(startEvent1, patternMap.get("start").get(0));
    } finally {
        harness.close();
    }
}
/**
 * Manually run this to write binary snapshot data.
 *
 * <p>Feeds the first of the two events required by the {@code times(2)} pattern from
 * {@link NFAComplexConditionsFactory} and snapshots the partial match into the versioned
 * resource file restored by {@link #testAndOrSubtypeConditionsAfterMigration()}.
 */
@Ignore
@Test
public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // SubEvent with volume 6.0 satisfies the MiddleFilter branch of the or-condition.
    final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        harness.open();
        harness.processElement(new StreamRecord<>(startEvent1, 5));
        harness.processWatermark(new Watermark(6));
        // do snapshot and save to file
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        OperatorSnapshotUtil.writeStateHandle(snapshot,
                "src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot");
    } finally {
        harness.close();
    }
}
/**
 * Restores the savepoint written by
 * {@link #writeAndOrSubtypConditionsPatternAfterMigrationSnapshot()} and completes the
 * {@code times(2)} pattern with a second matching event, expecting both events to be
 * collected under the "start" key of the single resulting match.
 */
@Test
public void testAndOrSubtypeConditionsAfterMigration() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;
        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
                    keySelector,
                    BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        MigrationTestUtil.restoreFromSnapshot(
                harness,
                OperatorSnapshotUtil.getResourceFilename("cep-migration-conditions-flink" + migrateVersion + "-snapshot"),
                migrateVersion);
        harness.open();
        // "end" matches via the SubEventEndFilter branch of the or-condition.
        final Event endEvent = new SubEvent(42, "end", 1.0, 2.0);
        harness.processElement(new StreamRecord<>(endEvent, 9));
        harness.processWatermark(new Watermark(20));
        ConcurrentLinkedQueue<Object> result = harness.getOutput();
        // watermark and the result
        assertEquals(2, result.size());
        Object resultObject = result.poll();
        assertTrue(resultObject instanceof StreamRecord);
        StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
        assertTrue(resultRecord.getValue() instanceof Map);
        @SuppressWarnings("unchecked")
        Map<String, List<Event>> patternMap =
                (Map<String, List<Event>>) resultRecord.getValue();
        // times(2) on "start" collects both events under the same key.
        assertEquals(startEvent1, patternMap.get("start").get(0));
        assertEquals(endEvent, patternMap.get("start").get(1));
    } finally {
        harness.close();
    }
}
/**
 * {@link NFACompiler.NFAFactory} building an NFA for the simplest pattern used by these
 * tests: a single "start" step that must occur within a 10 ms window.
 */
private static class SinglePatternNFAFactory implements NFACompiler.NFAFactory<Event> {

    private static final long serialVersionUID = 1173020762472766713L;

    // Whether the compiled NFA should emit timed-out partial matches.
    private final boolean handleTimeout;

    private SinglePatternNFAFactory() {
        this(false);
    }

    private SinglePatternNFAFactory(boolean handleTimeout) {
        this.handleTimeout = handleTimeout;
    }

    @Override
    public NFA<Event> createNFA() {
        // Build and compile the pattern in one expression; no intermediate state needed.
        return NFACompiler.compileFactory(
                Pattern.<Event>begin("start")
                        .where(new StartFilter())
                        .within(Time.milliseconds(10L)),
                handleTimeout)
                .createNFA();
    }
}
/**
 * {@link NFACompiler.NFAFactory} exercising subtype, and/or, and times() conditions:
 * two {@link SubEvent}s matching either the middle or the end filter within 10 ms.
 */
private static class NFAComplexConditionsFactory implements NFACompiler.NFAFactory<Event> {

    private static final long serialVersionUID = 1173020762472766713L;

    // Whether the compiled NFA should emit timed-out partial matches.
    private final boolean handleTimeout;

    private NFAComplexConditionsFactory() {
        this(false);
    }

    private NFAComplexConditionsFactory(boolean handleTimeout) {
        this.handleTimeout = handleTimeout;
    }

    @Override
    public NFA<Event> createNFA() {
        // Build and compile the pattern in one expression; no intermediate state needed.
        return NFACompiler.compileFactory(
                Pattern.<Event>begin("start")
                        .subtype(SubEvent.class)
                        .where(new MiddleFilter())
                        .or(new SubEventEndFilter())
                        .times(2)
                        .within(Time.milliseconds(10L)),
                handleTimeout)
                .createNFA();
    }
}
/**
 * {@link NFACompiler.NFAFactory} for the three-step "start" / "middle" / "end" pattern
 * used by most tests in this class, with a 10 ms window.
 */
private static class NFAFactory implements NFACompiler.NFAFactory<Event> {

    private static final long serialVersionUID = 1173020762472766713L;

    // Whether the compiled NFA should emit timed-out partial matches.
    private final boolean handleTimeout;

    private NFAFactory() {
        this(false);
    }

    private NFAFactory(boolean handleTimeout) {
        this.handleTimeout = handleTimeout;
    }

    @Override
    public NFA<Event> createNFA() {
        // add a window timeout to test whether timestamps of elements in the
        // priority queue in CEP operator are correctly checkpointed/restored
        return NFACompiler.compileFactory(
                Pattern.<Event>begin("start")
                        .where(new StartFilter())
                        .followedByAny("middle")
                        .subtype(SubEvent.class)
                        .where(new MiddleFilter())
                        .followedByAny("end")
                        .where(new EndFilter())
                        .within(Time.milliseconds(10L)),
                handleTimeout)
                .createNFA();
    }
}
/** Condition accepting only events whose name is exactly "start". */
private static class StartFilter extends SimpleCondition<Event> {
    private static final long serialVersionUID = 5726188262756267490L;

    @Override
    public boolean filter(Event value) throws Exception {
        final String name = value.getName();
        return name.equals("start");
    }
}
/** Condition accepting only sub-events with volume strictly greater than 5.0. */
private static class MiddleFilter extends SimpleCondition<SubEvent> {
    private static final long serialVersionUID = 6215754202506583964L;

    @Override
    public boolean filter(SubEvent value) throws Exception {
        final double volume = value.getVolume();
        return volume > 5.0;
    }
}
/** Condition accepting only events whose name is exactly "end". */
private static class EndFilter extends SimpleCondition<Event> {
    private static final long serialVersionUID = 7056763917392056548L;

    @Override
    public boolean filter(Event value) throws Exception {
        final String name = value.getName();
        return name.equals("end");
    }
}
/** Condition accepting only sub-events whose name is exactly "end". */
private static class SubEventEndFilter extends SimpleCondition<SubEvent> {
    private static final long serialVersionUID = 7056763917392056548L;

    @Override
    public boolean filter(SubEvent value) throws Exception {
        final String name = value.getName();
        return name.equals("end");
    }
}
}
|
zhangminglei/flink
|
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/operator/CEPMigrationTest.java
|
Java
|
apache-2.0
| 24,021 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.generation;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.intention.AddAnnotationPsiFix;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_EXTERNAL;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_TYPE;
/**
 * Extension point that decides which annotations are carried over from a source method
 * (the overridden/implemented member) to its generated override/implementation.
 *
 * @author anna
 */
public interface OverrideImplementsAnnotationsHandler {
    ExtensionPointName<OverrideImplementsAnnotationsHandler> EP_NAME = ExtensionPointName.create("com.intellij.overrideImplementsAnnotationsHandler");
    /**
     * Returns annotations which should be copied from a source to an implementation (by default, no annotations are copied).
     */
    default String[] getAnnotations(@NotNull PsiFile file) {
        //noinspection deprecation
        return getAnnotations(file.getProject());
    }
    /**
     * @deprecated Use {@link #getAnnotations(PsiFile)}
     */
    @Deprecated
    String[] getAnnotations(Project project);
    /**
     * @deprecated unused; always returns an empty array.
     */
    @Deprecated
    @NotNull
    default String[] annotationsToRemove(Project project, @NotNull String fqName) {
        return ArrayUtil.EMPTY_STRING_ARRAY;
    }
    /** Perform post processing on the annotations, such as deleting or renaming or otherwise updating annotations in the override */
    default void cleanup(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
    }
    /**
     * Copies, for every registered handler, each handler-supplied annotation that is
     * present on {@code source} but missing on {@code target}, skipping annotations not
     * resolvable in the target's module scope; afterwards gives every handler a cleanup pass.
     */
    static void repeatAnnotationsFromSource(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
        Module module = ModuleUtilCore.findModuleForPsiElement(targetClass != null ? targetClass : target);
        GlobalSearchScope moduleScope = module != null ? GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module) : null;
        Project project = target.getProject();
        JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
        for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
            for (String annotation : each.getAnnotations(target.getContainingFile())) {
                // Skip annotations whose class is not visible from the target module.
                if (moduleScope != null && facade.findClass(annotation, moduleScope) == null) continue;
                int flags = CHECK_EXTERNAL | CHECK_TYPE;
                if (AnnotationUtil.isAnnotated(source, annotation, flags) && !AnnotationUtil.isAnnotated(target, annotation, flags)) {
                    each.transferToTarget(annotation, source, target);
                }
            }
        }
        for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
            each.cleanup(source, targetClass, target);
        }
    }
    /**
     * Physically adds {@code annotation} to {@code target}'s modifier list, copying the
     * attribute values from the matching annotation on {@code source} when present.
     */
    default void transferToTarget(String annotation, PsiModifierListOwner source, PsiModifierListOwner target) {
        PsiModifierList modifierList = target.getModifierList();
        assert modifierList != null : target;
        PsiAnnotation srcAnnotation = AnnotationUtil.findAnnotation(source, annotation);
        PsiNameValuePair[] valuePairs = srcAnnotation != null ? srcAnnotation.getParameterList().getAttributes() : PsiNameValuePair.EMPTY_ARRAY;
        AddAnnotationPsiFix.addPhysicalAnnotation(annotation, valuePairs, modifierList);
    }
}
|
paplorinc/intellij-community
|
java/java-impl/src/com/intellij/codeInsight/generation/OverrideImplementsAnnotationsHandler.java
|
Java
|
apache-2.0
| 3,542 |
package com.chisw.work.addressbook.test;
import com.chisw.work.addressbook.Data.GroupData;
import com.chisw.work.addressbook.Data.Groups;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestGroupModification extends TestBase {

    /** Ensures at least one group exists so the modification test has a target. */
    @BeforeMethod
    public void checkPreconditions() {
        if (app.db().groups().size() == 0) {
            app.goTo().groupPage();
            app.groups().createGroupInBeforeMethod();
        }
    }

    /**
     * Modifies an arbitrary existing group and verifies the change both in the
     * database (set equality) and in the UI list.
     */
    @Test
    public void checkGroupModification() {
        Groups groupsBefore = app.db().groups();
        GroupData target = groupsBefore.iterator().next();
        // Reuse the target's id so the comparison below matches the modified record.
        GroupData updated = new GroupData()
                .withId(target.getId())
                .withGroupName("test 258")
                .withGroupLogo("Logo 123")
                .withGroupComment("Comment 12345");
        app.goTo().groupPage();
        app.groups().modifyGroup(updated);
        assertThat(app.groups().count(), equalTo(groupsBefore.size()));
        Groups groupsAfter = app.db().groups();
        assertThat(groupsAfter, equalTo(groupsBefore.withoutAdded(target).withAdded(updated)));
        verifyGroupsListInUi();
    }
}
|
Tarrest/java_home
|
addressbook/src/test/java/com/chisw/work/addressbook/test/TestGroupModification.java
|
Java
|
apache-2.0
| 1,230 |
package io.omengye.common.utils.constants;
/**
 * Shared string constants.
 *
 * <p>Pure constant holder: declared {@code final} and with a private constructor so it
 * can be neither instantiated nor subclassed.
 */
public final class Constants {

    /** Prevents instantiation of this constants holder. */
    private Constants() {}

    /** Key under which responses report their success flag. */
    public static final String RESULT_FLAG = "flag";
}
|
omengye/ws
|
common/src/main/java/io/omengye/common/utils/constants/Constants.java
|
Java
|
apache-2.0
| 153 |
/*
* Created on Mar 29, 2009
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright @2013 the original author or authors.
*/
package org.fest.assertions.api;
import static org.fest.test.ExpectedException.none;
import org.fest.test.ExpectedException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
* Tests for {@link LongAssert#isNull()}.
*
* @author Yvonne Wang
*/
public class LongAssert_isNull_Test {
    @Rule
    public ExpectedException thrown = none();

    private LongAssert assertions;
    private Long actual;

    /** Starts every test from a {@code null} actual value. */
    @Before
    public void setUp() {
        actual = null;
        assertions = new LongAssert(actual);
    }

    @Test
    public void should_pass_if_actual_is_null() {
        assertions.isNull();
    }

    @Test
    public void should_fail_if_actual_is_not_null() {
        thrown.expect(AssertionError.class);
        // Long.valueOf replaces the deprecated boxing constructor `new Long(6l)` and the
        // easily-misread lowercase 'l' literal suffix.
        actual = Long.valueOf(6L);
        assertions = new LongAssert(actual);
        assertions.isNull();
    }
}
|
alexruiz/fest-assert-2.x
|
src/test/java/org/fest/assertions/api/LongAssert_isNull_Test.java
|
Java
|
apache-2.0
| 1,440 |
/*******************************************************************************
* Copyright 2006 - 2012 Vienna University of Technology,
* Department of Software Technology and Interactive Systems, IFS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This work originates from the Planets project, co-funded by the European Union under the Sixth Framework Programme.
******************************************************************************/
package eu.scape_project.planning.model.transform;
import java.io.Serializable;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.ManyToOne;
import eu.scape_project.planning.model.ChangeLog;
import eu.scape_project.planning.model.IChangesHandler;
import eu.scape_project.planning.model.ITouchable;
import eu.scape_project.planning.model.Values;
import eu.scape_project.planning.model.values.INumericValue;
import eu.scape_project.planning.model.values.IOrdinalValue;
import eu.scape_project.planning.model.values.TargetValues;
import eu.scape_project.planning.model.values.Value;
import eu.scape_project.planning.validation.ValidationError;
/**
 * Implements basic transformation functionality, i.e. aggregation over {@link Values} and
 * common properties of transformers.
 *
 * <p>JPA single-table-per-hierarchy entity: concrete transformer subclasses are
 * discriminated by the {@code type} column.
 *
 * @author Hannes Kulovits
 */
@Entity
@Inheritance
@DiscriminatorColumn(name = "type")
public abstract class Transformer implements ITransformer, Serializable, ITouchable
{
    private static final long serialVersionUID = -3708795251848706848L;

    // Surrogate primary key, generated by the persistence provider.
    @Id
    @GeneratedValue
    protected int id;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    // Audit trail of modifications; cascaded so it is persisted with the transformer.
    @ManyToOne(cascade=CascadeType.ALL)
    private ChangeLog changeLog = new ChangeLog();

    /**
     * Transforms all the values in the list of the provided {@link Values}.
     * According to the type of each {@link Value}, either
     * {@link ITransformer#transform(INumericValue)} or {@link ITransformer#transform(IOrdinalValue)}
     * is called.
     * @param values List of values to be transformed
     * @return {@link TargetValues}, which contains a list of all transformed values corresponding to the provided input
     */
    public TargetValues transformValues(Values values) {
        TargetValues result = new TargetValues();
        for (Value v : values.getList()) {
            // Dispatch on the runtime value type; everything non-numeric is ordinal.
            if (v instanceof INumericValue) {
                result.add(transform((INumericValue) v));
            } else {
                result.add(transform((IOrdinalValue) v));
            }
        }
        return result;
    }

    public ChangeLog getChangeLog() {
        return this.changeLog;
    }

    public void setChangeLog(ChangeLog value) {
        changeLog = value;
    }

    /** @return true if the change log records an unsaved modification */
    public boolean isChanged() {
        return changeLog.isAltered();
    }

    public void touch(String username) {
        getChangeLog().touch(username);
    }

    public void touch() {
        getChangeLog().touch();
    }

    /**
     * @see ITouchable#handleChanges(IChangesHandler)
     */
    public void handleChanges(IChangesHandler h){
        h.visit(this);
    }

    /**
     * If this Transformer is not correctly configured, this method adds
     * an appropriate error-message to the given list and returns false.
     *
     * @return true if this transformer is correctly configured
     */
    public abstract boolean isTransformable(List<ValidationError> errors);

    public abstract Transformer clone();
}
|
openpreserve/plato
|
plato-model/src/main/java/eu/scape_project/planning/model/transform/Transformer.java
|
Java
|
apache-2.0
| 4,334 |
/*
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.core.execution.workflow;
/*
* StepFirstWorkflowStrategyTests.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 3/25/11 9:30 AM
*
*/
import java.io.File;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.dtolabs.rundeck.core.common.*;
import com.dtolabs.rundeck.core.execution.*;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.tools.ant.BuildListener;
import org.junit.Assert;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import com.dtolabs.rundeck.core.execution.dispatch.Dispatchable;
import com.dtolabs.rundeck.core.execution.dispatch.DispatcherResult;
import com.dtolabs.rundeck.core.execution.service.NodeExecutorResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.FailureReason;
import com.dtolabs.rundeck.core.execution.workflow.steps.NodeDispatchStepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.StepExecutionResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepException;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionItem;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionService;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResultImpl;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandBase;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandExecutionItem;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandBase;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandExecutionItem;
import com.dtolabs.rundeck.core.tools.AbstractBaseTest;
import com.dtolabs.rundeck.core.utils.FileUtils;
import com.dtolabs.rundeck.core.utils.NodeSet;
public class TestStepFirstWorkflowStrategy extends AbstractBaseTest {
// Framework instance shared by all tests; initialized in setUp().
Framework testFramework;
// Name of the local framework node, used as the default execution target.
String testnode;
// Name of the throw-away project created in setUp() and deleted in tearDown().
private static final String TEST_PROJECT = "StepFirstWorkflowStrategyTests";
/**
 * @param name JUnit 3 test-case name, forwarded to the base class
 */
public TestStepFirstWorkflowStrategy(String name) {
    super(name);
}
/** JUnit 3 suite entry point: runs every test method of this class. */
public static Test suite() {
    return new TestSuite(TestStepFirstWorkflowStrategy.class);
}
/**
 * Creates the test project and populates it with the resource file
 * {@code test-nodes1.xml} before each test.
 */
protected void setUp() {
    super.setUp();
    testFramework = getFrameworkInstance();
    testnode=testFramework.getFrameworkNodeName();
    final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().createFrameworkProject(
        TEST_PROJECT,
        generateProjectResourcesFile(
            new File("src/test/resources/com/dtolabs/rundeck/core/common/test-nodes1.xml")
        )
    );
}
/** Removes the test project's directory after each test. */
protected void tearDown() throws Exception {
    super.tearDown();
    File projectdir = new File(getFrameworkProjectsBase(), TEST_PROJECT);
    FileUtils.deleteDir(projectdir);
}
/** Allows running this suite from the command line with the text-mode runner. */
public static void main(String args[]) {
    junit.textui.TestRunner.run(suite());
}
/**
 * Minimal {@link NodeStepExecutionItem} stub whose node-step type is settable via the
 * {@code type} field; always reports "NodeDispatch" as its item type.
 */
static class testWorkflowCmdItem extends BaseExecutionItem implements NodeStepExecutionItem {
    // Value returned by getNodeStepType(); set directly by tests.
    private String type;
    // Free-form marker tests can set to distinguish instances; -1 means unset.
    int flag=-1;

    @Override
    public String toString() {
        return "testWorkflowCmdItem{" +
            "type='" + type + '\'' +
            ", flag=" + flag +
            '}';
    }

    @Override
    public String getNodeStepType() {
        return type;
    }

    public String getType() {
        return "NodeDispatch";
    }
}
/*static class testWorkflowJobCmdItem extends testWorkflowCmdItem implements IWorkflowJobItem {
private String jobIdentifier;
public String getJobIdentifier() {
return jobIdentifier;
}
}*/
static class testListener implements ExecutionListenerOverride {
public boolean isTerse() {
return false;
}
public String getLogFormat() {
return null;
}
public void log(int i, String s) {
}
@Override
public void event(String eventType, String message, Map eventMeta) {
}
public FailedNodesListener getFailedNodesListener() {
return null;
}
public void beginStepExecution(ExecutionContext context, StepExecutionItem item) {
}
public void finishStepExecution(StatusResult result, ExecutionContext context, StepExecutionItem item) {
}
public void beginNodeExecution(ExecutionContext context, String[] command, INodeEntry node) {
}
public void finishNodeExecution(NodeExecutorResult result, ExecutionContext context, String[] command,
INodeEntry node) {
}
public void beginNodeDispatch(ExecutionContext context, StepExecutionItem item) {
}
public void beginNodeDispatch(ExecutionContext context, Dispatchable item) {
}
public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, StepExecutionItem item) {
}
public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, Dispatchable item) {
}
public void beginFileCopyFileStream(ExecutionContext context, InputStream input, INodeEntry node) {
}
public void beginFileCopyFile(ExecutionContext context, File input, INodeEntry node) {
}
public void beginFileCopyScriptContent(ExecutionContext context, String input, INodeEntry node) {
}
public void finishFileCopy(String result, ExecutionContext context, INodeEntry node) {
}
public void beginExecuteNodeStep(ExecutionContext context, NodeStepExecutionItem item, INodeEntry node) {
}
public void finishExecuteNodeStep(NodeStepResult result, ExecutionContext context, StepExecutionItem item,
INodeEntry node) {
}
public BuildListener getBuildListener() {
return null;
}
public ExecutionListenerOverride createOverride() {
return this;
}
public void setTerse(boolean terse) {
}
public void setLogFormat(String format) {
}
public void setFailedNodesListener(FailedNodesListener listener) {
}
}
static class testInterpreter implements NodeStepExecutor {
List<StepExecutionItem> executionItemList = new ArrayList<StepExecutionItem>();
List<ExecutionContext> executionContextList = new ArrayList<ExecutionContext>();
List<INodeEntry> nodeEntryList = new ArrayList<INodeEntry>();
int index = 0;
List<NodeStepResult> resultList = new ArrayList<NodeStepResult>();
boolean shouldThrowException = false;
public NodeStepResult executeNodeStep(StepExecutionContext executionContext,
NodeStepExecutionItem executionItem, INodeEntry iNodeEntry) throws
NodeStepException {
executionItemList.add(executionItem);
executionContextList.add(executionContext);
nodeEntryList.add(iNodeEntry);
if (shouldThrowException) {
throw new NodeStepException("testInterpreter test exception",null,iNodeEntry.getNodename());
}
// System.out.println("return index: (" + index + ") in size: " + resultList.size());
return resultList.get(index++);
}
}
    // Failure reason attached by testResult when simulating a failed step.
    static enum Reason implements FailureReason{
        Test
    }
static class testResult extends NodeStepResultImpl {
boolean success;
int flag;
INodeEntry node;
testResult(boolean success, int flag) {
super(null,success?null: TestStepFirstWorkflowStrategy.Reason.Test,success?null:"test failure",null);
this.success = success;
this.flag = flag;
}
@Override
public Exception getException() {
return null;
}
public boolean isSuccess() {
return success;
}
@Override
public String toString() {
return "testResult{" +
"success=" + success +
", flag=" + flag +
'}';
}
public INodeEntry getNode() {
return node;
}
}
    /**
     * Sanity check: the test project's node set loads and contains the two
     * nodes defined in the test-nodes1.xml fixture.
     */
    public void testExecuteWorkflow() throws Exception {
        final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().getFrameworkProject(
            TEST_PROJECT);
        final INodeSet nodes = frameworkProject.getNodeSet();
        assertNotNull(nodes);
        assertEquals(2, nodes.getNodes().size());
    }
    /**
     * An empty workflow should succeed without invoking any node step executor.
     */
    public void testExecuteWorkflow_empty() throws Exception {
        //test empty workflow
        final NodeSet nodeset = new NodeSet();
        final WorkflowImpl workflow = new WorkflowImpl(new ArrayList<StepExecutionItem>(), 1, false,
            WorkflowExecutor.STEP_FIRST);
        final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
        final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
        final StepExecutionContext context =
            new ExecutionContextImpl.Builder()
                .frameworkProject(TEST_PROJECT)
                .user("user1")
                .nodeSelector(nodeset)
                .executionListener(new testListener())
                .framework(testFramework)
                .nodes(testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet())
                .build();
        //setup testInterpreter for all command types
        final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
            testFramework);
        testInterpreter interpreterMock = new testInterpreter();
        interpreterService.registerInstance("exec", interpreterMock);
        interpreterService.registerInstance("script", interpreterMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock);
//        interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
        final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
        assertNotNull(result);
        if (!result.isSuccess() && null != result.getException()) {
            result.getException().printStackTrace(System.err);
        }
        assertNull("threw exception: " + result.getException(), result.getException());
        assertTrue(result.isSuccess());
        // no steps means no executor invocations
        assertEquals(0, interpreterMock.executionItemList.size());
    }
    /**
     * A workflow item whose node-step type is null should fail lookup in the
     * WorkflowNodeStep service with a NullPointerException, without ever
     * invoking an executor.
     */
    public void testExecuteWorkflow_undefined_item() throws Exception {
        //test undefined workflow item
        final NodeSet nodeset = new NodeSet();
        final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
        commands.add(new testWorkflowCmdItem());
        final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST);
        final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
        final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
        final StepExecutionContext context =
            new ExecutionContextImpl.Builder()
                .frameworkProject(TEST_PROJECT)
                .user("user1")
                .nodeSelector(nodeset.nodeSelectorWithDefaultAll())
                .executionListener(new testListener())
                .framework(testFramework)
                .nodes(NodeFilter.filterNodes(nodeset.nodeSelectorWithDefaultAll(),
                    testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()))
                .build();
        //setup testInterpreter for all command types
        final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
            testFramework);
        testInterpreter interpreterMock = new testInterpreter();
        interpreterService.registerInstance("exec", interpreterMock);
        interpreterService.registerInstance("script", interpreterMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock);
//        interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
        final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
        assertNotNull(result);
        if (!result.isSuccess() && null != result.getException()) {
            result.getException().printStackTrace(System.out);
        }
        assertFalse(result.isSuccess());
        assertEquals(0, interpreterMock.executionItemList.size());
        // the failure surfaces as the service's null-provider NPE
        assertNotNull("threw exception: " + result.getException(), result.getException());
        assertTrue("threw exception: " + result.getException(),
            result.getException() instanceof NullPointerException);
        assertEquals("threw exception: " + result.getException(),
            "provider name was null for Service: WorkflowNodeStep",
            result.getException().getMessage());
    }
    /**
     * A single inline-script step should be dispatched to the "script" executor
     * exactly once, with the script text, project, user and node selector
     * propagated into the step's execution context. The "exec" executor is
     * registered with a throwing mock to prove it is never consulted.
     */
    public void testExecuteWorkflow_scriptExec() throws Exception {
        //test script exec item
        final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
        final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
        final StepExecutionItem testWorkflowCmdItem = new ScriptFileCommandBase(){
            @Override
            public String getScript() {
                return "a command";
            }
        };
        commands.add(testWorkflowCmdItem);
        final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
            WorkflowExecutor.STEP_FIRST);
        final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
        final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
        final StepExecutionContext context =
            new ExecutionContextImpl.Builder()
                .frameworkProject(TEST_PROJECT)
                .user("user1")
                .nodeSelector(nodeset)
                .executionListener(new testListener())
                .framework(testFramework)
                .nodes(NodeFilter.filterNodes(
                    nodeset,
                    testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                ))
                .build();
        //setup testInterpreter for all command types
        final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
            testFramework);
        testInterpreter interpreterMock = new testInterpreter();
        testInterpreter failMock = new testInterpreter();
        failMock.shouldThrowException = true;
        interpreterService.registerInstance("exec", failMock);
        interpreterService.registerInstance("script", interpreterMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//        interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock);

        //set return result
        interpreterMock.resultList.add(new NodeStepResultImpl(null));

        final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
        assertNotNull(result);
        if (!result.isSuccess() && null != result.getException()) {
            result.getException().printStackTrace(System.err);
        }
        assertNull("threw exception: " + result.getException(), result.getException());
        assertTrue(result.isSuccess());
        assertEquals(1, interpreterMock.executionItemList.size());
        final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
        assertTrue("wrong class: " + executionItem1.getClass().getName(),
            executionItem1 instanceof ScriptFileCommandExecutionItem);
        ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) executionItem1;
        assertEquals("a command", scriptItem.getScript());
        assertNull(scriptItem.getScriptAsStream());
        assertNull(scriptItem.getServerScriptFilePath());
        assertEquals(1, interpreterMock.executionContextList.size());
        final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
        assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
        assertNotNull(executionContext.getDataContext());
        assertNotNull(executionContext.getDataContext().get("node"));
        assertEquals(0, executionContext.getLoglevel());
        assertEquals("user1", executionContext.getUser());
        assertEquals("expected " + nodeset + ", but was " + executionContext.getNodeSelector(), nodeset,
            executionContext.getNodeSelector());
    }
    /**
     * A single command step should be dispatched to the "exec" executor exactly
     * once, with the command array, project, user and node selector propagated
     * into the step's execution context. The "script" executor is registered
     * with a throwing mock to prove it is never consulted.
     */
    public void testExecuteWorkflow_commandexec() throws Exception {
        //test command exec item
        final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
        final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
        final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
            @Override
            public String[] getCommand() {
                return new String[]{"a", "command"};
            }
        };
        commands.add(testWorkflowCmdItem);
        final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
            WorkflowExecutor.STEP_FIRST);
        final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
        final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
        final StepExecutionContext context =
            new ExecutionContextImpl.Builder()
                .frameworkProject(TEST_PROJECT)
                .user("user1")
                .nodeSelector(nodeset)
                .executionListener(new testListener())
                .framework(testFramework)
                .nodes(NodeFilter.filterNodes(
                    nodeset,
                    testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                ))
                .build();
        //setup testInterpreter for all command types
        final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
            testFramework);
        testInterpreter interpreterMock = new testInterpreter();
        testInterpreter failMock = new testInterpreter();
        failMock.shouldThrowException = true;
        interpreterService.registerInstance("exec", interpreterMock);
        interpreterService.registerInstance("script", failMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
        interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//        interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock);

        //set return result
        interpreterMock.resultList.add(new NodeStepResultImpl(null));

        final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
        assertNotNull(result);
        if (!result.isSuccess() && null != result.getException()) {
            result.getException().printStackTrace(System.err);
        }
        assertNull("threw exception: " + result.getException(), result.getException());
        assertTrue(result.isSuccess());
        assertEquals(1, interpreterMock.executionItemList.size());
        final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
        assertTrue("wrong class: " + executionItem1.getClass().getName(),
            executionItem1 instanceof ExecCommandExecutionItem);
        ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
        assertNotNull(execItem.getCommand());
        assertEquals(2, execItem.getCommand().length);
        assertEquals("a", execItem.getCommand()[0]);
        assertEquals("command", execItem.getCommand()[1]);
        assertEquals(1, interpreterMock.executionContextList.size());
        final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
        assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
        assertNotNull(executionContext.getDataContext());
        assertNotNull(executionContext.getDataContext().get("node"));
        assertEquals(0, executionContext.getLoglevel());
        assertEquals("user1", executionContext.getUser());
        assertEquals(nodeset, executionContext.getNodeSelector());
    }
public void testExecuteWorkflowThreeItems() throws Exception{
{
//test workflow of three successful items
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2","command"};
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
@Override
public String getServerScriptFilePath() {
return "/some/file/path";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "2"};
}
};
commands.add(testWorkflowCmdItemScript2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//set resturn results
interpreterMock.resultList.add(new testResult(true, 0));
interpreterMock.resultList.add(new testResult(true, 1));
interpreterMock.resultList.add(new testResult(true, 2));
final WorkflowExecutionResult result = strategy.executeWorkflow(context,executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(3, test1.size());
for (final int i : new int[]{0, 1, 2}) {
final StepExecutionResult interpreterResult = test1.get(i);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertTrue(val.isSuccess());
assertEquals(i, val.flag);
}
assertEquals(3, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + item3.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem2 = (ScriptFileCommandExecutionItem) item3;
assertNull(scriptItem2.getScript());
assertNull(scriptItem2.getScriptAsStream());
assertEquals("/some/file/path", scriptItem2.getServerScriptFilePath());
assertNotNull(scriptItem2.getArgs());
assertEquals(2, scriptItem2.getArgs().length);
assertEquals("-testargs", scriptItem2.getArgs()[0]);
assertEquals("2", scriptItem2.getArgs()[1]);
assertEquals(3, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1, 2}) {
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals("item "+i,TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull("item " + i, executionContext.getDataContext());
assertNotNull("item " + i, executionContext.getDataContext().get("node"));
assertEquals("item " + i,0, executionContext.getLoglevel());
assertEquals("item " + i,"user1", executionContext.getUser());
assertEquals("item " + i,nodeset, executionContext.getNodeSelector());
}
}
}
    /**
     * With keepgoing=false, a failure on the second of three steps must stop
     * the workflow: only two steps execute, the overall result is failure (with
     * no exception), and the second step's per-node result is the canned
     * failing testResult.
     */
    public void testWorkflowFailNoKeepgoing() throws Exception{
        {
            //test a workflow with a failing item (1), with keepgoing=false
            final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
            final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
            final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
                @Override
                public String[] getCommand() {
                    return new String[]{"a", "2", "command"};
                }
            };
            commands.add(testWorkflowCmdItem);

            final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
                @Override
                public String getScript() {
                    return "a command";
                }

                @Override
                public String[] getArgs() {
                    return new String[]{"-testargs", "1"};
                }
            };
            commands.add(testWorkflowCmdItemScript);
            final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
                @Override
                public String getServerScriptFilePath() {
                    return "/some/file/path";
                }

                @Override
                public String[] getArgs() {
                    return new String[]{"-testargs", "2"};
                }
            };
            commands.add(testWorkflowCmdItemScript2);

            final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
                WorkflowExecutor.STEP_FIRST);
            workflow.setKeepgoing(false);
            final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
            final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
            final StepExecutionContext context =
                new ExecutionContextImpl.Builder()
                    .frameworkProject(TEST_PROJECT)
                    .user("user1")
                    .nodeSelector(nodeset)
                    .executionListener(new testListener())
                    .framework(testFramework)
                    .nodes(NodeFilter.filterNodes(
                        nodeset,
                        testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                    ))
                    .build();
            //setup testInterpreter for all command types
            final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
                testFramework);
            testInterpreter interpreterMock = new testInterpreter();
            testInterpreter failMock = new testInterpreter();
            failMock.shouldThrowException = true;
//            interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
            interpreterService.registerInstance("exec", interpreterMock);
            interpreterService.registerInstance("script", interpreterMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);

            //set return results, fail on second item
            interpreterMock.resultList.add(new testResult(true, 0));
            interpreterMock.resultList.add(new testResult(false, 1));
            interpreterMock.resultList.add(new testResult(true, 2));

            final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
            assertNotNull(result);
            if (null != result.getException()) {
                result.getException().printStackTrace(System.out);
            }
            assertFalse(result.isSuccess());
            assertNull("threw exception: " + result.getException(), result.getException());
            StepExecutionResult result1 = result.getResultSet().get(1);
            final DispatcherResult executionResult = NodeDispatchStepExecutor.extractDispatcherResult(result1);

            assertNotNull(executionResult.getResults());
            assertEquals(1, executionResult.getResults().size());
            assertNotNull(executionResult.getResults().get(testnode));
            final StatusResult testnode1 = executionResult.getResults().get(testnode);
            assertNotNull(testnode1);
            assertTrue(testnode1 instanceof testResult);
            testResult failResult = (testResult) testnode1;
            assertEquals(1, failResult.flag);

            assertNotNull(result.getResultSet());
            final List<StepExecutionResult> test1 = result.getResultSet();
            // only two steps ran: keepgoing=false stopped at the failure
            assertEquals(2, test1.size());
            for (final int i : new int[]{0, 1}) {
                final StepExecutionResult interpreterResult = test1.get(i);
                final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
                assertEquals(1, dr.getResults().size());
                final NodeStepResult nrs = dr.getResults().values().iterator().next();
                assertTrue("unexpected class: " + nrs.getClass(),
                    nrs instanceof testResult);
                testResult val = (testResult) nrs;
                assertEquals(i, val.flag);
                if(0==i){
                    assertTrue(val.isSuccess());
                }else{
                    assertFalse(val.isSuccess());
                }
            }

            assertEquals(2, interpreterMock.executionItemList.size());

            final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
            assertTrue("wrong class: " + executionItem1.getClass().getName(),
                executionItem1 instanceof ExecCommandExecutionItem);
            ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
            assertNotNull(execItem.getCommand());
            assertEquals(3, execItem.getCommand().length);
            assertEquals("a", execItem.getCommand()[0]);
            assertEquals("2", execItem.getCommand()[1]);
            assertEquals("command", execItem.getCommand()[2]);

            final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
            assertTrue("wrong class: " + item2.getClass().getName(),
                item2 instanceof ScriptFileCommandExecutionItem);
            ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
            assertEquals("a command", scriptItem.getScript());
            assertNull(scriptItem.getScriptAsStream());
            assertNull(scriptItem.getServerScriptFilePath());
            assertNotNull(scriptItem.getArgs());
            assertEquals(2, scriptItem.getArgs().length);
            assertEquals("-testargs", scriptItem.getArgs()[0]);
            assertEquals("1",scriptItem.getArgs()[1]);

            assertEquals(2, interpreterMock.executionContextList.size());
            for (final int i : new int[]{0, 1}) {
                final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
                assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
                assertNotNull(executionContext.getDataContext());
                assertNotNull(executionContext.getDataContext().get("node"));
                assertEquals(0, executionContext.getLoglevel());
                assertEquals("user1", executionContext.getUser());
                assertEquals(nodeset, executionContext.getNodeSelector());
            }
        }
    }
public void testWorkflowFailYesKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=true
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
@Override
public String getServerScriptFilePath() {
return "/some/file/path";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "2"};
}
};
commands.add(testWorkflowCmdItemScript2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(true);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//set resturn results, fail on second item
interpreterMock.resultList.add(new testResult(true, 0));
interpreterMock.resultList.add(new testResult(false, 1));
interpreterMock.resultList.add(new testResult(true, 2));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(3, test1.size());
for (final int i : new int[]{0, 1, 2}) {
final StepExecutionResult interpreterResult = test1.get(i);
assertTrue(NodeDispatchStepExecutor.isWrappedDispatcherResult(interpreterResult));
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(i, val.flag);
if (1 == i) {
assertFalse(val.isSuccess());
} else {
assertTrue(val.isSuccess());
}
}
assertEquals(3, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
assertNotNull(scriptItem.getArgs());
assertEquals(2, scriptItem.getArgs().length);
assertEquals("-testargs", scriptItem.getArgs()[0]);
assertEquals("1",scriptItem.getArgs()[1]);
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem3 = (ScriptFileCommandExecutionItem) item3;
assertEquals("/some/file/path", scriptItem3.getServerScriptFilePath());
assertNull(scriptItem3.getScript());
assertNull(scriptItem3.getScriptAsStream());
assertNotNull(scriptItem3.getArgs());
assertEquals(2, scriptItem3.getArgs().length);
assertEquals("-testargs", scriptItem3.getArgs()[0]);
assertEquals("2", scriptItem3.getArgs()[1]);
assertEquals(3, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1}) {
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
    /**
     * Verifies a workflow whose first step fails, with keepgoing=false and a
     * failure-handler attached to that step: the handler runs once, the second
     * step is never dispatched, and the workflow reports overall failure with a
     * single entry in the result set.
     */
    public void testFailureHandlerItemNoKeepgoing() throws Exception{
        {
            //test a workflow with a failing item (1), with keepgoing=false, and a failureHandler
            final boolean KEEPGOING_TEST = false;
            final boolean STEP_0_RESULT = false;
            final boolean STEP_1_RESULT = true;
            final boolean HANDLER_RESULT = true;
            final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
            final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
            //failure handler: script-file step with fixed script text and args
            final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
                @Override
                public String getScript() {
                    return "failure handler script";
                }
                @Override
                public String[] getArgs() {
                    return new String[]{"failure","script","args"};
                }
            };
            //step 1: exec command with the failure handler attached; its mock result will fail
            final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
                @Override
                public String[] getCommand() {
                    return new String[]{"a", "2", "command"};
                }
                @Override
                public StepExecutionItem getFailureHandler() {
                    return testHandlerItem;
                }
            };
            commands.add(testWorkflowCmdItem);
            //step 2: script step that should never run because step 1 fails and keepgoing=false
            final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
                @Override
                public String getScript() {
                    return "a command";
                }
                @Override
                public String[] getArgs() {
                    return new String[]{"-testargs", "1"};
                }
            };
            commands.add(testWorkflowCmdItemScript);
            final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
                                                           WorkflowExecutor.STEP_FIRST);
            workflow.setKeepgoing(KEEPGOING_TEST);
            final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
            final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
            final StepExecutionContext context =
                new ExecutionContextImpl.Builder()
                    .frameworkProject(TEST_PROJECT)
                    .user("user1")
                    .nodeSelector(nodeset)
                    .executionListener(new testListener())
                    .framework(testFramework)
                    .nodes(NodeFilter.filterNodes(
                               nodeset,
                               testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                           ))
                    .build();
            //setup testInterpreter for all command types
            final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
                testFramework);
            testInterpreter interpreterMock = new testInterpreter();
            testInterpreter handlerInterpreterMock = new testInterpreter();
            testInterpreter failMock = new testInterpreter();
            failMock.shouldThrowException = true;
            //"exec" steps go to interpreterMock, "script" steps (the handler) to handlerInterpreterMock;
            //any sub-workflow dispatch would throw via failMock
            interpreterService.registerInstance("exec", interpreterMock);
            interpreterService.registerInstance("script", handlerInterpreterMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results: step 0 fails, step 1 would succeed; the handler succeeds
            interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
            interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
            handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
            final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
            assertNotNull(result);
            if (!result.isSuccess() && null != result.getException()) {
                result.getException().printStackTrace(System.err);
            }
            assertFalse(result.isSuccess());
            assertNull("threw exception: " + result.getException(), result.getException());
            StepExecutionResult result1 = result.getResultSet().get(0);
            final DispatcherResult executionResult
                = NodeDispatchStepExecutor.extractDispatcherResult(result1);
            assertNotNull(executionResult.getResults());
            assertEquals(1, executionResult.getResults().size());
            assertNotNull(executionResult.getResults().get(testnode));
            final StatusResult testnode1 = executionResult.getResults().get(testnode);
            assertNotNull(testnode1);
            assertTrue(testnode1 instanceof testResult);
            testResult failResult = (testResult) testnode1;
            assertEquals(0, failResult.flag);
            //keepgoing=false: only the failed first step appears in the result set
            assertEquals(1, result.getResultSet().size());
            assertNotNull(result.getResultSet());
            final List<StepExecutionResult> test1 = result.getResultSet();
            assertEquals(1, test1.size());
            final int i =0;
            final StepExecutionResult interpreterResult = test1.get(i);
            final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
            assertEquals(1, dr.getResults().size());
            final NodeStepResult nrs = dr.getResults().values().iterator().next();
            assertTrue("unexpected class: " + nrs.getClass(),
                       nrs instanceof testResult);
            testResult val = (testResult) nrs;
            assertEquals(i, val.flag);
            assertFalse(val.isSuccess());
            //only the first (exec) step was dispatched to the step executor
            assertEquals(1, interpreterMock.executionItemList.size());
            final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
            assertTrue("wrong class: " + executionItem1.getClass().getName(),
                       executionItem1 instanceof ExecCommandExecutionItem);
            ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
            assertNotNull(execItem.getCommand());
            assertEquals(3, execItem.getCommand().length);
            assertEquals("a", execItem.getCommand()[0]);
            assertEquals("2", execItem.getCommand()[1]);
            assertEquals("command", execItem.getCommand()[2]);
            assertEquals(1, interpreterMock.executionContextList.size());
            final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
            assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
            assertNotNull(executionContext.getDataContext());
            assertNotNull(executionContext.getDataContext().get("node"));
            assertEquals(0, executionContext.getLoglevel());
            assertEquals("user1", executionContext.getUser());
            assertEquals(nodeset, executionContext.getNodeSelector());
            //check handler item was executed
            assertEquals(1, handlerInterpreterMock.executionItemList.size());
            final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(0);
            assertTrue("wrong class: " + executionItemX.getClass().getName(),
                       executionItemX instanceof ScriptFileCommandExecutionItem);
            ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
            assertNotNull(execItemX.getScript());
            assertNotNull(execItemX.getArgs());
            assertEquals("failure handler script", execItemX.getScript());
            assertEquals(3, execItemX.getArgs().length);
            assertEquals("failure", execItemX.getArgs()[0]);
            assertEquals("script", execItemX.getArgs()[1]);
            assertEquals("args", execItemX.getArgs()[2]);
            assertEquals(1, handlerInterpreterMock.executionContextList.size());
            final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(i);
            assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
            assertNotNull(executionContextX.getDataContext());
            assertNotNull(executionContextX.getDataContext().get("node"));
            assertEquals(0, executionContextX.getLoglevel());
            assertEquals("user1", executionContextX.getUser());
            assertEquals(nodeset, executionContextX.getNodeSelector());
        }
    }
public void testFailureHandlerItemYesKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that fails
final boolean KEEPGOING_TEST = true;
final boolean STEP_0_RESULT = false;
final boolean STEP_1_RESULT = true;
final boolean HANDLER_RESULT = false;
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "failure handler script";
}
@Override
public String[] getArgs() {
return new String[]{"failure","script","args"};
}
@Override
public String toString() {
return "testHandlerItem";
}
};
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem";
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "3", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem2";
}
};
commands.add(testWorkflowCmdItem2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(KEEPGOING_TEST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter handlerInterpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", handlerInterpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//set resturn results
interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
System.out.println("results: "+test1);
assertEquals(2, interpreterMock.executionItemList.size());
assertEquals(2, interpreterMock.executionContextList.size());
//check handler item was executed
assertEquals(1, handlerInterpreterMock.executionItemList.size());
assertEquals(1, handlerInterpreterMock.executionContextList.size());
assertEquals(2, test1.size());
int resultIndex =0;
int stepNum=0;
{
//first step result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(0, val.flag);
assertFalse(val.isSuccess());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
resultIndex=1;
//
// {
// //failure handler result
// final StepExecutionResult interpreterResult = test1.get(resultIndex);
// final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
// assertEquals(1, dr.getResults().size());
// final NodeStepResult nrs = dr.getResults().values().iterator().next();
// assertTrue("unexpected class: " + nrs.getClass(),
// nrs instanceof testResult);
// testResult val = (testResult) nrs;
// assertEquals(0, val.flag);
// assertFalse(val.isSuccess());
//
// final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum);
// assertTrue("wrong class: " + executionItemX.getClass().getName(),
// executionItemX instanceof ScriptFileCommandExecutionItem);
// ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
// assertNotNull(execItemX.getScript());
// assertNotNull(execItemX.getArgs());
// assertEquals("failure handler script", execItemX.getScript());
// assertEquals(3, execItemX.getArgs().length);
// assertEquals("failure", execItemX.getArgs()[0]);
// assertEquals("script", execItemX.getArgs()[1]);
// assertEquals("args", execItemX.getArgs()[2]);
//
//
// final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum);
// assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
// assertNull(executionContextX.getDataContext());
// assertEquals(0, executionContextX.getLoglevel());
// assertEquals("user1", executionContextX.getUser());
// assertEquals(nodeset, executionContextX.getNodeSelector());
// assertNull(executionContextX.getArgs());
// }
// resultIndex=2;
stepNum = 1;
{
//second step result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(1, val.flag);
assertTrue(val.isSuccess());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("3", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
    /**
     * Verifies a workflow whose first step fails, with keepgoing=true and a
     * failure-handler that succeeds: the handler's successful result takes the
     * failed step's place in the result set, the second step runs, and the
     * workflow reports overall success with two results.
     */
    public void testFailureHandlerItemYesKeepgoingHandlerSuccess() throws Exception {
        {
            //test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that succeeds
            final boolean KEEPGOING_TEST = true;
            final boolean STEP_0_RESULT = false;
            final boolean STEP_1_RESULT = true;
            final boolean HANDLER_RESULT = true;
            final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
            final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
            //failure handler shared by both steps; its mock result will succeed
            final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
                @Override
                public String getScript() {
                    return "failure handler script";
                }
                @Override
                public String[] getArgs() {
                    return new String[]{"failure","script","args"};
                }
                @Override
                public String toString() {
                    return "testHandlerItem";
                }
            };
            //step 1: exec command whose mock result will fail, triggering the handler
            final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
                @Override
                public String[] getCommand() {
                    return new String[]{"a", "2", "command"};
                }
                @Override
                public StepExecutionItem getFailureHandler() {
                    return testHandlerItem;
                }
                @Override
                public String toString() {
                    return "testWorkflowCmdItem";
                }
            };
            commands.add(testWorkflowCmdItem);
            //step 2: exec command that succeeds
            final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() {
                @Override
                public String[] getCommand() {
                    return new String[]{"a", "3", "command"};
                }
                @Override
                public StepExecutionItem getFailureHandler() {
                    return testHandlerItem;
                }
                @Override
                public String toString() {
                    return "testWorkflowCmdItem2";
                }
            };
            commands.add(testWorkflowCmdItem2);
            final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
                                                           WorkflowExecutor.STEP_FIRST);
            workflow.setKeepgoing(KEEPGOING_TEST);
            final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
            final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
            final StepExecutionContext context =
                new ExecutionContextImpl.Builder()
                    .frameworkProject(TEST_PROJECT)
                    .user("user1")
                    .nodeSelector(nodeset)
                    .executionListener(new testListener())
                    .framework(testFramework)
                    .nodes(NodeFilter.filterNodes(
                               nodeset,
                               testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                           ))
                    .build();
            //setup testInterpreter for all command types
            final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
                testFramework);
            testInterpreter interpreterMock = new testInterpreter();
            testInterpreter handlerInterpreterMock = new testInterpreter();
            testInterpreter failMock = new testInterpreter();
            failMock.shouldThrowException = true;
            //"exec" steps go to interpreterMock, the "script" handler to handlerInterpreterMock
            interpreterService.registerInstance("exec", interpreterMock);
            interpreterService.registerInstance("script", handlerInterpreterMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results: step 0 fails, step 1 succeeds, the handler succeeds
            interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
            interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
            handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
            final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
            assertNotNull(result);
            if (!result.isSuccess() && null != result.getException()) {
                result.getException().printStackTrace(System.err);
            }
            assertTrue(result.isSuccess());
            assertNull("threw exception: " + result.getException(), result.getException());
            assertNotNull(result.getResultSet());
            final List<StepExecutionResult> test1 = result.getResultSet();
            System.err.println("results: "+test1);
            assertEquals(2, test1.size());
            //both workflow steps were dispatched
            assertEquals(2, interpreterMock.executionItemList.size());
            assertEquals(2, interpreterMock.executionContextList.size());
            //check handler item was executed
            assertEquals(1, handlerInterpreterMock.executionItemList.size());
            assertEquals(1, handlerInterpreterMock.executionContextList.size());
            int resultIndex =0;
            int stepNum=0;
            {
                //failure handler result: replaces the failed first step's result
                final StepExecutionResult interpreterResult = test1.get(resultIndex);
                final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
                assertEquals(1, dr.getResults().size());
                final NodeStepResult nrs = dr.getResults().values().iterator().next();
                assertTrue("unexpected class: " + nrs.getClass(),
                           nrs instanceof testResult);
                testResult val = (testResult) nrs;
                assertEquals(0, val.flag);
                assertTrue(val.isSuccess());
                final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum);
                assertTrue("wrong class: " + executionItemX.getClass().getName(),
                           executionItemX instanceof ScriptFileCommandExecutionItem);
                ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
                assertNotNull(execItemX.getScript());
                assertNotNull(execItemX.getArgs());
                assertEquals("failure handler script", execItemX.getScript());
                assertEquals(3, execItemX.getArgs().length);
                assertEquals("failure", execItemX.getArgs()[0]);
                assertEquals("script", execItemX.getArgs()[1]);
                assertEquals("args", execItemX.getArgs()[2]);
                final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum);
                assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
                assertNotNull(executionContextX.getDataContext());
                assertNotNull(executionContextX.getDataContext().get("node"));
                assertEquals(0, executionContextX.getLoglevel());
                assertEquals("user1", executionContextX.getUser());
                assertEquals(nodeset, executionContextX.getNodeSelector());
            }
            resultIndex=1;
            stepNum = 1;
            {
                //second step result: the successful exec step
                final StepExecutionResult interpreterResult = test1.get(resultIndex);
                final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
                assertEquals(1, dr.getResults().size());
                final NodeStepResult nrs = dr.getResults().values().iterator().next();
                assertTrue("unexpected class: " + nrs.getClass(),
                           nrs instanceof testResult);
                testResult val = (testResult) nrs;
                assertEquals(1, val.flag);
                assertTrue(val.isSuccess());
                final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
                assertTrue("wrong class: " + executionItem1.getClass().getName(),
                           executionItem1 instanceof ExecCommandExecutionItem);
                ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
                assertNotNull(execItem.getCommand());
                assertEquals(3, execItem.getCommand().length);
                assertEquals("a", execItem.getCommand()[0]);
                assertEquals("3", execItem.getCommand()[1]);
                assertEquals("command", execItem.getCommand()[2]);
                final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
                assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
                assertNotNull(executionContext.getDataContext());
                assertNotNull(executionContext.getDataContext().get("node"));
                assertEquals(0, executionContext.getLoglevel());
                assertEquals("user1", executionContext.getUser());
                assertEquals(nodeset, executionContext.getNodeSelector());
            }
        }
    }
public void testGenericItem() throws Exception{
{
//test jobref item
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final testWorkflowCmdItem item = new testWorkflowCmdItem();
item.type = "my-type";
commands.add(item);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("my-type", interpreterMock);
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//set resturn result
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(1, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, interpreterMock.executionContextList.size());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
    /**
     * Executes a single generic ("my-type") workflow step against two matching
     * nodes ("test1" and "testnode2") and verifies the step executor is invoked
     * once per node, each time with a single-node selector for that node.
     */
    public void testMultipleNodes() throws Exception{
        {
            //test jobref item
            final NodeSet nodeset = new NodeSet();
            //match every node in the project
            nodeset.createInclude().setName(".*");
            final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
            final testWorkflowCmdItem item = new testWorkflowCmdItem();
            item.type = "my-type";
            commands.add(item);
            final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
                                                           WorkflowExecutor.STEP_FIRST);
            final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
            final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
            final StepExecutionContext context =
                new ExecutionContextImpl.Builder()
                    .frameworkProject(TEST_PROJECT)
                    .user("user1")
                    .nodeSelector(nodeset)
                    .executionListener(new testListener())
                    .framework(testFramework)
                    .nodes(NodeFilter.filterNodes(
                               nodeset,
                               testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
                           ))
                    .build();
            //setup testInterpreter for all command types
            final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
                testFramework);
            testInterpreter interpreterMock = new testInterpreter();
            testInterpreter failMock = new testInterpreter();
            failMock.shouldThrowException = true;
            //only "my-type" is expected to be dispatched; other types throw if invoked
            interpreterService.registerInstance("my-type", interpreterMock);
            interpreterService.registerInstance("exec", failMock);
            interpreterService.registerInstance("script", failMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
            interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return result node 1
            interpreterMock.resultList.add(new NodeStepResultImpl(null));
            //set return result node 2
            interpreterMock.resultList.add(new NodeStepResultImpl(null));
            final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
            assertNotNull(result);
            if (!result.isSuccess() && null != result.getException()) {
                result.getException().printStackTrace(System.err);
            }
            assertNull("threw exception: " + result.getException(), result.getException());
            assertTrue(result.isSuccess());
            //one invocation per matched node
            assertEquals(2, interpreterMock.executionItemList.size());
            assertEquals(2, interpreterMock.executionContextList.size());
            {
                //first invocation: node "test1" with a single-node selector
                final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
                assertTrue("wrong class: " + executionItem1.getClass().getName(),
                           executionItem1 instanceof testWorkflowCmdItem);
                testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
                assertNotNull(execItem.getNodeStepType());
                assertEquals("my-type", execItem.getNodeStepType());
                final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
                assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
                assertNotNull(executionContext.getDataContext());
                assertNotNull(executionContext.getDataContext().get("node"));
                assertEquals(0, executionContext.getLoglevel());
                assertEquals("user1", executionContext.getUser());
                assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
            }
            {
                //second invocation: node "testnode2" with a single-node selector
                final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1);
                assertTrue("wrong class: " + executionItem1.getClass().getName(),
                           executionItem1 instanceof testWorkflowCmdItem);
                testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
                assertNotNull(execItem.getNodeStepType());
                assertEquals("my-type", execItem.getNodeStepType());
                final ExecutionContext executionContext = interpreterMock.executionContextList.get(1);
                assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
                assertNotNull(executionContext.getDataContext());
                assertNotNull(executionContext.getDataContext().get("node"));
                assertEquals(0, executionContext.getLoglevel());
                assertEquals("user1", executionContext.getUser());
                assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
            }
        }
    }
public void testMultipleItemsAndNodes() throws Exception{
{
//test jobref item
final NodeSet nodeset = new NodeSet();
nodeset.createInclude().setName(".*");
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final testWorkflowCmdItem item = new testWorkflowCmdItem();
item.flag=0;
item.type = "my-type";
commands.add(item);
final testWorkflowCmdItem item2 = new testWorkflowCmdItem();
item2.flag = 1;
item2.type = "my-type";
commands.add(item2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("my-type", interpreterMock);
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
//set resturn result node 1 step 1
interpreterMock.resultList.add(new NodeStepResultImpl(null));
//set resturn result node 2 step 1
interpreterMock.resultList.add(new NodeStepResultImpl(null));
//set resturn result node 1 step 2
interpreterMock.resultList.add(new NodeStepResultImpl(null));
//set resturn result node 2 step 2
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(4, interpreterMock.executionItemList.size());
assertEquals(4, interpreterMock.executionContextList.size());
{//node 1 step 1
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(0, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
}
{//node 2 step 1
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(0, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(1);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
}
{//node 1 step 2
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(2);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
}
{//node 2 step 2
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(3);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(3);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
}
}
}
public void testCreatePrintableDataContext() {
    // Assemble a data context with an unrelated group, a secure-option group,
    // and a regular option group that shares a key with the secure group.
    Map<String, Map<String, String>> input = new HashMap<String, Map<String, String>>();

    String otherGroup = "other";
    Map<String, String> otherValues = new HashMap<String, String>();
    input.put(otherGroup, otherValues);

    String sharedKey = "secureKey";
    Map<String, String> secureValues = new HashMap<String, String>();
    secureValues.put(sharedKey, "secureValue");
    input.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureValues);

    String plainKey = "insecureKey";
    Map<String, String> optionValues = new HashMap<String, String>();
    optionValues.put(plainKey, "insecureValue");
    optionValues.put(sharedKey, "secureValue");
    input.put(StepFirstWorkflowExecutor.OPTION_KEY, optionValues);

    StepFirstWorkflowExecutor executor = new StepFirstWorkflowExecutor(testFramework);
    Map<String, Map<String, String>> result = executor.createPrintableDataContext(input);

    // Unrelated data passes through untouched (same instance).
    Assert.assertSame("Expected other data to be present", otherValues, result.get(otherGroup));
    // The secure value must be masked both in the secure group and where it
    // leaked into the regular option group; plain values stay intact.
    Map<String, String> maskedSecure = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY);
    Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, maskedSecure.get(sharedKey));
    Map<String, String> maskedOptions = result.get(StepFirstWorkflowExecutor.OPTION_KEY);
    Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, maskedOptions.get(sharedKey));
    Assert.assertEquals("Expected insecure value to be untouched", optionValues.get(plainKey), maskedOptions.get(plainKey));
}
public void testCreatePrintableDataContextNoDataContext() {
    // A null data context must be tolerated and yield an empty printable map.
    StepFirstWorkflowExecutor executor = new StepFirstWorkflowExecutor(testFramework);
    Map<String, Map<String, String>> printable = executor.createPrintableDataContext(null);
    Assert.assertTrue("Expected empty data context", printable.isEmpty());
}
public void testCreatePrintableDataContextEmptyDataContext() {
    // An empty input context must produce an empty printable map.
    Map<String, Map<String, String>> emptyInput = new HashMap<String, Map<String, String>>();
    StepFirstWorkflowExecutor executor = new StepFirstWorkflowExecutor(testFramework);
    Map<String, Map<String, String>> printable = executor.createPrintableDataContext(emptyInput);
    Assert.assertTrue("Expected empty data context", printable.isEmpty());
}
public void testCreatePrintableDataContextNoSecureData() {
    // With no secure-option group present, everything passes through as-is.
    Map<String, Map<String, String>> input = new HashMap<String, Map<String, String>>();

    String otherGroup = "other";
    Map<String, String> otherValues = new HashMap<String, String>();
    input.put(otherGroup, otherValues);

    String plainKey = "insecureKey";
    Map<String, String> optionValues = new HashMap<String, String>();
    optionValues.put(plainKey, "insecureValue");
    input.put(StepFirstWorkflowExecutor.OPTION_KEY, optionValues);

    StepFirstWorkflowExecutor executor = new StepFirstWorkflowExecutor(testFramework);
    Map<String, Map<String, String>> result = executor.createPrintableDataContext(input);

    Assert.assertSame("Expected other data to be present", otherValues, result.get(otherGroup));
    Map<String, String> resultOptions = result.get(StepFirstWorkflowExecutor.OPTION_KEY);
    Assert.assertEquals("Expected insecure value to be untouched", optionValues.get(plainKey), resultOptions.get(plainKey));
}
public void testCreatePrintableDataContextNoRegularData() {
    // With no regular option group, only the secure group needs masking.
    Map<String, Map<String, String>> input = new HashMap<String, Map<String, String>>();

    String otherGroup = "other";
    Map<String, String> otherValues = new HashMap<String, String>();
    input.put(otherGroup, otherValues);

    String sharedKey = "secureKey";
    Map<String, String> secureValues = new HashMap<String, String>();
    secureValues.put(sharedKey, "secureValue");
    input.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureValues);

    StepFirstWorkflowExecutor executor = new StepFirstWorkflowExecutor(testFramework);
    Map<String, Map<String, String>> result = executor.createPrintableDataContext(input);

    Assert.assertSame("Expected other data to be present", otherValues, result.get(otherGroup));
    Map<String, String> maskedSecure = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY);
    Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, maskedSecure.get(sharedKey));
}
@SuppressWarnings("unchecked")
public void testExecuteWorkflowUsesPrintableDataContext() {
    // Verifies that when the executor logs the data context it logs the
    // masked "printable" variant, never the raw context (which may contain
    // secure option values).
    ExecutionListener listener = Mockito.mock(ExecutionListener.class);
    StepExecutionContext context = Mockito.mock(StepExecutionContext.class);
    Mockito.when(context.getExecutionListener()).thenReturn(listener);
    // Marker toString() values let us detect which map reached the log output.
    String printableContextToString = "this is hopefully some string that won't appear elsewhere";
    Map<String, Map<String, String>> printableContext = Mockito.mock(Map.class);
    Mockito.when(printableContext.toString()).thenReturn(printableContextToString);
    String dataContextToString = "this is another magic string that hopefully won't appear elsewhere";
    Map<String, Map<String, String>> dataContext = Mockito.mock(Map.class);
    Mockito.when(dataContext.toString()).thenReturn(dataContextToString);
    Mockito.when(context.getDataContext()).thenReturn(dataContext);
    StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
    strategy = Mockito.spy(strategy);
    // Stub the printable-context factory on the spy so the raw context maps
    // deterministically to our marker map.
    Mockito.doReturn(printableContext).when(strategy).createPrintableDataContext(Mockito.same(dataContext));
    WorkflowExecutionItem item = Mockito.mock(WorkflowExecutionItem.class);
    IWorkflow workflow = Mockito.mock(IWorkflow.class);
    Mockito.doReturn(workflow).when(item).getWorkflow();
    strategy.executeWorkflowImpl(context, item);
    // Capture every logged line and inspect those carrying the data-context
    // prefix: they must contain the printable marker, not the raw one.
    ArgumentCaptor<String> logLineCaptor = ArgumentCaptor.forClass(String.class);
    Mockito.verify(listener, Mockito.atLeastOnce()).log(Mockito.anyInt(), logLineCaptor.capture());
    for (String line : logLineCaptor.getAllValues()) {
        if (line.startsWith(StepFirstWorkflowExecutor.DATA_CONTEXT_PREFIX)) {
            Assert.assertTrue("Expected printable data context string.", line.contains(printableContextToString));
            Assert.assertFalse("Not expecting raw data context string.", line.contains(dataContextToString));
        }
    }
}
}
|
jgpacker/rundeck
|
core/src/test/java/com/dtolabs/rundeck/core/execution/workflow/TestStepFirstWorkflowStrategy.java
|
Java
|
apache-2.0
| 100,809 |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.hooks.rtc.network;
import java.net.URI;
import org.apache.http.client.methods.HttpPost;
/**
 * An HTTP PATCH request. Apache HttpClient's {@link HttpPost} carries the
 * entity-enclosing behaviour we need; only the method name differs, so this
 * class simply overrides {@link #getMethod()} to return {@code PATCH}.
 */
public class HttpPatch extends HttpPost {

  /** Creates a PATCH request with no target URI set yet. */
  public HttpPatch() {
  }

  /**
   * Creates a PATCH request for the given URI string.
   *
   * @param uri the request target
   */
  public HttpPatch(String uri) {
    super(uri);
  }

  /**
   * Creates a PATCH request for the given URI.
   *
   * @param uri the request target
   */
  public HttpPatch(URI uri) {
    super(uri);
  }

  /** Returns the literal HTTP method name, {@code "PATCH"}. */
  @Override
  public String getMethod() {
    return "PATCH";
  }
}
|
GerritCodeReview/plugins_hooks-rtc
|
src/main/java/com/googlesource/gerrit/plugins/hooks/rtc/network/HttpPatch.java
|
Java
|
apache-2.0
| 999 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.runtime;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.io.IOReadableWritable;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.io.network.api.reader.RecordReader;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.util.TestLogger;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/**
* Manually test the throughput of the network stack.
*/
@Ignore
public class NetworkStackThroughputITCase extends TestLogger {

    private static final Logger LOG = LoggerFactory.getLogger(NetworkStackThroughputITCase.class);

    // Configuration keys understood by TestBaseWrapper and the invokables.
    private static final String DATA_VOLUME_GB_CONFIG_KEY = "data.volume.gb";
    private static final String USE_FORWARDER_CONFIG_KEY = "use.forwarder";
    private static final String PARALLELISM_CONFIG_KEY = "num.subtasks";
    private static final String NUM_SLOTS_PER_TM_CONFIG_KEY = "num.slots.per.tm";
    private static final String IS_SLOW_SENDER_CONFIG_KEY = "is.slow.sender";
    private static final String IS_SLOW_RECEIVER_CONFIG_KEY = "is.slow.receiver";

    // Artificial slowdown: a "slow" sender/receiver sleeps IS_SLOW_SLEEP_MS
    // once every IS_SLOW_EVERY_NUM_RECORDS records.
    private static final int IS_SLOW_SLEEP_MS = 10;
    private static final int IS_SLOW_EVERY_NUM_RECORDS = (2 * 32 * 1024) / SpeedTestRecord.RECORD_SIZE;

    // ------------------------------------------------------------------------

    // wrapper to reuse JavaProgramTestBase code in runs via main()
    private static class TestBaseWrapper extends JavaProgramTestBase {

        private int dataVolumeGb;
        private boolean useForwarder;
        private boolean isSlowSender;
        private boolean isSlowReceiver;
        private int parallelism;

        /**
         * Reads all run parameters from the given config and sizes the mini
         * cluster accordingly (parallelism must divide evenly into slots).
         */
        public TestBaseWrapper(Configuration config) {
            super(config);

            dataVolumeGb = config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);
            useForwarder = config.getBoolean(USE_FORWARDER_CONFIG_KEY, true);
            isSlowSender = config.getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);
            isSlowReceiver = config.getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);
            parallelism = config.getInteger(PARALLELISM_CONFIG_KEY, 1);

            int numSlots = config.getInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, 1);

            if (parallelism % numSlots != 0) {
                throw new RuntimeException("The test case defines a parallelism that is not a multiple of the slots per task manager.");
            }

            setNumTaskManagers(parallelism / numSlots);
            setTaskManagerNumSlots(numSlots);
        }

        protected JobGraph getJobGraph() throws Exception {
            return createJobGraph(dataVolumeGb, useForwarder, isSlowSender, isSlowReceiver, parallelism);
        }

        /**
         * Builds the job graph: producer -> [optional forwarder ->] consumer,
         * all vertices in one slot sharing group, connected all-to-all with
         * pipelined result partitions.
         */
        private JobGraph createJobGraph(int dataVolumeGb, boolean useForwarder, boolean isSlowSender,
                boolean isSlowReceiver, int numSubtasks) {
            JobGraph jobGraph = new JobGraph("Speed Test");
            SlotSharingGroup sharingGroup = new SlotSharingGroup();

            JobVertex producer = new JobVertex("Speed Test Producer");
            jobGraph.addVertex(producer);
            producer.setSlotSharingGroup(sharingGroup);

            producer.setInvokableClass(SpeedTestProducer.class);
            producer.setParallelism(numSubtasks);
            producer.getConfiguration().setInteger(DATA_VOLUME_GB_CONFIG_KEY, dataVolumeGb);
            producer.getConfiguration().setBoolean(IS_SLOW_SENDER_CONFIG_KEY, isSlowSender);

            JobVertex forwarder = null;
            if (useForwarder) {
                forwarder = new JobVertex("Speed Test Forwarder");
                jobGraph.addVertex(forwarder);
                forwarder.setSlotSharingGroup(sharingGroup);

                forwarder.setInvokableClass(SpeedTestForwarder.class);
                forwarder.setParallelism(numSubtasks);
            }

            JobVertex consumer = new JobVertex("Speed Test Consumer");
            jobGraph.addVertex(consumer);
            consumer.setSlotSharingGroup(sharingGroup);

            consumer.setInvokableClass(SpeedTestConsumer.class);
            consumer.setParallelism(numSubtasks);
            consumer.getConfiguration().setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, isSlowReceiver);

            if (useForwarder) {
                forwarder.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL,
                        ResultPartitionType.PIPELINED);
                consumer.connectNewDataSetAsInput(forwarder, DistributionPattern.ALL_TO_ALL,
                        ResultPartitionType.PIPELINED);
            }
            else {
                consumer.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL,
                        ResultPartitionType.PIPELINED);
            }

            return jobGraph;
        }

        /** Runs the job and logs the measured throughput in MBit/s. */
        @Override
        protected void testProgram() throws Exception {
            JobExecutionResult jer = executor.submitJobAndWait(getJobGraph(), false);

            int dataVolumeGb = this.config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);
            // 1 GB = 8192 MBit.
            long dataVolumeMbit = dataVolumeGb * 8192;
            long runtimeSecs = jer.getNetRuntime(TimeUnit.SECONDS);

            int mbitPerSecond = (int) (((double) dataVolumeMbit) / runtimeSecs);

            LOG.info(String.format("Test finished with throughput of %d MBit/s (runtime [secs]: %d, " +
                    "data volume [gb/mbits]: %d/%d)", mbitPerSecond, runtimeSecs, dataVolumeGb, dataVolumeMbit));
        }
    }

    // ------------------------------------------------------------------------

    /** Emits the configured data volume as a stream of fixed-size records. */
    private static class SpeedTestProducer extends AbstractInvokable {

        @Override
        public void invoke() throws Exception {
            RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));

            try {
                // Determine the amount of data to send per subtask
                int dataVolumeGb = getTaskConfiguration().getInteger(NetworkStackThroughputITCase.DATA_VOLUME_GB_CONFIG_KEY, 1);
                long dataMbPerSubtask = (dataVolumeGb * 1024) / getCurrentNumberOfSubtasks();
                long numRecordsToEmit = (dataMbPerSubtask * 1024 * 1024) / SpeedTestRecord.RECORD_SIZE;

                LOG.info(String.format("%d/%d: Producing %d records (each record: %d bytes, total: %.2f GB)",
                        getIndexInSubtaskGroup() + 1, getCurrentNumberOfSubtasks(), numRecordsToEmit,
                        SpeedTestRecord.RECORD_SIZE, dataMbPerSubtask / 1024.0));

                boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);

                int numRecords = 0;
                // The same record instance is reused for every emit.
                SpeedTestRecord record = new SpeedTestRecord();
                for (long i = 0; i < numRecordsToEmit; i++) {
                    if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
                        Thread.sleep(IS_SLOW_SLEEP_MS);
                    }

                    writer.emit(record);
                }
            }
            finally {
                writer.flush();
            }
        }
    }

    /** Reads records from its input gate and re-emits them unchanged. */
    private static class SpeedTestForwarder extends AbstractInvokable {

        @Override
        public void invoke() throws Exception {
            RecordReader<SpeedTestRecord> reader = new RecordReader<>(
                    getEnvironment().getInputGate(0),
                    SpeedTestRecord.class,
                    getEnvironment().getTaskManagerInfo().getTmpDirectories());

            RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));

            try {
                SpeedTestRecord record;
                while ((record = reader.next()) != null) {
                    writer.emit(record);
                }
            }
            finally {
                reader.clearBuffers();
                writer.flush();
            }
        }
    }

    /** Drains its input gate, optionally sleeping to simulate a slow receiver. */
    private static class SpeedTestConsumer extends AbstractInvokable {

        @Override
        public void invoke() throws Exception {
            RecordReader<SpeedTestRecord> reader = new RecordReader<>(
                    getEnvironment().getInputGate(0),
                    SpeedTestRecord.class,
                    getEnvironment().getTaskManagerInfo().getTmpDirectories());

            try {
                boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);

                int numRecords = 0;
                while (reader.next() != null) {
                    if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
                        Thread.sleep(IS_SLOW_SLEEP_MS);
                    }
                }
            }
            finally {
                reader.clearBuffers();
            }
        }
    }

    /** Fixed-size (128 byte) payload record used for the speed test. */
    private static class SpeedTestRecord implements IOReadableWritable {

        private static final int RECORD_SIZE = 128;

        private final byte[] buf = new byte[RECORD_SIZE];

        public SpeedTestRecord() {
            // Fill with a deterministic byte pattern.
            for (int i = 0; i < RECORD_SIZE; ++i) {
                this.buf[i] = (byte) (i % 128);
            }
        }

        @Override
        public void write(DataOutputView out) throws IOException {
            out.write(this.buf);
        }

        @Override
        public void read(DataInputView in) throws IOException {
            in.readFully(this.buf);
        }
    }

    // ------------------------------------------------------------------------

    /**
     * Runs the throughput measurement for each parameter row:
     * {dataVolumeGb, useForwarder, isSlowSender, isSlowReceiver,
     * parallelism, slotsPerTaskManager}.
     */
    public void testThroughput() throws Exception {
        Object[][] configParams = new Object[][]{
                new Object[]{1, false, false, false, 4, 2},
                new Object[]{1, true, false, false, 4, 2},
                new Object[]{1, true, true, false, 4, 2},
                new Object[]{1, true, false, true, 4, 2},
                new Object[]{2, true, false, false, 4, 2},
                new Object[]{4, true, false, false, 4, 2},
                new Object[]{4, true, false, false, 8, 4},
        };

        for (Object[] p : configParams) {
            Configuration config = new Configuration();
            config.setInteger(DATA_VOLUME_GB_CONFIG_KEY, (Integer) p[0]);
            config.setBoolean(USE_FORWARDER_CONFIG_KEY, (Boolean) p[1]);
            config.setBoolean(IS_SLOW_SENDER_CONFIG_KEY, (Boolean) p[2]);
            config.setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, (Boolean) p[3]);
            config.setInteger(PARALLELISM_CONFIG_KEY, (Integer) p[4]);
            config.setInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, (Integer) p[5]);

            TestBaseWrapper test = new TestBaseWrapper(config);

            // Each parameter row gets its own mini cluster lifecycle.
            test.startCluster();
            System.out.println(Arrays.toString(p));
            test.testProgram();
            test.stopCluster();
        }
    }

    private void runAllTests() throws Exception {
        testThroughput();
        System.out.println("Done.");
    }

    // Entry point for manual runs (the class is @Ignore'd under JUnit).
    public static void main(String[] args) throws Exception {
        new NetworkStackThroughputITCase().runAllTests();
    }
}
|
mtunique/flink
|
flink-tests/src/test/java/org/apache/flink/test/runtime/NetworkStackThroughputITCase.java
|
Java
|
apache-2.0
| 10,771 |
/*
* *************************************************************************
* Copyright (C) FRS Belgium NV ("FRSGlobal"). All rights reserved.
*
* This computer program is protected by copyright law and international
* treaties. Unauthorized reproduction or distribution of this program,
* or any portion of it, may result in severe civil and criminal penalties,
* and will be prosecuted to the maximum extent possible under the law.
* *************************************************************************
*/
package org.cluj.bus.servlet;
import com.google.gson.Gson;
import org.cluj.bus.model.BusSchedule;
import org.cluj.bus.model.BusScheduleDTO;
import org.cluj.bus.model.CategorySchedule;
import org.cluj.bus.services.JPARepository;
import org.cluj.bus.util.ScheduleUtilities;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Servlet that answers bus-schedule requests with a JSON document. Schedules
 * for the requested bus are grouped into categories keyed by their applicable
 * days string, and each category carries its display name, applicable day
 * numbers and parsed start times.
 */
public class BusScheduleServlet extends HttpServlet
{
    private static final Logger LOGGER = Logger.getLogger(BusScheduleServlet.class.getName());

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException
    {
        // GET is handled exactly like POST.
        doPost(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException
    {
        final String busId = httpServletRequest.getParameter(ServletUtils.BUS_ID_PARAMETER_KEY);
        ServletUtils.sendResponse(httpServletResponse, getResponseString(busId));
    }

    /**
     * Loads all schedule rows for the bus and serializes them, grouped by
     * their "days" string, as a JSON {@code BusScheduleDTO}.
     */
    private String getResponseString(String busId)
    {
        final List<BusSchedule> schedules = new JPARepository<>(BusSchedule.class).findAll("busId", busId);
        final Map<String, CategorySchedule> byDays = new HashMap<>();
        for (BusSchedule entry : schedules)
        {
            final String days = entry.getDays();
            CategorySchedule category = byDays.get(days);
            if (category == null)
            {
                // First row for this days-pattern: create and describe it.
                category = new CategorySchedule();
                byDays.put(days, category);
                category.setDisplayName(entry.getCategory());
                category.setApplicableDays(getApplicableDays(days));
            }
            Collection<Date> startTimes = category.getStartTimes();
            if (startTimes == null)
            {
                startTimes = new ArrayList<>();
                category.setStartTimes(startTimes);
            }
            try
            {
                startTimes.add(ScheduleUtilities.getStartTime(entry.getStartTime()));
            }
            catch (ParseException e)
            {
                // Skip the unparseable start time but keep the rest.
                LOGGER.log(Level.SEVERE, "Error parsing start time", e);
            }
        }
        final BusScheduleDTO dto = new BusScheduleDTO();
        dto.setSchedules(byDays.values());
        return new Gson().toJson(dto);
    }

    /**
     * Converts a string of digit characters (one per day) into the list of
     * day numbers it encodes.
     */
    private Collection<Integer> getApplicableDays(String days)
    {
        final List<Integer> result = new ArrayList<>();
        for (int i = 0; i < days.length(); i++)
        {
            result.add(Integer.parseInt(days.substring(i, i + 1)));
        }
        return result;
    }
}
|
abotos/ClujLiveTransit
|
Java/appengine-code/appengine-web-ui/src/java/org/cluj/bus/servlet/BusScheduleServlet.java
|
Java
|
apache-2.0
| 3,589 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadFactory;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;
/**
 * Camel component for Netty 4 based endpoints. Resolves {@code netty4:...}
 * URIs into {@link NettyEndpoint}s and owns the lazily created, shared
 * {@link EventExecutorGroup} used when the configuration requests an
 * executor service.
 */
public class NettyComponent extends UriEndpointComponent {
    private NettyConfiguration configuration;
    // volatile: read outside the lock in doStop(); lazily created under the
    // component lock in getExecutorService().
    private volatile EventExecutorGroup executorService;

    public NettyComponent() {
        super(NettyEndpoint.class);
    }

    public NettyComponent(Class<? extends Endpoint> endpointClass) {
        super(endpointClass);
    }

    public NettyComponent(CamelContext context) {
        super(context, NettyEndpoint.class);
    }

    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // Start from a copy of the component-level configuration (if set) so
        // per-endpoint options never mutate the shared defaults.
        NettyConfiguration config;
        if (configuration != null) {
            config = configuration.copy();
        } else {
            config = new NettyConfiguration();
        }
        config = parseConfiguration(config, remaining, parameters);

        // merge any custom bootstrap configuration on the config
        NettyServerBootstrapConfiguration bootstrapConfiguration = resolveAndRemoveReferenceParameter(parameters, "bootstrapConfiguration", NettyServerBootstrapConfiguration.class);
        if (bootstrapConfiguration != null) {
            Map<String, Object> options = new HashMap<String, Object>();
            if (IntrospectionSupport.getProperties(bootstrapConfiguration, options, null, false)) {
                IntrospectionSupport.setProperties(getCamelContext().getTypeConverter(), config, options);
            }
        }

        // validate config
        config.validateConfiguration();

        NettyEndpoint nettyEndpoint = new NettyEndpoint(remaining, this, config);
        setProperties(nettyEndpoint.getConfiguration(), parameters);
        return nettyEndpoint;
    }

    /**
     * Parses the configuration
     *
     * @return the parsed and valid configuration to use
     */
    protected NettyConfiguration parseConfiguration(NettyConfiguration configuration, String remaining, Map<String, Object> parameters) throws Exception {
        configuration.parseURI(new URI(remaining), parameters, this, "tcp", "udp");
        return configuration;
    }

    public NettyConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(NettyConfiguration configuration) {
        this.configuration = configuration;
    }

    public void setExecutorService(EventExecutorGroup executorService) {
        this.executorService = executorService;
    }

    /**
     * Returns the shared executor group, creating it on first use.
     * Synchronized so concurrent callers cannot create two groups.
     */
    public synchronized EventExecutorGroup getExecutorService() {
        if (executorService == null) {
            executorService = createExecutorService();
        }
        return executorService;
    }

    @Override
    protected void doStart() throws Exception {
        if (configuration == null) {
            configuration = new NettyConfiguration();
        }
        if (configuration.isUsingExecutorService() && executorService == null) {
            // Fix: create via the synchronized accessor instead of assigning
            // the field directly, so a concurrent getExecutorService() call
            // cannot race doStart() into building two executor groups.
            getExecutorService();
        }
        super.doStart();
    }

    protected EventExecutorGroup createExecutorService() {
        // Provide the executor service for the application
        // and use a Camel thread factory so we have consistent thread namings
        // we should use a shared thread pool as recommended by Netty
        String pattern = getCamelContext().getExecutorServiceManager().getThreadNamePattern();
        ThreadFactory factory = new CamelThreadFactory(pattern, "NettyEventExecutorGroup", true);
        return new DefaultEventExecutorGroup(configuration.getMaximumPoolSize(), factory);
    }

    @Override
    protected void doStop() throws Exception {
        if (executorService != null) {
            // Shut the group down through Camel so thread cleanup is tracked.
            getCamelContext().getExecutorServiceManager().shutdownNow(executorService);
            executorService = null;
        }
        super.doStop();
    }
}
|
logzio/camel
|
components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyComponent.java
|
Java
|
apache-2.0
| 5,124 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.valves.rewrite;
import java.nio.charset.Charset;
/**
 * Resolver abstract class.
 * <p>
 * Supplies the variable lookups used when evaluating rewrite rules:
 * generic variables, SSL and HTTP related variables, and resource tests.
 */
public abstract class Resolver {

    /** Resolves a rewrite variable by name. */
    public abstract String resolve(String key);

    /**
     * Resolves an "environment" value for the given key. Note: this default
     * implementation reads a JVM system property, not an OS environment
     * variable.
     */
    public String resolveEnv(String key) {
        return System.getProperty(key);
    }

    /** Resolves an SSL related variable by name. */
    public abstract String resolveSsl(String key);

    /** Resolves an HTTP related variable by name. */
    public abstract String resolveHttp(String key);

    /** Evaluates a resource test of the given type against the given name. */
    public abstract boolean resolveResource(int type, String name);

    /**
     * @return The name of the encoding to use to %nn encode URIs
     *
     * @deprecated This will be removed in Tomcat 9.0.x
     */
    @Deprecated
    public abstract String getUriEncoding();

    /** @return the charset to use when %nn encoding URIs. */
    public abstract Charset getUriCharset();
}
|
IAMTJW/Tomcat-8.5.20
|
tomcat-8.5.20/java/org/apache/catalina/valves/rewrite/Resolver.java
|
Java
|
apache-2.0
| 1,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.droids.impl;
import java.util.Date;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import org.apache.droids.api.DelayTimer;
import org.apache.droids.api.Droid;
import org.apache.droids.api.Task;
import org.apache.droids.api.TaskExceptionHandler;
import org.apache.droids.api.TaskExceptionResult;
import org.apache.droids.api.TaskMaster;
import org.apache.droids.api.Worker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link TaskMaster} that drains the task queue sequentially on the calling
 * thread, one worker invocation per task. Completion is signalled through an
 * internal monitor so that {@link #awaitTermination(long, TimeUnit)} can block
 * until {@link #start(Queue, Droid)} finishes.
 *
 * <p>Thread-safety: {@code start} is {@code synchronized}, so only one drain
 * runs at a time; the counters are written by that single thread and published
 * to readers via {@code volatile}.</p>
 */
public class SequentialTaskMaster<T extends Task> implements TaskMaster<T>
{
    private static final Logger LOG = LoggerFactory.getLogger(SequentialTaskMaster.class);

    /** Monitor guarding {@link #completed}; used for the awaitTermination rendezvous. */
    private final Object mutex;

    private volatile boolean completed;
    private volatile Date startedWorking = null;
    private volatile Date finishedWorking = null;
    private volatile int completedTask = 0;
    private volatile T lastCompletedTask = null;
    private volatile ExecutionState state = ExecutionState.INITIALIZED;

    private DelayTimer delayTimer = null;
    private TaskExceptionHandler exHandler = null;

    public SequentialTaskMaster() {
        super();
        this.mutex = new Object();
    }

    /**
     * Drains {@code queue} sequentially, executing each non-aborted task on a
     * fresh worker from {@code droid}. An optional {@link DelayTimer} throttles
     * between tasks. Exceptions are routed through the configured
     * {@link TaskExceptionHandler}; a {@code FATAL} result stops the drain.
     * Always marks completion and notifies waiters, even on fatal termination.
     */
    @Override
    public synchronized void start(final Queue<T> queue, final Droid<T> droid) {
        this.completed = false;
        this.startedWorking = new Date();
        this.finishedWorking = null;
        this.completedTask = 0;
        this.state = ExecutionState.RUNNING;

        boolean terminated = false;
        while (!terminated) {
            T task = queue.poll();
            if (task == null) {
                break; // queue exhausted
            }
            if (delayTimer != null) {
                long delay = delayTimer.getDelayMillis();
                if (delay > 0) {
                    try {
                        Thread.sleep(delay);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt flag instead of silently
                        // swallowing it, so callers/executors can observe the
                        // interruption request.
                        Thread.currentThread().interrupt();
                    }
                }
            }
            Worker<T> worker = droid.getNewWorker();
            try {
                if (!task.isAborted()) {
                    worker.execute(task);
                }
                completedTask++;
                lastCompletedTask = task;
            } catch (Exception ex) {
                // Default to WARN when no handler is configured.
                TaskExceptionResult result = TaskExceptionResult.WARN;
                if (exHandler != null) {
                    result = exHandler.handleException(ex);
                }
                switch (result) {
                case WARN:
                    LOG.warn(ex.toString() + " " + task.getId());
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(ex.toString(), ex);
                    }
                    break;
                case FATAL:
                    LOG.error(ex.getMessage(), ex);
                    terminated = true;
                    break;
                }
            }
        }
        finishedWorking = new Date();
        this.state = ExecutionState.STOPPED;
        droid.finished();
        synchronized (mutex) {
            completed = true;
            mutex.notifyAll();
        }
    }

    @Override
    public final void setExceptionHandler(TaskExceptionHandler exHandler) {
        this.exHandler = exHandler;
    }

    @Override
    public final void setDelayTimer(DelayTimer delayTimer) {
        this.delayTimer = delayTimer;
    }

    /** @return {@code true} while a drain has started but not yet finished. */
    public boolean isWorking() {
        return startedWorking != null && finishedWorking == null;
    }

    @Override
    public Date getStartTime() {
        return startedWorking;
    }

    @Override
    public Date getFinishedWorking() {
        return finishedWorking;
    }

    @Override
    public long getCompletedTasks() {
        return completedTask;
    }

    @Override
    public T getLastCompletedTask() {
        return lastCompletedTask;
    }

    /**
     * Blocks until the drain completes or the timeout elapses.
     *
     * @param timeout maximum time to wait (negative is clamped to 0; 0 waits
     *                without a deadline, matching the previous behaviour)
     * @param unit    unit of {@code timeout}
     * @return {@code true} if completion was observed, {@code false} on timeout
     * @throws InterruptedException if interrupted while waiting
     */
    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
        if (timeout < 0) {
            timeout = 0;
        }
        // FIX: the previous implementation seeded the first wait with the raw
        // timeout value in the caller's unit (e.g. 5 for 5 SECONDS == 5 ms),
        // not milliseconds. Convert once and reuse.
        long timeoutMillis = unit.toMillis(timeout);
        synchronized (this.mutex) {
            long deadline = System.currentTimeMillis() + timeoutMillis;
            long remaining = timeoutMillis;
            while (!completed) {
                this.mutex.wait(remaining);
                remaining = deadline - System.currentTimeMillis();
                // FIX: only report a timeout if the work has genuinely not
                // completed; previously a wakeup after the deadline returned
                // false even when completion had been signalled.
                if (!completed && remaining <= 0) {
                    return false;
                }
            }
        }
        return true;
    }

    @Override
    public ExecutionState getExecutionState() {
        return state;
    }
}
|
fogbeam/Heceta_droids
|
droids-core/src/main/java/org/apache/droids/impl/SequentialTaskMaster.java
|
Java
|
apache-2.0
| 4,864 |
package com.wjyup.coolq.util;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.util.DigestUtils;
import java.nio.charset.StandardCharsets;
/**
* 发送消息工具类
* @author WJY
*/
/**
 * Utility for delivering JSON messages to the configured endpoint.
 */
public class SendMessageUtil {

    private static Logger log = LogManager.getLogger(SendMessageUtil.class);

    /**
     * Sends a JSON payload and returns the remote response.
     * When token auth is enabled, an {@code authTime}/{@code authToken} pair
     * (MD5 of {@code key:authTime}) is injected into the payload first.
     *
     * @param message JSON message to send
     * @return the response body, or {@code null} when the transport is not
     *         HTTP or any error occurs (errors are logged, not thrown)
     */
    public static String sendSocketData(String message){
        try {
            ConfigCache configCache = SpringContext.getConfigCache();
            // Only the HTTP transport is supported here; anything else falls
            // through to the null return below.
            if (StaticConf.MSG_SEND_TYPE_HTTP.equalsIgnoreCase(configCache.getMSG_SEND_TYPE())) {
                String endpoint = String.format("http://%s:%s", configCache.getHTTP_HOST(), configCache.getHTTP_PORT());
                if (configCache.isUSE_TOKEN()) {
                    // Token = MD5("key:epochSeconds"), both fields echoed in the payload.
                    long authTime = System.currentTimeMillis() / 1000;
                    String digestInput = configCache.getKEY() + ":" + authTime;
                    String authToken = DigestUtils.md5DigestAsHex(digestInput.getBytes(StandardCharsets.UTF_8));
                    JSONObject payload = JSON.parseObject(message);
                    payload.put("authTime", authTime);
                    payload.put("authToken", authToken);
                    message = payload.toJSONString();
                }
                log.debug("发送的json文本:"+message);
                try{
                    String result = WebUtil.post(endpoint, message);
                    log.debug("返回结果:" + result);
                    return result;
                }catch (Exception e){
                    // Delivery failure: log and fall through to the null return.
                    log.error(e.getMessage(),e);
                }
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }
}
|
ForeverWJY/CoolQ_Java_Plugin
|
src/main/java/com/wjyup/coolq/util/SendMessageUtil.java
|
Java
|
apache-2.0
| 1,815 |
/**
* Copyright (c) 2008-2010 Andrey Somov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml.tokens;
import java.util.List;
import org.yaml.snakeyaml.error.Mark;
import org.yaml.snakeyaml.error.YAMLException;
/**
* @see <a href="http://pyyaml.org/wiki/PyYAML">PyYAML</a> for more information
*/
/**
 * Token for a YAML directive line ({@code %YAML ...} / {@code %TAG ...}).
 * The value, when present, must hold exactly two entries (e.g. major/minor
 * version, or tag handle/prefix).
 */
public final class DirectiveToken<T> extends Token {
    private final String name;
    private final List<T> value;

    public DirectiveToken(String name, List<T> value, Mark startMark, Mark endMark) {
        super(startMark, endMark);
        this.name = name;
        // A non-null value must be a pair; reject anything else up front.
        if (value != null && value.size() != 2) {
            throw new YAMLException("Two strings must be provided instead of "
                    + value.size());
        }
        this.value = value;
    }

    /** @return the directive name (e.g. "YAML" or "TAG"). */
    public String getName() {
        return this.name;
    }

    /** @return the two-element directive value, or {@code null} if absent. */
    public List<T> getValue() {
        return this.value;
    }

    @Override
    protected String getArguments() {
        if (value == null) {
            return "name=" + name;
        }
        return "name=" + name + ", value=[" + value.get(0) + ", " + value.get(1) + "]";
    }

    @Override
    public Token.ID getTokenId() {
        return ID.Directive;
    }
}
|
spariev/snakeyaml
|
src/main/java/org/yaml/snakeyaml/tokens/DirectiveToken.java
|
Java
|
apache-2.0
| 1,789 |
/*
Copyright 2018 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.http.cxf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Iterator;
import java.util.Properties;
import javax.activation.DataHandler;
import javax.xml.soap.AttachmentPart;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.MimeHeader;
import javax.xml.soap.SOAPConstants;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.stream.StreamSource;
import javax.xml.ws.WebServiceContext;
import org.apache.soap.util.mime.ByteArrayDataSource;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.w3c.dom.Element;
import nl.nn.adapterframework.core.PipeLineSession;
import nl.nn.adapterframework.stream.Message;
import nl.nn.adapterframework.util.DomBuilderException;
import nl.nn.adapterframework.util.Misc;
import nl.nn.adapterframework.util.XmlUtils;
@RunWith(MockitoJUnitRunner.class)
/**
 * Tests for the CXF SOAP provider: round-tripping SOAP 1.1/1.2 messages,
 * attachment handling in both directions (session keys vs. SOAP attachment
 * parts), and extraction of the SOAPAction into the pipeline session.
 *
 * All message fixtures are loaded from the {@code /Soap/} test resources.
 */
public class SoapProviderTest {

    @BeforeClass
    public static void setUp() {
        // Skip the whole class on IBM JDKs; see the VerifyError transcript below.
        Properties prop = System.getProperties();
        String vendor = prop.getProperty("java.vendor");
        System.out.println("JVM Vendor : " + vendor);
        assumeThat(vendor, not(equalTo("IBM Corporation")));
        /*
         * The above exclusion of IBM JDK to work around the below error, seen when executing these tests with an IBM JDK:
         *
         java.lang.VerifyError: JVMVRFY012 stack shape inconsistent; class=com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl, method=createDocumentFragment()Lorg/w3c/dom/DocumentFragment;, pc=5; Type Mismatch, argument 0 in signature com/sun/xml/messaging/saaj/soap/SOAPDocumentFragment.<init>:(Lcom/sun/org/apache/xerces/internal/dom/CoreDocumentImpl;)V does not match
         Exception Details:
         Location:
         com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl.createDocumentFragment()Lorg/w3c/dom/DocumentFragment; @5: JBinvokespecial
         Reason:
         Type 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' (current frame, stack[2]) is not assignable to 'com/sun/org/apache/xerces/internal/dom/CoreDocumentImpl'
         Current Frame:
         bci: @5
         flags: { }
         locals: { 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' }
         stack: { 'uninitialized', 'uninitialized', 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' }
         at com.sun.xml.messaging.saaj.soap.SOAPPartImpl.<init>(SOAPPartImpl.java:106)
         at com.sun.xml.messaging.saaj.soap.ver1_1.SOAPPart1_1Impl.<init>(SOAPPart1_1Impl.java:70)
         at com.sun.xml.messaging.saaj.soap.ver1_1.Message1_1Impl.getSOAPPart(Message1_1Impl.java:90)
         at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:109)
         at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:98)
         at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:94)
         at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.sendMessageWithInputStreamAttachmentsTest(SoapProviderTest.java:228)
         */
    }

    // Spied so individual tests can stuff headers into its MessageContext.
    @Spy
    WebServiceContext webServiceContext = new WebServiceContextStub();

    // The stub under test; Mockito injects the spied WebServiceContext above.
    @InjectMocks
    private SoapProviderStub SOAPProvider = new SoapProviderStub();

    // Inbound attachment fixture (attached to the request message).
    private final String ATTACHMENT_CONTENT = "<dummy/>";
    private final String ATTACHMENT_MIMETYPE = "plain/text";

    // Outbound attachment fixture (returned by the provider via session keys).
    private final String ATTACHMENT2_CONTENT = "<I'm a pdf file/>";
    private final String ATTACHMENT2_NAME = "document.pdf";
    private final String ATTACHMENT2_MIMETYPE = "application/pdf";

    // Multipart descriptor pointing the provider at the "part_file" session key.
    private final String MULTIPART_XML = "<parts><part type=\"file\" name=\""+ATTACHMENT2_NAME+"\" "
            + "sessionKey=\"part_file\" size=\"72833\" "
            + "mimeType=\""+ATTACHMENT2_MIMETYPE+"\"/></parts>";

    private final String BASEDIR = "/Soap/";

    /** Opens a test resource under {@link #BASEDIR}; throws if it is missing. */
    protected InputStream getFile(String file) throws IOException {
        URL url = this.getClass().getResource(BASEDIR+file);
        if (url == null) {
            throw new IOException("file not found");
        }
        return url.openStream();
    }

    private SOAPMessage createMessage(String filename) throws IOException, SOAPException {
        return createMessage(filename, false, false);
    }

    /**
     * Builds a SOAP message from a resource file.
     *
     * @param addAttachment when true, attaches {@link #ATTACHMENT_CONTENT}
     * @param isSoap1_1     selects the SOAP 1.1 vs 1.2 protocol factory
     */
    private SOAPMessage createMessage(String filename, boolean addAttachment, boolean isSoap1_1) throws IOException, SOAPException {
        MessageFactory factory = MessageFactory.newInstance(isSoap1_1 ? SOAPConstants.SOAP_1_1_PROTOCOL : SOAPConstants.SOAP_1_2_PROTOCOL);
        SOAPMessage soapMessage = factory.createMessage();
        StreamSource streamSource = new StreamSource(getFile(filename));
        soapMessage.getSOAPPart().setContent(streamSource);
        if(addAttachment) {
            InputStream fis = new ByteArrayInputStream(ATTACHMENT_CONTENT.getBytes());
            DataHandler dataHander = new DataHandler(new ByteArrayDataSource(fis, ATTACHMENT_MIMETYPE));
            AttachmentPart part = soapMessage.createAttachmentPart(dataHander);
            soapMessage.addAttachmentPart(part);
        }
        return soapMessage;
    }

    /**
     * Asserts that the inbound attachment fixture was exposed to the pipeline:
     * an "attachments" XML entry in the session pointing at a session key that
     * holds the attachment content, plus its mime header.
     */
    private void assertAttachmentInSession(PipeLineSession session) throws DomBuilderException, IOException {
        assertNotNull(session.get("mimeHeaders"));
        assertNotNull(session.get("attachments"));
        Element xml = XmlUtils.buildElement((String) session.get("attachments"));
        Element attachment = XmlUtils.getFirstChildTag(xml, "attachment");
        assertNotNull(attachment);
        //Retrieve sessionkey the attachment was stored in
        String sessionKey = XmlUtils.getChildTagAsString(attachment, "sessionKey");
        assertNotNull(sessionKey);
        Message attachmentMessage = session.getMessage(sessionKey);
        //Verify that the attachment sent, was received properly
        assertEquals(ATTACHMENT_CONTENT, attachmentMessage.asString());
        //Verify the content type
        Element mimeTypes = XmlUtils.getFirstChildTag(attachment, "mimeHeaders");
        mimeTypes.getElementsByTagName("mimeHeader");
        //TODO check what happens when multiple attachments are returned...
        String mimeType = XmlUtils.getChildTagAsString(mimeTypes, "mimeHeader");
        assertEquals(ATTACHMENT_MIMETYPE, mimeType);
    }

    /**
     * Asserts that the reply message carries exactly one attachment matching
     * the outbound fixture (content id = filename, content, content type).
     */
    private void assertAttachmentInReceivedMessage(SOAPMessage message) throws SOAPException, IOException {
        assertEquals(1, message.countAttachments());
        Iterator<?> attachmentParts = message.getAttachments();
        while (attachmentParts.hasNext()) {
            AttachmentPart soapAttachmentPart = (AttachmentPart)attachmentParts.next();
            String attachment = Misc.streamToString(soapAttachmentPart.getRawContent());
            //ContentID should be equal to the filename
            assertEquals(ATTACHMENT2_NAME, soapAttachmentPart.getContentId());
            //Validate the attachment's content
            assertEquals(ATTACHMENT2_CONTENT, attachment);
            //Make sure at least the content-type header has been set
            Iterator<?> headers = soapAttachmentPart.getAllMimeHeaders();
            String contentType = null;
            while (headers.hasNext()) {
                MimeHeader header = (MimeHeader) headers.next();
                if("Content-Type".equalsIgnoreCase(header.getName()))
                    contentType = header.getValue();
            }
            assertEquals(ATTACHMENT2_MIMETYPE, contentType);
        }
    }

    @Test
    /**
     * Receive SOAP message without attachment
     * Reply SOAP message without attachment
     * @throws Throwable
     */
    public void simpleMessageTest() throws Throwable {
        SOAPMessage request = createMessage("correct-soapmsg.xml");
        SOAPMessage message = SOAPProvider.invoke(request);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
        // Normalize line endings before comparing serialized XML.
        assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
        PipeLineSession session = SOAPProvider.getSession();
        assertNotNull(session.get("mimeHeaders"));
        assertNotNull(session.get("attachments"));
        assertEquals("<attachments/>", session.get("attachments").toString().trim());
    }

    @Test
    /**
     * Receive faulty message without attachment
     * @throws Throwable
     */
    public void errorMessageTest() throws Throwable {
        SOAPMessage message = SOAPProvider.invoke(null);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        assertTrue(result.indexOf("SOAPMessage is null") > 0);
    }

    @Test
    /**
     * Receive SOAP message with MTOM attachment
     * Reply SOAP message without attachment
     * @throws Throwable
     */
    public void receiveMessageWithAttachmentsTest() throws Throwable {
        SOAPMessage request = createMessage("correct-soapmsg.xml", true, false);
        SOAPMessage message = SOAPProvider.invoke(request);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
        assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
        PipeLineSession session = SOAPProvider.getSession();
        assertAttachmentInSession(session);
    }

    @Test
    /**
     * Receive SOAP message without attachment
     * Reply SOAP message with (InputStream) attachment
     * @throws Throwable
     */
    public void sendMessageWithInputStreamAttachmentsTest() throws Throwable {
        SOAPMessage request = createMessage("correct-soapmsg.xml");
        PipeLineSession session = new PipeLineSession();
        session.put("attachmentXmlSessionKey", MULTIPART_XML);
        session.put("part_file", new ByteArrayInputStream(ATTACHMENT2_CONTENT.getBytes()));
        SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
        SOAPProvider.setSession(session);
        SOAPMessage message = SOAPProvider.invoke(request);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
        assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
        assertAttachmentInReceivedMessage(message);
    }

    @Test
    /**
     * Receive SOAP message without attachment
     * Reply SOAP message with (String) attachment
     * @throws Throwable
     */
    public void sendMessageWithStringAttachmentsTest() throws Throwable {
        SOAPMessage request = createMessage("correct-soapmsg.xml");
        PipeLineSession session = new PipeLineSession();
        session.put("attachmentXmlSessionKey", MULTIPART_XML);
        session.put("part_file", ATTACHMENT2_CONTENT);
        SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
        SOAPProvider.setSession(session);
        SOAPMessage message = SOAPProvider.invoke(request);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
        assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
        assertAttachmentInReceivedMessage(message);
    }

    @Test
    /**
     * Receive SOAP message with attachment
     * Reply SOAP message with attachment
     * @throws Throwable
     */
    public void receiveAndSendMessageWithAttachmentsTest() throws Throwable {
        SOAPMessage request = createMessage("correct-soapmsg.xml", true, false);
        PipeLineSession session = new PipeLineSession();
        session.put("attachmentXmlSessionKey", MULTIPART_XML);
        session.put("part_file", ATTACHMENT2_CONTENT);
        SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
        SOAPProvider.setSession(session);
        SOAPMessage message = SOAPProvider.invoke(request);
        String result = XmlUtils.nodeToString(message.getSOAPPart());
        String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
        assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
        //Validate an attachment was sent to the listener
        assertAttachmentInSession(SOAPProvider.getSession());
        //Validate the listener returned an attachment back
        assertAttachmentInReceivedMessage(message);
    }

    @Test
    public void soapActionInSessionKeySOAP1_1() throws Throwable {
        // Soap protocol 1.1
        SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
        String value = "1.1-SoapAction";
        webServiceContext.getMessageContext().put("SOAPAction", value);
        SOAPProvider.invoke(request);
        // Clear the shared (spied) context so state does not leak across tests.
        webServiceContext.getMessageContext().clear();
        assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void noSoapActionInSessionKeySOAP1_1() throws Throwable {
        // Soap protocol 1.1
        SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
        SOAPProvider.invoke(request);
        assertNull(SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void soap1_1MessageWithActionInContentTypeHeader() throws Throwable {
        // Soap protocol 1.1: the Content-Type action parameter must be ignored.
        SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
        String value = "ActionInContentTypeHeader";
        webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value);
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertNull(SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void soapActionInSessionKeySOAP1_2ActionIsTheLastItem() throws Throwable {
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        String value = "SOAP1_2ActionIsTheLastItem";
        webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value);
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void soapActionInSessionKeySOAP1_2ActionIsInMiddle() throws Throwable {
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        String value = "SOAP1_2ActionIsInMiddle";
        webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value+";somethingelse");
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void soapActionInSessionKeySOAP1_2ActionIsAtTheBeginning() throws Throwable {
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        String value = "SOAP1_2ActionIsAtTheBeginning";
        webServiceContext.getMessageContext().put("Content-Type", "action="+value+";application/soap+xml; somethingelse");
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void noSoapActionInSessionKey1_2() throws Throwable {
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; somethingelse");
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertNull(SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void emptySoapActionInSessionKey1_2() throws Throwable {
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action=; somethingelse");
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertNull(SOAPProvider.getSession().get("SOAPAction"));
    }

    @Test
    public void soap1_2MessageWithSOAPActionHeader() throws Throwable {
        // Soap protocol 1.2: a bare SOAPAction header must be ignored.
        SOAPMessage request = createMessage("soapmsg1_2.xml");
        webServiceContext.getMessageContext().put("SOAPAction", "action");
        SOAPProvider.invoke(request);
        webServiceContext.getMessageContext().clear();
        assertNull(SOAPProvider.getSession().get("SOAPAction"));
    }
}
|
ibissource/iaf
|
core/src/test/java/nl/nn/adapterframework/http/cxf/SoapProviderTest.java
|
Java
|
apache-2.0
| 16,059 |
package mx.emite.sdk.scot.request;
import java.util.List;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import lombok.Builder;
import lombok.Data;
import lombok.Singular;
import mx.emite.sdk.cfdi32.anotaciones.Rfc;
import mx.emite.sdk.scot.request.extra.SucursalInfo;
/**
 * Request payload for registering (alta) branch offices in Scot(c).
 * Lombok generates the accessors ({@code @Data}) and builder ({@code @Builder}).
 */
@Data
@Builder
public class SucursalesAltaRequest {

    /**
     * <b>Integrator</b> token obtained from the Scot(c) token service.
     * -- SETTER --
     *
     * @param token
     *            <b>Integrator</b> token obtained from Scot(c)
     *
     */
    @NotNull
    private String token;

    /**
     * @param rfc issuer RFC; when left blank, all issuers are queried
     */
    @Rfc
    private String rfc;

    /**
     * @param sucursales list of branch offices to register
     */
    @Valid @NotEmpty @Singular("sucursal")
    private List<SucursalInfo> sucursales;

    /**
     * Whether to update the branch office if it is already registered.
     * NOTE(review): this field is {@code public} while its siblings are
     * {@code private}; callers may be accessing it directly, so the
     * visibility is left unchanged — confirm and tighten if possible.
     */
    @NotNull
    public Boolean modificar;
}
|
emite-mx/ef-sdk-java
|
ef-sdk-java/src/main/java/mx/emite/sdk/scot/request/SucursalesAltaRequest.java
|
Java
|
apache-2.0
| 1,030 |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rust;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPlatforms;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.Flavored;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.AbstractDescriptionArg;
import com.facebook.buck.rules.BinaryWrapperRule;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.ImplicitDepsInferringDescription;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.ToolProvider;
import com.facebook.buck.versions.VersionRoot;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;
/**
 * Buck description for {@code rust_test} targets: builds the crate as a test
 * binary (an internal "unittest" flavored target) and wraps it in a
 * {@link RustTest} rule that runs the produced executable.
 */
public class RustTestDescription implements
        Description<RustTestDescription.Arg>,
        ImplicitDepsInferringDescription<RustTestDescription.Arg>,
        Flavored,
        VersionRoot<RustTestDescription.Arg> {

    private final RustBuckConfig rustBuckConfig;
    private final FlavorDomain<CxxPlatform> cxxPlatforms;
    private final CxxPlatform defaultCxxPlatform;

    public RustTestDescription(
            RustBuckConfig rustBuckConfig,
            FlavorDomain<CxxPlatform> cxxPlatforms, CxxPlatform defaultCxxPlatform) {
        this.rustBuckConfig = rustBuckConfig;
        this.cxxPlatforms = cxxPlatforms;
        this.defaultCxxPlatform = defaultCxxPlatform;
    }

    @Override
    public Arg createUnpopulatedConstructorArg() {
        return new Arg();
    }

    /**
     * Creates the test rule: first builds the test executable under the
     * derived "unittest" flavor, then wraps it (plus the executable's deps)
     * in a {@link RustTest}.
     */
    @Override
    public <A extends Arg> BuildRule createBuildRule(
            TargetGraph targetGraph,
            BuildRuleParams params,
            BuildRuleResolver resolver,
            CellPathResolver cellRoots,
            A args) throws NoSuchBuildTargetException {
        final BuildTarget buildTarget = params.getBuildTarget();
        // Internal target for the compiled test binary itself.
        BuildTarget exeTarget = params.getBuildTarget()
                .withAppendedFlavors(InternalFlavor.of("unittest"));
        Optional<Map.Entry<Flavor, RustBinaryDescription.Type>> type =
                RustBinaryDescription.BINARY_TYPE.getFlavorAndValue(buildTarget);
        boolean isCheck = type.map(t -> t.getValue().isCheck()).orElse(false);
        BinaryWrapperRule testExeBuild = resolver.addToIndex(
                RustCompileUtils.createBinaryBuildRule(
                    params.withBuildTarget(exeTarget),
                    resolver,
                    rustBuckConfig,
                    cxxPlatforms,
                    defaultCxxPlatform,
                    args.crate,
                    args.features,
                    // "--test" only when using the built-in test harness
                    // (args.framework), followed by config and target flags.
                    Stream.of(
                        args.framework ? Stream.of("--test") : Stream.<String>empty(),
                        rustBuckConfig.getRustTestFlags().stream(),
                        args.rustcFlags.stream())
                        .flatMap(x -> x).iterator(),
                    args.linkerFlags.iterator(),
                    RustCompileUtils.getLinkStyle(params.getBuildTarget(), args.linkStyle),
                    args.rpath, args.srcs,
                    args.crateRoot,
                    ImmutableSet.of("lib.rs", "main.rs"),
                    isCheck
                ));
        SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
        Tool testExe = testExeBuild.getExecutableCommand();
        // The test rule must also depend on everything the executable needs.
        BuildRuleParams testParams = params.copyAppendingExtraDeps(
                testExe.getDeps(ruleFinder));
        return new RustTest(
                testParams,
                ruleFinder,
                testExeBuild,
                args.labels,
                args.contacts);
    }

    /** Adds parse-time deps: the Rust compiler tool and all C++ platform deps. */
    @Override
    public void findDepsForTargetFromConstructorArgs(
            BuildTarget buildTarget,
            CellPathResolver cellRoots,
            Arg constructorArg,
            ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
            ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
        ToolProvider compiler = rustBuckConfig.getRustCompiler();
        extraDepsBuilder.addAll(compiler.getParseTimeDeps());
        extraDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatforms.getValues()));
    }

    /** Accepts C++ platform flavors and the rust binary-type flavors. */
    @Override
    public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
        if (cxxPlatforms.containsAnyOf(flavors)) {
            return true;
        }
        for (RustBinaryDescription.Type type : RustBinaryDescription.Type.values()) {
            if (flavors.contains(type.getFlavor())) {
                return true;
            }
        }
        return false;
    }

    @Override
    public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains() {
        return Optional.of(ImmutableSet.of(cxxPlatforms, RustBinaryDescription.BINARY_TYPE));
    }

    @Override
    public boolean isVersionRoot(ImmutableSet<Flavor> flavors) {
        return true;
    }

    /** Constructor arg for {@code rust_test}; fields are populated by the parser. */
    @SuppressFieldNotInitialized
    public static class Arg extends AbstractDescriptionArg {
        public ImmutableSortedSet<SourcePath> srcs = ImmutableSortedSet.of();
        public ImmutableSet<String> contacts = ImmutableSet.of();
        public ImmutableSortedSet<String> features = ImmutableSortedSet.of();
        public ImmutableList<String> rustcFlags = ImmutableList.of();
        public ImmutableList<String> linkerFlags = ImmutableList.of();
        public ImmutableSortedSet<BuildTarget> deps = ImmutableSortedSet.of();
        public Optional<Linker.LinkableDepType> linkStyle;
        public boolean rpath = true;
        // true = compile with the built-in test harness ("--test").
        public boolean framework = true;
        public Optional<String> crate;
        public Optional<SourcePath> crateRoot;
    }
}
|
vschs007/buck
|
src/com/facebook/buck/rust/RustTestDescription.java
|
Java
|
apache-2.0
| 6,463 |
package org.efix.util.buffer;
import org.efix.util.ByteSequenceWrapper;
import org.efix.util.StringUtil;
/**
 * Static helpers for converting between ASCII strings and buffers.
 */
public class BufferUtil {

    // Utility class: static methods only, not meant to be instantiated.
    private BufferUtil() {
    }

    /**
     * Wraps the ASCII bytes of {@code string} in a new {@link UnsafeBuffer}.
     */
    public static UnsafeBuffer fromString(String string) {
        return new UnsafeBuffer(StringUtil.asciiBytes(string));
    }

    /** Renders the whole buffer as a string. */
    public static String toString(Buffer buffer) {
        return toString(buffer, 0, buffer.capacity());
    }

    /** Renders {@code length} bytes of the buffer starting at {@code offset}. */
    public static String toString(Buffer buffer, int offset, int length) {
        return new ByteSequenceWrapper(buffer, offset, length).toString();
    }
}
|
artyomkorzun/efix
|
src/main/java/org/efix/util/buffer/BufferUtil.java
|
Java
|
apache-2.0
| 536 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.repository;
import org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.entity.Address;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;
/**
 * Plain-JDBC repository for the {@code t_address} table.
 * Every operation borrows a fresh connection from the injected data source
 * and releases all resources via try-with-resources.
 */
public final class AddressRepository {

    private final DataSource dataSource;

    public AddressRepository(final DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Executes a statement that produces no result set (DDL / TRUNCATE). */
    private void executeUpdate(final String sql) throws SQLException {
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement()) {
            statement.executeUpdate(sql);
        }
    }

    public void createTableIfNotExists() throws SQLException {
        executeUpdate("CREATE TABLE IF NOT EXISTS t_address "
                + "(address_id BIGINT NOT NULL, address_name VARCHAR(100) NOT NULL, PRIMARY KEY (address_id))");
    }

    public void dropTable() throws SQLException {
        executeUpdate("DROP TABLE t_address");
    }

    public void truncateTable() throws SQLException {
        executeUpdate("TRUNCATE TABLE t_address");
    }

    /**
     * Inserts one row.
     *
     * @return the entity's own address id (no keys are generated by the DB)
     */
    public Long insert(final Address entity) throws SQLException {
        String sql = "INSERT INTO t_address (address_id, address_name) VALUES (?, ?)";
        try (Connection connection = dataSource.getConnection();
             PreparedStatement insertStatement = connection.prepareStatement(sql)) {
            insertStatement.setLong(1, entity.getAddressId());
            insertStatement.setString(2, entity.getAddressName());
            insertStatement.executeUpdate();
        }
        return entity.getAddressId();
    }

    public void delete(final Long primaryKey) throws SQLException {
        String sql = "DELETE FROM t_address WHERE address_id=?";
        try (Connection connection = dataSource.getConnection();
             PreparedStatement deleteStatement = connection.prepareStatement(sql)) {
            deleteStatement.setLong(1, primaryKey);
            deleteStatement.executeUpdate();
        }
    }

    public List<Address> selectAll() throws SQLException {
        return getAddress("SELECT * FROM t_address");
    }

    /** Runs the given SELECT and maps each (address_id, address_name) row. */
    private List<Address> getAddress(final String sql) throws SQLException {
        List<Address> addresses = new LinkedList<>();
        try (Connection connection = dataSource.getConnection();
             PreparedStatement query = connection.prepareStatement(sql);
             ResultSet resultSet = query.executeQuery()) {
            while (resultSet.next()) {
                Address address = new Address();
                address.setAddressId(resultSet.getLong(1));
                address.setAddressName(resultSet.getString(2));
                addresses.add(address);
            }
        }
        return addresses;
    }
}
|
apache/incubator-shardingsphere
|
examples/shardingsphere-sample/shardingsphere-example-generated/shardingsphere-jdbc-sample/shardingsphere-jdbc-memory-local-db-discovery-spring-namespace-jdbc-example/src/main/java/org/apache/shardingsphere/example/db/discovery/spring/namespace/jdbc/repository/AddressRepository.java
|
Java
|
apache-2.0
| 4,185 |
/*
* Copyright (c) 2017 Antony Esik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ae.camunda.dispatcher.mapper.xml;
import com.ae.camunda.dispatcher.api.mapper.TaskMapper;
import com.ae.camunda.dispatcher.exception.CamundaMappingException;
import org.eclipse.persistence.jaxb.JAXBContextFactory;
import org.springframework.stereotype.Component;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Collections;
/**
* @author AEsik
* Date 09.10.2017
*/
@Component
public class XmlTaskMapper implements TaskMapper {

    /**
     * Serializes the given task object to its XML representation.
     *
     * @throws CamundaMappingException wrapping any JAXB failure
     */
    @Override
    public String map(Object task) {
        try {
            StringWriter writer = new StringWriter();
            contextFor(task.getClass()).createMarshaller().marshal(task, writer);
            return writer.toString();
        } catch (JAXBException e) {
            throw new CamundaMappingException(e);
        }
    }

    /**
     * Deserializes the XML body into an instance of {@code clazz}.
     *
     * @throws CamundaMappingException wrapping any JAXB failure
     */
    @Override
    public Object map(String body, Class<?> clazz) {
        try {
            return contextFor(clazz).createUnmarshaller().unmarshal(new StringReader(body));
        } catch (JAXBException e) {
            throw new CamundaMappingException(e);
        }
    }

    /** Builds an EclipseLink MOXy JAXB context for a single class, with no extra properties. */
    private static JAXBContext contextFor(Class<?> clazz) throws JAXBException {
        return JAXBContextFactory.createContext(new Class[]{clazz}, Collections.emptyMap());
    }
}
|
EsikAntony/camunda-task-dispatcher
|
camunda-task-dispatcher-mapper-xml/src/main/java/com/ae/camunda/dispatcher/mapper/xml/XmlTaskMapper.java
|
Java
|
apache-2.0
| 1,991 |
package org.whale.ext.domain;
import java.util.ArrayList;
import java.util.List;
import org.whale.system.annotation.jdbc.Column;
import org.whale.system.annotation.jdbc.Id;
import org.whale.system.annotation.jdbc.Table;
import org.whale.system.annotation.jdbc.Validate;
import org.whale.system.base.BaseEntry;
import org.whale.system.common.util.PropertiesUtil;
/**
* 实体对象
*
* @author wjs
* 2014年9月10日-上午10:12:48
*/
// NOTE(review): the table name "sys_domian" looks like a typo of "sys_domain"; it is kept
// as-is because it must match the actual database table — confirm before renaming.
@Table(value="sys_domian", cnName="实体对象")
public class Domain extends BaseEntry {

    private static final long serialVersionUID = -23042834921L;

    @Id
    @Column(cnName="id")
    private Long id;

    // entity (class) name
    @Validate(required=true)
    @Column(cnName="实体名")
    private String domainName;

    // human-readable (Chinese) name
    @Validate(required=true)
    @Column(cnName="中文名")
    private String domainCnName;

    // database table name; unique
    @Validate(required=true)
    @Column(cnName="数据库", unique=true)
    private String domainSqlName;

    // base package for generated sources
    @Column(cnName="基础包路径")
    private String pkgName = "org.whale.system";

    // tree-model settings (used when the entity is rendered as a tree)
    private Integer treeModel;
    private String treeId;
    private String treePid;
    private String treeName;

    // template type used for code generation
    private Integer ftlType;

    // output path of the generated code
    private String codePath;

    private String author = PropertiesUtil.getValue("author", "wjs");

    // primary-key attribute
    private Attr idAttr;

    // all attributes, and the subsets shown in list/form/query views
    private List<Attr> attrs;
    private List<Attr> listAttrs = new ArrayList<Attr>();
    private List<Attr> formAttrs = new ArrayList<Attr>();
    private List<Attr> queryAttrs = new ArrayList<Attr>();

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getDomainName() {
        return domainName;
    }

    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    public String getDomainCnName() {
        return domainCnName;
    }

    public void setDomainCnName(String domainCnName) {
        this.domainCnName = domainCnName;
    }

    public String getDomainSqlName() {
        return domainSqlName;
    }

    public void setDomainSqlName(String domainSqlName) {
        this.domainSqlName = domainSqlName;
    }

    public String getPkgName() {
        return pkgName;
    }

    public void setPkgName(String pkgName) {
        this.pkgName = pkgName;
    }

    public Integer getTreeModel() {
        return treeModel;
    }

    public void setTreeModel(Integer treeModel) {
        this.treeModel = treeModel;
    }

    public String getTreeId() {
        return treeId;
    }

    public void setTreeId(String treeId) {
        this.treeId = treeId;
    }

    public String getTreePid() {
        return treePid;
    }

    public void setTreePid(String treePid) {
        this.treePid = treePid;
    }

    public String getTreeName() {
        return treeName;
    }

    public void setTreeName(String treeName) {
        this.treeName = treeName;
    }

    public Integer getFtlType() {
        return ftlType;
    }

    public void setFtlType(Integer ftlType) {
        this.ftlType = ftlType;
    }

    public String getCodePath() {
        return codePath;
    }

    public void setCodePath(String codePath) {
        this.codePath = codePath;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public Attr getIdAttr() {
        return idAttr;
    }

    public void setIdAttr(Attr idAttr) {
        this.idAttr = idAttr;
    }

    public List<Attr> getAttrs() {
        return attrs;
    }

    public void setAttrs(List<Attr> attrs) {
        this.attrs = attrs;
    }

    public List<Attr> getListAttrs() {
        return listAttrs;
    }

    public void setListAttrs(List<Attr> listAttrs) {
        this.listAttrs = listAttrs;
    }

    public List<Attr> getFormAttrs() {
        return formAttrs;
    }

    public void setFormAttrs(List<Attr> formAttrs) {
        this.formAttrs = formAttrs;
    }

    public List<Attr> getQueryAttrs() {
        return queryAttrs;
    }

    public void setQueryAttrs(List<Attr> queryAttrs) {
        this.queryAttrs = queryAttrs;
    }
}
|
fywxin/base
|
system-parent/ext-code/src/main/java/org/whale/ext/domain/Domain.java
|
Java
|
apache-2.0
| 3,741 |
/* ========================================================================= *
* Boarder *
* http://boarder.mikuz.org/ *
* ========================================================================= *
* Copyright (C) 2013 Boarder *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* ========================================================================= */
package fi.mikuz.boarder.util;
import org.acra.ACRA;
import android.content.Context;
import android.os.Looper;
import android.util.Log;
import android.widget.Toast;
public abstract class ContextUtils {

    private static final String TAG = ContextUtils.class.getSimpleName();

    /** Shows a short-duration toast; must be called on the main looper thread. */
    public static void toast(Context context, String toast) {
        toast(context, toast, Toast.LENGTH_SHORT);
    }

    /**
     * Shows a toast with the given duration. Calling from a thread without a looper,
     * or from a non-main looper, is reported via ACRA instead of showing the toast.
     */
    public static void toast(Context context, String toast, int duration) {
        String errLogMsg = "Unable to toast message: \"" + toast + "\"";
        Looper current = Looper.myLooper();
        if (current == null) {
            report(errLogMsg, new IllegalStateException("Not running in a looper"));
        } else if (current != Looper.getMainLooper()) {
            report(errLogMsg, new IllegalStateException("Not running in the main looper"));
        } else {
            try {
                Toast.makeText(context, toast, duration).show();
            } catch (NullPointerException e) {
                // e.g. null context — log only, do not crash the app
                Log.e(TAG, errLogMsg, e);
            }
        }
    }

    /** Logs the failure and forwards the exception to the ACRA crash reporter. */
    private static void report(String message, Exception e) {
        Log.e(TAG, message, e);
        ACRA.getErrorReporter().handleException(e);
    }
}
|
Mikuz/Boarder
|
src/fi/mikuz/boarder/util/ContextUtils.java
|
Java
|
apache-2.0
| 2,556 |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.kafka.impl;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.kafka.api.PartitionStrategy;
import com.streamsets.pipeline.kafka.api.SdcKafkaProducer;
import com.streamsets.pipeline.lib.kafka.KafkaErrors;
import com.streamsets.pipeline.lib.kafka.exception.KafkaConnectionException;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * {@link SdcKafkaProducer} backed by the legacy Kafka 0.8 scala producer client
 * ({@code kafka.javaapi.producer.Producer}).
 *
 * <p>Messages are buffered via {@link #enqueueMessage} and sent synchronously as one
 * batch by {@link #write}. The buffer is an unsynchronized list, so instances are not
 * thread-safe — presumably each pipeline runner owns its own producer (TODO confirm).
 */
public class KafkaProducer08 implements SdcKafkaProducer {
  private static final Logger LOG = LoggerFactory.getLogger(KafkaProducer08.class);

  // Kafka 0.8 producer configuration keys and this stage's defaults.
  private static final String METADATA_BROKER_LIST_KEY = "metadata.broker.list";
  private static final String KEY_SERIALIZER_CLASS_KEY = "key.serializer.class";
  private static final String PRODUCER_TYPE_KEY = "producer.type";
  private static final String PRODUCER_TYPE_DEFAULT = "sync";
  private static final String SERIALIZER_CLASS_KEY = "serializer.class";
  private static final String REQUEST_REQUIRED_ACKS_KEY = "request.required.acks";
  private static final String REQUEST_REQUIRED_ACKS_DEFAULT = "1";
  private static final String DEFAULT_ENCODER_CLASS = "kafka.serializer.DefaultEncoder";
  private static final String STRING_ENCODER_CLASS = "kafka.serializer.StringEncoder";
  private static final String PARTITIONER_CLASS_KEY = "partitioner.class";
  // Custom partitioner implementations shipped with this module.
  private static final String RANDOM_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RandomPartitioner";
  private static final String ROUND_ROBIN_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RoundRobinPartitioner";
  private static final String EXPRESSION_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.ExpressionPartitioner";
  // Broker list ("host:port[,host:port]...") used to bootstrap cluster metadata.
  private final String metadataBrokerList;
  // Extra user-supplied producer properties, applied last (see addUserConfiguredProperties).
  private final Map<String, Object> kafkaProducerConfigs;
  private final DataFormat producerPayloadType;
  private final PartitionStrategy partitionStrategy;
  // Buffer of messages accumulated between write() calls.
  private List<KeyedMessage> messageList;
  private Producer producer;
  public KafkaProducer08(
      String metadataBrokerList,
      DataFormat producerPayloadType,
      PartitionStrategy partitionStrategy,
      Map<String, Object> kafkaProducerConfigs
  ) {
    this.metadataBrokerList = metadataBrokerList;
    this.producerPayloadType = producerPayloadType;
    this.partitionStrategy = partitionStrategy;
    this.messageList = new ArrayList<>();
    this.kafkaProducerConfigs = kafkaProducerConfigs;
  }
  /** Builds the producer configuration (defaults first, then user overrides) and creates the producer. */
  @Override
  public void init() throws StageException {
    Properties props = new Properties();
    //metadata.broker.list
    props.put(METADATA_BROKER_LIST_KEY, metadataBrokerList);
    //producer.type — synchronous send so write() can surface failures
    props.put(PRODUCER_TYPE_KEY, PRODUCER_TYPE_DEFAULT);
    //key.serializer.class — keys are passed as strings
    props.put(KEY_SERIALIZER_CLASS_KEY, STRING_ENCODER_CLASS);
    //partitioner.class
    configurePartitionStrategy(props, partitionStrategy);
    //serializer.class
    configureSerializer(props, producerPayloadType);
    //request.required.acks — wait for leader ack by default
    props.put(REQUEST_REQUIRED_ACKS_KEY, REQUEST_REQUIRED_ACKS_DEFAULT);
    // user-configured properties go in last, so they can override most defaults above
    addUserConfiguredProperties(props);
    ProducerConfig config = new ProducerConfig(props);
    producer = new Producer<>(config);
  }
  @Override
  public void destroy() {
    if(producer != null) {
      producer.close();
    }
  }
  @Override
  public String getVersion() {
    return Kafka08Constants.KAFKA_VERSION;
  }
  /** Buffers a message; nothing is sent until {@link #write(Stage.Context)} is called. */
  @Override
  public void enqueueMessage(String topic, Object message, Object messageKey) {
    //Topic could be a record EL string. This is not a good place to evaluate expression
    //Hence get topic as parameter
    messageList.add(new KeyedMessage<>(topic, messageKey, message));
  }
  @Override
  public void clearMessages() {
    messageList.clear();
  }
  /**
   * Sends all buffered messages synchronously. On success the buffer is cleared; on
   * failure it is left intact and a KafkaConnectionException is thrown (fails the pipeline).
   * Always returns an empty list — this implementation reports no per-record responses.
   */
  @Override
  public List<Record> write(Stage.Context context) throws StageException {
    try {
      producer.send(messageList);
      messageList.clear();
    } catch (Exception e) {
      //Producer internally refreshes metadata and retries if there is any recoverable exception.
      //If retry fails, a FailedToSendMessageException is thrown.
      //In this case we want to fail pipeline.
      LOG.error(KafkaErrors.KAFKA_50.getMessage(), e.toString(), e);
      throw new KafkaConnectionException(KafkaErrors.KAFKA_50, e.toString(), e);
    }
    return Collections.emptyList();
  }
  // TEXT payloads are sent as raw bytes; for other formats the producer's default serializer applies.
  private void configureSerializer(Properties props, DataFormat producerPayloadType) {
    if(producerPayloadType == DataFormat.TEXT) {
      props.put(SERIALIZER_CLASS_KEY, DEFAULT_ENCODER_CLASS);
    }
  }
  // Maps the stage's partition strategy onto a Kafka partitioner class.
  private void configurePartitionStrategy(Properties props, PartitionStrategy partitionStrategy) {
    if (partitionStrategy == PartitionStrategy.RANDOM) {
      props.put(PARTITIONER_CLASS_KEY, RANDOM_PARTITIONER_CLASS);
    } else if (partitionStrategy == PartitionStrategy.ROUND_ROBIN) {
      props.put(PARTITIONER_CLASS_KEY, ROUND_ROBIN_PARTITIONER_CLASS);
    } else if (partitionStrategy == PartitionStrategy.EXPRESSION) {
      props.put(PARTITIONER_CLASS_KEY, EXPRESSION_PARTITIONER_CLASS);
    } else if (partitionStrategy == PartitionStrategy.DEFAULT) {
      //default partitioner class
    }
  }
  // User values for "metadata.broker.list", "key.serializer.class" and "serializer.class"
  // are always discarded (removed below). Any other key — including "producer.type",
  // "partitioner.class" and "request.required.acks" — DOES override the defaults set in
  // init(), because these entries are written into props after the defaults.
  // NOTE(review): removal mutates the caller-supplied kafkaProducerConfigs map.
  private void addUserConfiguredProperties(Properties props) {
    if (kafkaProducerConfigs != null && !kafkaProducerConfigs.isEmpty()) {
      kafkaProducerConfigs.remove(METADATA_BROKER_LIST_KEY);
      kafkaProducerConfigs.remove(KEY_SERIALIZER_CLASS_KEY);
      kafkaProducerConfigs.remove(SERIALIZER_CLASS_KEY);
      for (Map.Entry<String, Object> producerConfig : kafkaProducerConfigs.entrySet()) {
        props.put(producerConfig.getKey(), producerConfig.getValue());
      }
    }
  }
}
|
kunickiaj/datacollector
|
sdc-kafka_0_8/src/main/java/com/streamsets/pipeline/kafka/impl/KafkaProducer08.java
|
Java
|
apache-2.0
| 6,836 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.docdb.model.transform;
import java.util.ArrayList;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.docdb.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
 * StAX unmarshaller for the DocDB {@code Event} model.
 * Generated code ({@code aws-java-sdk-code-generator}) — change the generator, not this file.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EventStaxUnmarshaller implements Unmarshaller<Event, StaxUnmarshallerContext> {

    /**
     * Reads a single {@code Event} element from the XML event stream, consuming events
     * until the enclosing element (or the whole document) ends.
     */
    public Event unmarshall(StaxUnmarshallerContext context) throws Exception {
        Event event = new Event();
        int originalDepth = context.getCurrentDepth();
        // Child fields of Event live one level below the element we entered at.
        int targetDepth = originalDepth + 1;
        // At document start the payload is wrapped in one extra level.
        if (context.isStartOfDocument())
            targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return event;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("SourceIdentifier", targetDepth)) {
                    event.setSourceIdentifier(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("SourceType", targetDepth)) {
                    event.setSourceType(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("Message", targetDepth)) {
                    event.setMessage(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                // "EventCategories" opens the list; each "EventCategory" child appends a value.
                if (context.testExpression("EventCategories", targetDepth)) {
                    event.withEventCategories(new ArrayList<String>());
                    continue;
                }
                if (context.testExpression("EventCategories/EventCategory", targetDepth)) {
                    event.withEventCategories(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("Date", targetDepth)) {
                    event.setDate(DateStaxUnmarshallerFactory.getInstance("iso8601").unmarshall(context));
                    continue;
                }
                if (context.testExpression("SourceArn", targetDepth)) {
                    event.setSourceArn(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Popped above the element we started at: the Event element is finished.
                if (context.getCurrentDepth() < originalDepth) {
                    return event;
                }
            }
        }
    }

    private static EventStaxUnmarshaller instance;

    /**
     * Lazily created shared instance. Intentionally unsynchronized: a race can at worst
     * create an extra instance, which is harmless because the unmarshaller keeps no
     * per-call state (its only field is this static reference).
     */
    public static EventStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new EventStaxUnmarshaller();
        return instance;
    }
}
|
jentfoo/aws-sdk-java
|
aws-java-sdk-docdb/src/main/java/com/amazonaws/services/docdb/model/transform/EventStaxUnmarshaller.java
|
Java
|
apache-2.0
| 3,623 |
package com.gentics.mesh.changelog.changes;
import static com.gentics.mesh.core.data.relationship.GraphRelationships.SCHEMA_CONTAINER_VERSION_KEY_PROPERTY;
import com.gentics.mesh.changelog.AbstractChange;
import com.tinkerpop.blueprints.Direction;
/**
 * Changelog entry which replaces the outgoing {@code HAS_SCHEMA_CONTAINER_VERSION} edges of
 * node content vertices with the schema-container-version key property on the vertex itself
 * (see {@link #getDescription()}).
 */
public class ReplaceSchemaVersionEdges extends AbstractChange {

    // Fixed UUID identifying this changelog entry; must never change once released.
    @Override
    public String getUuid() {
        return "E737684330534623B768433053C623F2";
    }

    @Override
    public String getName() {
        return "ReplaceSchemaVersionEdges";
    }

    @Override
    public String getDescription() {
        return "Replaces edges from node content to schema versions with properties.";
    }

    @Override
    public void applyInTx() {
        // For each NodeGraphFieldContainerImpl vertex, replace the single outgoing
        // HAS_SCHEMA_CONTAINER_VERSION edge with SCHEMA_CONTAINER_VERSION_KEY_PROPERTY
        // (replaceSingleEdge is presumably inherited from AbstractChange — not visible here).
        replaceSingleEdge("NodeGraphFieldContainerImpl", Direction.OUT, "HAS_SCHEMA_CONTAINER_VERSION", SCHEMA_CONTAINER_VERSION_KEY_PROPERTY);
    }
}
|
gentics/mesh
|
changelog-system/src/main/java/com/gentics/mesh/changelog/changes/ReplaceSchemaVersionEdges.java
|
Java
|
apache-2.0
| 877 |
package sl.hr_client;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Default Android instrumentation-test scaffold for the {@link Application} class.
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        // Test the stock Application class; no custom Application subclass is exercised here.
        super(Application.class);
    }
}
|
862573026/SchoolHRProj
|
Android/HRAM_SNU_App/hr-client/src/androidTest/java/sl/hr_client/ApplicationTest.java
|
Java
|
apache-2.0
| 343 |
package cc.mallet.util;
/**
* Static utility methods for Strings
*/
/**
 * Static utility methods for Strings.
 */
final public class Strings {

  /**
   * Returns the length of the longest prefix shared by all of the given strings.
   *
   * @param strings a non-empty array (a zero-length array throws ArrayIndexOutOfBoundsException)
   * @return length of the common prefix; 0 when there is none
   */
  public static int commonPrefixIndex (String[] strings)
  {
    int prefixLen = strings[0].length();
    for (int i = 1; i < strings.length; i++) {
      // The running prefix can never be longer than the shortest string seen so far.
      prefixLen = Math.min(prefixLen, strings[i].length());
      if (prefixLen == 0)
        return 0;
      // Compare consecutive strings only: anything common to all strings is common
      // to each adjacent pair, so shrinking prefixLen pairwise is sufficient.
      for (int j = 0; j < prefixLen; j++) {
        if (strings[i-1].charAt(j) != strings[i].charAt(j)) {
          prefixLen = j;
          break;
        }
      }
    }
    return prefixLen;
  }

  /** Returns the longest prefix shared by all of the given strings (possibly ""). */
  public static String commonPrefix (String[] strings)
  {
    return strings[0].substring (0, commonPrefixIndex(strings));
  }

  /** Counts the occurrences of {@code ch} in {@code string}. */
  public static int count (String string, char ch)
  {
    int count = 0;
    for (int idx = string.indexOf(ch); idx >= 0; idx = string.indexOf(ch, idx + 1))
      count++;
    return count;
  }

  /**
   * Normalized Levenshtein (edit) distance between {@code s} and {@code t}:
   * the minimum number of single-character insertions, deletions and substitutions,
   * divided by the length of the longer string. Result is in [0, 1]; 0 means the
   * strings are identical, 1 maximally different.
   *
   * <p>Bug fix: previously two empty strings returned 1.0 although they are identical;
   * they now correctly return 0.0. A single empty string still returns 1.0 (all
   * characters of the other string must be inserted).
   */
  public static double levenshteinDistance (String s, String t) {
    int n = s.length();
    int m = t.length();
    if (n == 0 && m == 0)
      return 0.0;   // identical (both empty); the old code misreported this as 1.0
    if (n == 0 || m == 0)
      return 1.0;   // one empty: distance = max(n, m), normalized to 1
    // d[i][j] = edit distance between the first i chars of s and first j chars of t.
    int[][] d = new int[n+1][m+1];
    for (int i = 0; i <= n; i++)
      d[i][0] = i;
    for (int j = 0; j <= m; j++)
      d[0][j] = j;
    for (int i = 1; i <= n; i++) {
      char s_i = s.charAt (i - 1);
      for (int j = 1; j <= m; j++) {
        int cost = (s_i == t.charAt (j - 1)) ? 0 : 1;
        d[i][j] = Math.min(Math.min(d[i-1][j] + 1,     // deletion
                                    d[i][j-1] + 1),    // insertion
                           d[i-1][j-1] + cost);        // substitution (or match)
      }
    }
    return (double) d[n][m] / Math.max(n, m);   // normalize to 0-1
  }
}
|
UnsupervisedOntologyLearning/hrLDA
|
hrLDA/src/cc/mallet/util/Strings.java
|
Java
|
apache-2.0
| 1,930 |
package org.museautomation.core.step;
import org.jetbrains.annotations.*;
import org.museautomation.core.*;
import org.museautomation.core.context.*;
import org.museautomation.core.step.descriptor.*;
import org.museautomation.core.steptask.*;
import org.museautomation.core.values.*;
import org.museautomation.core.values.descriptor.*;
import java.util.*;
/**
* Executes the steps contained within a Macro.
*
* Note that this does NOT execute those steps within a separate variable scope, despite this class extending
* ScopedGroup. It overrides #isCreateNewVariableScope to disable that behavior. That seems a bit strange, but
* CallFunction builds on the basic function of CallMacroStep and it needs to be scoped. We need multiple-inheritance
* to do this cleanly (yuck), but this will have to suffice.
*
* @see Macro
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
@MuseTypeId("callmacro")
@MuseStepName("Macro")
@MuseInlineEditString("call macro {id}")
@MuseStepIcon("glyph:FontAwesome:EXTERNAL_LINK")
@MuseStepTypeGroup("Structure")
@MuseStepLongDescription("The 'id' source is resolved to a string and used to find the macro in the project. The steps within the macro are then executed as children of the call-macro step, within the same variable scope as the parent. This means that steps within the macro have access to the same variables as the caller.")
@MuseSubsourceDescriptor(displayName = "Macro name", description = "The name (resource id) of the macro to call", type = SubsourceDescriptor.Type.Named, name = CallMacroStep.ID_PARAM)
public class CallMacroStep extends ScopedGroup
    {
    @SuppressWarnings("unused") // called via reflection
    public CallMacroStep(StepConfiguration config, MuseProject project)
        {
        super(config, project);
        _config = config;
        _project = project;
        }

    /**
     * Locates the macro resource and builds an execution context that runs its steps as
     * children of this step, sharing the caller's variable scope.
     */
    @Override
    protected StepExecutionContext createStepExecutionContextForChildren(StepExecutionContext context) throws MuseExecutionError
        {
        String resource_id = getStepsId(context);
        ContainsStep steps_resource = _project.getResourceStorage().getResource(resource_id, ContainsStep.class);
        if (steps_resource == null)
            throw new StepExecutionError("unable to locate project resource, id=" + resource_id);

        // run the root step's children when it has any; otherwise run the root step itself
        StepConfiguration root = steps_resource.getStep();
        List<StepConfiguration> steps_to_run = root.getChildren();
        if (steps_to_run == null || steps_to_run.isEmpty())
            {
            steps_to_run = new ArrayList<>();
            steps_to_run.add(root);
            }

        context.getStepLocator().loadSteps(steps_to_run);
        context.raiseEvent(DynamicStepLoadingEventType.create(_config, steps_to_run));
        return new ListOfStepsExecutionContext(context.getParent(), steps_to_run, isCreateNewVariableScope(), this);
        }

    /**
     * Resolves the 'id' value source to the id of the project resource holding the steps to run.
     */
    @NotNull
    @SuppressWarnings("WeakerAccess")
    protected String getStepsId(StepExecutionContext context) throws MuseExecutionError
        {
        MuseValueSource id_source = getValueSource(_config, ID_PARAM, true, context.getProject());
        return BaseValueSource.getValue(id_source, context, false, String.class);
        }

    // macro steps deliberately share the caller's variable scope (see class documentation)
    @Override
    protected boolean isCreateNewVariableScope()
        {
        return false;
        }

    protected MuseProject _project;
    private StepConfiguration _config;

    public final static String ID_PARAM = "id";
    public final static String TYPE_ID = CallMacroStep.class.getAnnotation(MuseTypeId.class).value();
    }
|
ChrisLMerrill/muse
|
core/src/main/java/org/museautomation/core/step/CallMacroStep.java
|
Java
|
apache-2.0
| 3,625 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.patch.formove;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diff.impl.patch.FilePatch;
import com.intellij.openapi.diff.impl.patch.TextFilePatch;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchBase;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchFactory;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.ex.FileTypeChooser;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.patch.RelativePathCalculator;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class PathsVerifier {
// in
private final Project myProject;
private final VirtualFile myBaseDirectory;
private final List<FilePatch> myPatches;
// temp
private final Map<VirtualFile, MovedFileData> myMovedFiles;
private final List<FilePath> myBeforePaths;
private final List<VirtualFile> myCreatedDirectories;
// out
private final List<PatchAndFile> myTextPatches;
private final List<PatchAndFile> myBinaryPatches;
@NotNull private final List<VirtualFile> myWritableFiles;
private final ProjectLevelVcsManager myVcsManager;
private final List<FilePatch> mySkipped;
private DelayedPrecheckContext myDelayedPrecheckContext;
private final List<FilePath> myAddedPaths;
private final List<FilePath> myDeletedPaths;
private boolean myIgnoreContentRootsCheck;
  /**
   * @param project       project the patches will be applied to
   * @param baseDirectory root against which relative patch paths are resolved
   * @param patches       patches to verify; this list is mutated later — failed and
   *                      skipped entries are removed by nonWriteActionPreCheck()/execute()
   */
  public PathsVerifier(@NotNull Project project,
                       @NotNull VirtualFile baseDirectory,
                       @NotNull List<FilePatch> patches) {
    myProject = project;
    myBaseDirectory = baseDirectory;
    myPatches = patches;
    myMovedFiles = new HashMap<>();
    myBeforePaths = new ArrayList<>();
    myCreatedDirectories = new ArrayList<>();
    myTextPatches = new ArrayList<>();
    myBinaryPatches = new ArrayList<>();
    myWritableFiles = new ArrayList<>();
    myVcsManager = ProjectLevelVcsManager.getInstance(myProject);
    mySkipped = new ArrayList<>();
    myAddedPaths = new ArrayList<>();
    myDeletedPaths = new ArrayList<>();
  }
// those to be moved to CL: target + created dirs
public List<FilePath> getDirectlyAffected() {
final List<FilePath> affected = new ArrayList<>();
addAllFilePath(myCreatedDirectories, affected);
addAllFilePath(myWritableFiles, affected);
affected.addAll(myBeforePaths);
return affected;
}
// old parents of moved files
public List<VirtualFile> getAllAffected() {
final List<VirtualFile> affected = new ArrayList<>();
affected.addAll(myCreatedDirectories);
affected.addAll(myWritableFiles);
// after files' parent
for (VirtualFile file : myMovedFiles.keySet()) {
final VirtualFile parent = file.getParent();
if (parent != null) {
affected.add(parent);
}
}
// before..
for (FilePath path : myBeforePaths) {
final FilePath parent = path.getParentPath();
if (parent != null) {
affected.add(parent.getVirtualFile());
}
}
return affected;
}
private static void addAllFilePath(final Collection<VirtualFile> files, final Collection<FilePath> paths) {
for (VirtualFile file : files) {
paths.add(VcsUtil.getFilePath(file));
}
}
  /**
   * Pre-checks every patch without taking a write action.
   *
   * <p>Side effects: patches that fail their checker or are skipped by the delayed
   * pre-check are removed from {@code myPatches}; skipped ones are also recorded in
   * {@code mySkipped}; {@code revert(...)} is invoked for each failure.
   *
   * @return the patches that failed the pre-check (already removed from {@code myPatches})
   */
  @CalledInAwt
  public List<FilePatch> nonWriteActionPreCheck() {
    List<FilePatch> failedToApply = ContainerUtil.newArrayList();
    myDelayedPrecheckContext = new DelayedPrecheckContext(myProject);
    for (FilePatch patch : myPatches) {
      final CheckPath checker = getChecker(patch);
      if (!checker.canBeApplied(myDelayedPrecheckContext)) {
        revert(checker.getErrorMessage());
        failedToApply.add(patch);
      }
    }
    // the delayed context may decide (e.g. after asking the user) to skip some patches
    final Collection<FilePatch> skipped = myDelayedPrecheckContext.doDelayed();
    mySkipped.addAll(skipped);
    myPatches.removeAll(skipped);
    myPatches.removeAll(failedToApply);
    return failedToApply;
  }
  /** Patches skipped by the delayed pre-check (populated by nonWriteActionPreCheck). */
  public List<FilePatch> getSkipped() {
    return mySkipped;
  }
  /**
   * Runs the full check for every remaining patch.
   *
   * <p>All checkers are created up front, then run in order; each failure triggers
   * {@code revert(...)} with the checker's error message. An IOException from any
   * checker aborts the loop and reverts as well.
   *
   * @return patches whose check failed (already removed from {@code myPatches})
   */
  public List<FilePatch> execute() {
    List<FilePatch> failedPatches = ContainerUtil.newArrayList();
    try {
      final List<CheckPath> checkers = new ArrayList<>(myPatches.size());
      for (FilePatch patch : myPatches) {
        final CheckPath checker = getChecker(patch);
        checkers.add(checker);
      }
      for (CheckPath checker : checkers) {
        if (!checker.check()) {
          failedPatches.add(checker.getPatch());
          revert(checker.getErrorMessage());
        }
      }
    }
    catch (IOException e) {
      revert(e.getMessage());
    }
    myPatches.removeAll(failedPatches);
    return failedPatches;
  }
private CheckPath getChecker(final FilePatch patch) {
final String beforeFileName = patch.getBeforeName();
final String afterFileName = patch.getAfterName();
if (beforeFileName == null || patch.isNewFile()) {
return new CheckAdded(patch);
}
else if (afterFileName == null || patch.isDeletedFile()) {
return new CheckDeleted(patch);
}
else if (!beforeFileName.equals(afterFileName)) {
return new CheckMoved(patch);
}
else {
return new CheckModified(patch);
}
}
  /** Paths that will be created by applying the patches (filled while checking additions). */
  public Collection<FilePath> getToBeAdded() {
    return myAddedPaths;
  }
  /** Paths that will be removed by applying the patches (filled while checking deletions). */
  public Collection<FilePath> getToBeDeleted() {
    return myDeletedPaths;
  }
  /**
   * Removes from the pending text patches all entries whose target file cannot take
   * text content (directory, unknown type the user declined to map, or binary).
   *
   * @return the patches that were filtered out
   */
  @NotNull
  public Collection<FilePatch> filterBadFileTypePatches() {
    List<PatchAndFile> failedTextPatches =
      ContainerUtil.findAll(myTextPatches, textPatch -> !isFileTypeOk(textPatch.getFile()));
    myTextPatches.removeAll(failedTextPatches);
    return ContainerUtil.map(failedTextPatches, patchInfo -> patchInfo.getApplyPatch().getPatch());
  }
  /**
   * Whether text patch content may be applied to {@code file}. Rejects directories,
   * files whose type stays unknown after prompting the user to associate one, and
   * binary files — showing an error dialog for each rejection.
   */
  private boolean isFileTypeOk(@NotNull VirtualFile file) {
    if (file.isDirectory()) {
      PatchApplier
        .showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because it is directory.");
      return false;
    }
    FileType fileType = file.getFileType();
    if (fileType == FileTypes.UNKNOWN) {
      // asks the user to pick a file type for this extension; null means they cancelled
      fileType = FileTypeChooser.associateFileType(file.getName());
      if (fileType == null) {
        PatchApplier
          .showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because its type not defined.");
        return false;
      }
    }
    if (fileType.isBinary()) {
      PatchApplier.showError(myProject, "Cannot apply file " + file.getPresentableName() + " from patch because it is binary.");
      return false;
    }
    return true;
  }
  /**
   * Checker for in-place modifications. Inherits CheckDeleted's behavior unchanged:
   * it only validates that the "before" file exists and records its path.
   */
  private class CheckModified extends CheckDeleted {
    private CheckModified(final FilePatch path) {
      super(path);
    }
  }
/**
 * Checker for patches that delete (or, via {@link CheckModified}, modify) an existing
 * file. The "before" file must exist and lie inside a VCS content root; on success the
 * patch is queued via {@link #addPatch} and the path recorded in {@code myBeforePaths}
 * (and in {@code myDeletedPaths} for genuine deletions).
 */
private class CheckDeleted extends CheckPath {
  protected CheckDeleted(final FilePatch path) {
    super(path);
  }

  @Override
  protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
    // File to delete is already gone: defer to the context so the user can decide to skip it.
    if (beforeFile == null) {
      context.addSkip(getMappedFilePath(myBeforeName), myPatch);
    }
    return true;
  }

  @Override
  protected boolean check() {
    final VirtualFile beforeFile = getMappedFile(myBeforeName);
    if (! checkExistsAndValid(beforeFile, myBeforeName)) {
      return false;
    }
    addPatch(myPatch, beforeFile);
    FilePath filePath = VcsUtil.getFilePath(beforeFile.getParent(), beforeFile.getName(), beforeFile.isDirectory());
    // Only an actual deletion marks the path as deleted; a plain modification does not.
    if (myPatch.isDeletedFile() || myPatch.getAfterName() == null) {
      myDeletedPaths.add(filePath);
    }
    myBeforePaths.add(filePath);
    return true;
  }
}
/**
 * Checker for patches that create a new file: ensures the parent directory chain exists
 * (creating missing directories), creates the target file — unless the user already agreed
 * to overwrite an existing one — and queues the patch.
 */
private class CheckAdded extends CheckPath {
  private CheckAdded(final FilePatch path) {
    super(path);
  }

  @Override
  protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
    // The file to be created already exists: defer so the user can confirm overwriting.
    if (afterFile != null) {
      context.addOverrideExisting(myPatch, VcsUtil.getFilePath(afterFile));
    }
    return true;
  }

  @Override
  public boolean check() throws IOException {
    final String[] pieces = RelativePathCalculator.split(myAfterName);
    final VirtualFile parent = makeSureParentPathExists(pieces);
    if (parent == null) {
      setErrorMessage(fileNotFoundMessage(myAfterName));
      return false;
    }
    String name = pieces[pieces.length - 1];
    File afterFile = new File(parent.getPath(), name);
    //if user already accepted overwriting, we shouldn't have created a new one
    final VirtualFile file = myDelayedPrecheckContext.getOverridenPaths().contains(VcsUtil.getFilePath(afterFile))
                             ? parent.findChild(name)
                             : createFile(parent, name);
    if (file == null) {
      setErrorMessage(fileNotFoundMessage(myAfterName));
      return false;
    }
    myAddedPaths.add(VcsUtil.getFilePath(file));
    if (! checkExistsAndValid(file, myAfterName)) {
      return false;
    }
    addPatch(myPatch, file);
    return true;
  }
}
/**
 * Checker for patches that move/rename a file. Precondition: the source exists and the
 * destination does not. On success the move is scheduled in {@code myMovedFiles} (it is
 * executed later by {@link #doMoveIfNeeded}) and the patch is queued.
 */
private class CheckMoved extends CheckPath {
  private CheckMoved(final FilePatch path) {
    super(path);
  }

  // before exists; after does not exist
  @Override
  protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, final DelayedPrecheckContext context) {
    if (beforeFile == null) {
      setErrorMessage(fileNotFoundMessage(myBeforeName));
    } else if (afterFile != null) {
      setErrorMessage(fileAlreadyExists(afterFile.getPath()));
    }
    return beforeFile != null && afterFile == null;
  }

  @Override
  public boolean check() throws IOException {
    final String[] pieces = RelativePathCalculator.split(myAfterName);
    final VirtualFile afterFileParent = makeSureParentPathExists(pieces);
    if (afterFileParent == null) {
      setErrorMessage(fileNotFoundMessage(myAfterName));
      return false;
    }
    final VirtualFile beforeFile = getMappedFile(myBeforeName);
    if (! checkExistsAndValid(beforeFile, myBeforeName)) {
      return false;
    }
    assert beforeFile != null; // if beforeFile is null then checkExist returned false;
    myMovedFiles.put(beforeFile, new MovedFileData(afterFileParent, beforeFile, myPatch.getAfterFileName()));
    addPatch(myPatch, beforeFile);
    return true;
  }
}
/**
 * Base class for per-patch validation. A checker is consulted in two phases: first
 * {@link #canBeApplied} runs a quick {@link #precheck} that may record user decisions in
 * the {@link DelayedPrecheckContext}; later {@link #check} performs the actual validation,
 * prepares the file system, and queues the patch. A failure message is kept in
 * {@code myErrorMessage}.
 */
private abstract class CheckPath {
  protected final String myBeforeName;
  protected final String myAfterName;
  protected final FilePatch myPatch;
  private String myErrorMessage;

  CheckPath(final FilePatch path) {
    myPatch = path;
    myBeforeName = path.getBeforeName();
    myAfterName = path.getAfterName();
  }

  /** @return why this patch cannot be applied, or {@code null} if no failure was recorded. */
  public String getErrorMessage() {
    return myErrorMessage;
  }

  public void setErrorMessage(final String errorMessage) {
    myErrorMessage = errorMessage;
  }

  /** Resolves the before/after names against the base directory and delegates to {@link #precheck}. */
  public boolean canBeApplied(DelayedPrecheckContext context) {
    final VirtualFile beforeFile = getMappedFile(myBeforeName);
    final VirtualFile afterFile = getMappedFile(myAfterName);
    return precheck(beforeFile, afterFile, context);
  }

  protected abstract boolean precheck(final VirtualFile beforeFile,
                                      final VirtualFile afterFile,
                                      DelayedPrecheckContext context);

  protected abstract boolean check() throws IOException;

  /** Records a not-found error when {@code file} is null; otherwise validates writability. */
  protected boolean checkExistsAndValid(final VirtualFile file, final String name) {
    if (file == null) {
      setErrorMessage(fileNotFoundMessage(name));
      return false;
    }
    return checkModificationValid(file, name);
  }

  protected boolean checkModificationValid(final VirtualFile file, final String name) {
    // Tests may opt out of the content-root restriction via setIgnoreContentRootsCheck.
    if (ApplicationManager.getApplication().isUnitTestMode() && myIgnoreContentRootsCheck) return true;
    // security check to avoid overwriting system files with a patch
    if (file == null || !inContent(file) || myVcsManager.getVcsRootFor(file) == null) {
      setErrorMessage("File to patch found outside content root: " + name);
      return false;
    }
    return true;
  }

  @Nullable
  protected VirtualFile getMappedFile(String path) {
    return PathMerger.getFile(myBaseDirectory, path);
  }

  protected FilePath getMappedFilePath(String path) {
    return PathMerger.getFile(VcsUtil.getFilePath(myBaseDirectory), path);
  }

  private boolean inContent(VirtualFile file) {
    return myVcsManager.isFileInContent(file);
  }

  public FilePatch getPatch() {
    return myPatch;
  }
}
/**
 * Queues an accepted patch with its target file — text patches and binary/other patches
 * go to separate lists — and remembers the file so it can be made writable.
 */
private void addPatch(final FilePatch patch, final VirtualFile file) {
  if (!(patch instanceof TextFilePatch)) {
    myBinaryPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.createGeneral(patch)));
  }
  else {
    myTextPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.create((TextFilePatch)patch)));
  }
  myWritableFiles.add(file);
}
/** Builds the localized "cannot find file to patch" message for {@code path}. */
private static String fileNotFoundMessage(final String path) {
  return VcsBundle.message("cannot.find.file.to.patch", path);
}

/** Builds the localized "file already exists" message for {@code path}. */
private static String fileAlreadyExists(final String path) {
  return VcsBundle.message("cannot.apply.file.already.exists", path);
}
/**
 * Reports {@code errorMessage} to the user. The rollback of moves and created directories
 * is currently disabled — the commented-out code below is the old implementation, kept for
 * reference — so this method only shows the error notification.
 */
private void revert(final String errorMessage) {
  PatchApplier.showError(myProject, errorMessage);
  // move back
  /*for (MovedFileData movedFile : myMovedFiles) {
    try {
      final VirtualFile current = movedFile.getCurrent();
      final VirtualFile newParent = current.getParent();
      final VirtualFile file;
      if (! Comparing.equal(newParent, movedFile.getOldParent())) {
        file = moveFile(current, movedFile.getOldParent());
      } else {
        file = current;
      }
      if (! Comparing.equal(current.getName(), movedFile.getOldName())) {
        file.rename(PatchApplier.class, movedFile.getOldName());
      }
    }
    catch (IOException e) {
      // ignore: revert as much as possible
    }
  }
  // go back
  ApplicationManager.getApplication().runWriteAction(new Runnable() {
    public void run() {
      for (int i = myCreatedDirectories.size() - 1; i >= 0; -- i) {
        final VirtualFile file = myCreatedDirectories.get(i);
        try {
          file.delete(PatchApplier.class);
        }
        catch (IOException e) {
          // ignore
        }
      }
    }
  });
  myBinaryPatches.clear();
  myTextPatches.clear();
  myWritableFiles.clear();*/
}
/**
 * Creates child file {@code name} under {@code parent} in the VFS. The commented-out
 * variant below is the previous implementation that wrapped the call in an explicit
 * write action; the current code assumes the caller provides the required context.
 */
private static VirtualFile createFile(final VirtualFile parent, final String name) throws IOException {
  return parent.createChildData(PatchApplier.class, name);
  /*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
  final Ref<VirtualFile> result = new Ref<VirtualFile>();
  ApplicationManager.getApplication().runWriteAction(new Runnable() {
    public void run() {
      try {
        result.set(parent.createChildData(PatchApplier.class, name));
      }
      catch (IOException e) {
        ioExceptionRef.set(e);
      }
    }
  });
  if (! ioExceptionRef.isNull()) {
    throw ioExceptionRef.get();
  }
  return result.get();*/
}
/**
 * Moves {@code file} under {@code newParent} in the VFS and returns it. The commented-out
 * variant below is the previous implementation that wrapped the move in an explicit
 * write action; the current code assumes the caller provides the required context.
 */
private static VirtualFile moveFile(final VirtualFile file, final VirtualFile newParent) throws IOException {
  file.move(FilePatch.class, newParent);
  return file;
  /*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
  ApplicationManager.getApplication().runWriteAction(new Runnable() {
    public void run() {
      try {
        file.move(FilePatch.class, newParent);
      }
      catch (IOException e) {
        ioExceptionRef.set(e);
      }
    }
  });
  if (! ioExceptionRef.isNull()) {
    throw ioExceptionRef.get();
  }
  return file;*/
}
/**
 * Walks the directory components of {@code pieces} (all but the last element, which is the
 * file name) starting from the base directory, creating any missing directories and
 * recording them in {@code myCreatedDirectories} for possible rollback.
 *
 * @return the deepest directory reached, or the base directory itself when there are no
 *         directory components; may be null after a ".." step past a root
 *
 * NOTE(review): after a ".." component {@code child} may become null; the next iteration
 * would then NPE on findChild. Presumably prevented by earlier path mapping — confirm.
 */
@Nullable
private VirtualFile makeSureParentPathExists(final String[] pieces) throws IOException {
  VirtualFile child = myBaseDirectory;
  final int size = pieces.length - 1;
  for (int i = 0; i < size; i++) {
    final String piece = pieces[i];
    if (StringUtil.isEmptyOrSpaces(piece)) {
      continue;
    }
    if ("..".equals(piece)) {
      child = child.getParent();
      continue;
    }
    VirtualFile nextChild = child.findChild(piece);
    if (nextChild == null) {
      nextChild = VfsUtil.createDirectories(child.getPath() + '/' + piece);
      myCreatedDirectories.add(nextChild);
    }
    child = nextChild;
  }
  return child;
}
/** @return the accepted text patches paired with their target files. */
public List<PatchAndFile> getTextPatches() {
  return myTextPatches;
}

/** @return the accepted binary/other patches paired with their target files. */
public List<PatchAndFile> getBinaryPatches() {
  return myBinaryPatches;
}

/** @return every file that will be written to and therefore must be made writable. */
@NotNull
public List<VirtualFile> getWritableFiles() {
  return myWritableFiles;
}
/**
 * If {@code file} was scheduled for a move/rename by a patch (see CheckMoved), performs the
 * move inside a write action and records the original location in {@code myBeforePaths}.
 * Does nothing for files with no pending move.
 */
public void doMoveIfNeeded(final VirtualFile file) throws IOException {
  final MovedFileData movedFile = myMovedFiles.get(file);
  if (movedFile != null) {
    myBeforePaths.add(VcsUtil.getFilePath(file));
    ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<VirtualFile, IOException>() {
      @Override
      public VirtualFile compute() throws IOException {
        return movedFile.doMove();
      }
    });
  }
}
/**
 * Describes a pending move/rename of a file: the file itself, its target parent directory,
 * and its target name. {@link #doMove} executes the operation, handling the case where the
 * new name already exists in the old directory by routing through a temporary name.
 */
private static class MovedFileData {
  private final VirtualFile myNewParent;
  private final VirtualFile myCurrent;
  private final String myNewName;

  private MovedFileData(@NotNull final VirtualFile newParent, @NotNull final VirtualFile current, @NotNull final String newName) {
    myNewParent = newParent;
    myCurrent = current;
    myNewName = newName;
  }

  public VirtualFile getCurrent() {
    return myCurrent;
  }

  public VirtualFile getNewParent() {
    return myNewParent;
  }

  public String getNewName() {
    return myNewName;
  }

  /**
   * Renames and/or moves the file to its target location. When a plain rename would collide
   * with an existing file in the old directory, delegates to
   * {@link #performRenameWithConflicts} instead.
   */
  public VirtualFile doMove() throws IOException {
    final VirtualFile oldParent = myCurrent.getParent();
    boolean needRename = !Comparing.equal(myCurrent.getName(), myNewName);
    boolean needMove = !myNewParent.equals(oldParent);
    if (needRename) {
      if (needMove) {
        File oldParentFile = VfsUtilCore.virtualToIoFile(oldParent);
        File targetAfterRenameFile = new File(oldParentFile, myNewName);
        if (targetAfterRenameFile.exists() && myCurrent.exists()) {
          // if there is a conflict during first rename we have to rename to third name, then move, then rename to final target
          performRenameWithConflicts(oldParentFile);
          return myCurrent;
        }
      }
      myCurrent.rename(PatchApplier.class, myNewName);
    }
    if (needMove) {
      myCurrent.move(PatchApplier.class, myNewParent);
    }
    return myCurrent;
  }

  /**
   * Conflict-safe move: rename to a unique temporary name (also unique in the destination
   * directory), move to the new parent, then rename to the final target name.
   */
  private void performRenameWithConflicts(@NotNull File oldParent) throws IOException {
    File tmpFileWithUniqueName = FileUtil.createTempFile(oldParent, "tempFileToMove", null, false);
    File newParentFile = VfsUtilCore.virtualToIoFile(myNewParent);
    File destFile = new File(newParentFile, tmpFileWithUniqueName.getName());
    // Keep generating names until one is free in the destination directory too.
    while (destFile.exists()) {
      destFile = new File(newParentFile,
                          FileUtil.createTempFile(oldParent, FileUtil.getNameWithoutExtension(destFile.getName()), null, false)
                            .getName());
    }
    myCurrent.rename(PatchApplier.class, destFile.getName());
    myCurrent.move(PatchApplier.class, myNewParent);
    myCurrent.rename(PatchApplier.class, myNewName);
  }
}
/**
 * Collects precheck findings that need a user decision before patches are applied:
 * files to be deleted that are already missing (candidates for skipping) and files to be
 * created that already exist (candidates for overwriting). {@link #doDelayed} shows the
 * confirmation UI and returns the patches the user chose not to apply.
 */
private static class DelayedPrecheckContext {
  private final Map<FilePath, FilePatch> mySkipDeleted;
  private final Map<FilePath, FilePatch> myOverrideExisting;
  private final List<FilePath> myOverridenPaths;
  private final Project myProject;

  private DelayedPrecheckContext(final Project project) {
    myProject = project;
    myOverrideExisting = new HashMap<>();
    mySkipDeleted = new HashMap<>();
    myOverridenPaths = new LinkedList<>();
  }

  /** Records a deletion patch whose target is already missing. */
  public void addSkip(final FilePath path, final FilePatch filePatch) {
    mySkipDeleted.put(path, filePatch);
  }

  /** Records an addition patch whose target already exists; first patch per path wins. */
  public void addOverrideExisting(final FilePatch patch, final FilePath filePath) {
    if (! myOverrideExisting.containsKey(filePath)) {
      myOverrideExisting.put(filePath, patch);
    }
  }

  // returns those to be skipped
  public Collection<FilePatch> doDelayed() {
    final List<FilePatch> result = new LinkedList<>();
    if (! myOverrideExisting.isEmpty()) {
      final String title = "Overwrite Existing Files";
      List<FilePath> files = new ArrayList<>(myOverrideExisting.keySet());
      Collection<FilePath> selected = AbstractVcsHelper.getInstance(myProject).selectFilePathsToProcess(
        files, title,
        "\nThe following files should be created by patch, but they already exist.\nDo you want to overwrite them?\n", title,
        "The following file should be created by patch, but it already exists.\nDo you want to overwrite it?\n{0}",
        VcsShowConfirmationOption.STATIC_SHOW_CONFIRMATION,
        "Overwrite", "Cancel");
      // Paths the user confirmed are removed from the "skip" map and remembered as overridden.
      if (selected != null) {
        for (FilePath path : selected) {
          myOverrideExisting.remove(path);
        }
      }
      result.addAll(myOverrideExisting.values());
      if (selected != null) {
        myOverridenPaths.addAll(selected);
      }
    }
    result.addAll(mySkipDeleted.values());
    return result;
  }

  /** @return paths the user agreed to overwrite. */
  public List<FilePath> getOverridenPaths() {
    return myOverridenPaths;
  }

  /** @return paths of deletion targets that were already missing. */
  public Collection<FilePath> getAlreadyDeletedPaths() {
    return mySkipDeleted.keySet();
  }
}
/**
 * Allows skipping the content-root security check; honored only in unit-test mode
 * (see CheckPath.checkModificationValid).
 */
public void setIgnoreContentRootsCheck(boolean ignoreContentRootsCheck) {
  myIgnoreContentRootsCheck = ignoreContentRootsCheck;
}
/** Immutable pair of a patch-application strategy and the virtual file it targets. */
public static class PatchAndFile {
  private final ApplyFilePatchBase<?> myPatch;
  private final VirtualFile myFile;

  public PatchAndFile(VirtualFile file, ApplyFilePatchBase<?> patch) {
    myPatch = patch;
    myFile = file;
  }

  /** @return the patch-application strategy. */
  public ApplyFilePatchBase<?> getApplyPatch() {
    return myPatch;
  }

  /** @return the file the patch will be applied to. */
  public VirtualFile getFile() {
    return myFile;
  }
}
}
|
goodwinnk/intellij-community
|
platform/vcs-impl/src/com/intellij/openapi/diff/impl/patch/formove/PathsVerifier.java
|
Java
|
apache-2.0
| 23,092 |
package com.therabbitmage.android.beacon.network;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.message.BasicHeader;
import android.net.Uri;
import android.util.Log;
import com.therabbitmage.android.beacon.entities.google.urlshortener.Url;
/**
 * Thin client for the Google URL Shortener REST API.
 */
public final class URLShortenerAPI {
    private static final String TAG = URLShortenerAPI.class.getSimpleName();
    private static final String BASE_URL = "https://www.googleapis.com/urlshortener/v1/url";

    /**
     * Shortens {@code url} by POSTing it as JSON to the URL Shortener endpoint.
     *
     * @param url the long URL to shorten
     * @return a {@link NetworkResponse} with error 0 and the parsed {@link Url} on HTTP 200,
     *         or error 1 on any other status
     * @throws IOException        on network or stream failure
     * @throws URISyntaxException propagated from the transport layer
     */
    public static NetworkResponse urlShorten(String url) throws IOException, URISyntaxException {
        android.net.Uri.Builder uriBuilder = Uri.parse(BASE_URL).buildUpon();
        String uri = uriBuilder.build().toString();
        // NOTE: a Content-Type header array used to be built here but was never passed to
        // post(...); removed as dead code. The request headers come from
        // getDefaultApacheHeaders() below.
        ApacheNetworkUtils.getAndroidInstance(ApacheNetworkUtils.sUserAgent, false);
        HttpResponse response = ApacheNetworkUtils.post(
                uri,
                ApacheNetworkUtils.getDefaultApacheHeaders(),
                new Url(url).toJson());
        ApacheNetworkUtils.toStringResponseHeaders(response.getAllHeaders());
        ApacheNetworkUtils.toStringStatusLine(response.getStatusLine());
        HttpEntity entity = response.getEntity();
        NetworkResponse networkResponse = new NetworkResponse();
        if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
            networkResponse.setError(0);
            BufferedReader br = new BufferedReader(new InputStreamReader(entity.getContent()));
            StringBuilder stringBuilder = new StringBuilder();
            try {
                String line;
                while ((line = br.readLine()) != null) {
                    stringBuilder.append(line);
                }
            } finally {
                // Close the stream even when readLine throws.
                br.close();
            }
            Log.i(TAG, "Body: " + stringBuilder.toString());
            networkResponse.setUrl(Url.fromJson(stringBuilder.toString()));
        } else {
            networkResponse.setError(1);
        }
        return networkResponse;
    }
}
|
GregSaintJean/Beacon
|
src/com/therabbitmage/android/beacon/network/URLShortenerAPI.java
|
Java
|
apache-2.0
| 2,158 |
/**
* Copyright (c) 2015 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jmnarloch.spring.jaxrs.client.support;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import javax.ws.rs.ext.Provider;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
/**
* Tests the {@link JaxRsClientProxyFactorySupport} class.
*
* @author Jakub Narloch
*/
/**
 * Tests the {@link JaxRsClientProxyFactorySupport} class.
 *
 * @author Jakub Narloch
 */
public class JaxRsClientProxyFactorySupportTest {

    /**
     * The instance of the tested class.
     */
    private JaxRsClientProxyFactorySupport instance;

    /**
     * Sets up the test environment.
     *
     * @throws Exception if any error occurs
     */
    @Before
    public void setUp() throws Exception {
        instance = new MockJaxRsClientProxyFactorySupport();
    }

    @Test
    public void shouldRetrieveProviders() {
        // given: two configurers, each of which registers the same simple provider
        final List<JaxRsClientConfigurer> configurers = Arrays.asList(
                mock(JaxRsClientConfigurer.class),
                mock(JaxRsClientConfigurer.class)
        );
        final Answer registerSimpleProvider = new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                ((ProviderRegistry) invocation.getArguments()[0]).addProvider(SimpleProvider.class);
                return null;
            }
        };
        for (JaxRsClientConfigurer configurer : configurers) {
            doAnswer(registerSimpleProvider).when(configurer).registerProviders(any(ProviderRegistry.class));
        }
        instance.setConfigurers(configurers);

        // when
        final Class<?>[] providers = instance.getProviders();

        // then: one provider entry is collected per configurer
        assertNotNull(providers);
        assertEquals(2, providers.length);
    }

    private static class MockJaxRsClientProxyFactorySupport extends JaxRsClientProxyFactorySupport {

        @Override
        public <T> T createClientProxy(Class<T> serviceClass, String serviceUrl) {
            return null;
        }
    }

    /**
     * A simple provider class used for testing.
     *
     * @author Jakub Narloch
     */
    @Provider
    private static class SimpleProvider {
    }
}
|
jmnarloch/spring-jax-rs-client-proxy
|
src/test/java/com/github/jmnarloch/spring/jaxrs/client/support/JaxRsClientProxyFactorySupportTest.java
|
Java
|
apache-2.0
| 2,921 |
/*
Copyright (c) 2012 Marco Amadei.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.ucanaccess.test;
import java.sql.Connection;
import com.healthmarketscience.jackcess.Database.FileFormat;
/**
 * Verifies that a password-protected Access database rejects connections without the
 * password and accepts them once the password is supplied.
 */
public class PasswordTest extends UcanaccessTestBase {

    public PasswordTest() {
        super();
    }

    public PasswordTest(FileFormat accVer) {
        super(accVer);
    }

    public String getAccessPath() {
        return "net/ucanaccess/test/resources/pwd.mdb";
    }

    // Intentionally empty: skips the base-class setup so no connection is opened before the
    // test configures the password itself. TODO confirm against UcanaccessTestBase.setUp().
    protected void setUp() throws Exception {}

    public void testPassword() throws Exception {
        Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
        Connection ucanaccessConnection = null;
        try {
            ucanaccessConnection = getUcanaccessConnection();
        } catch (Exception ignored) {
            // Expected: the database is password protected, so connecting without a
            // password must fail and leave the connection null.
        }
        assertNull(ucanaccessConnection);
        super.setPassword("ucanaccess");
        // With the password set, the connection URL now carries it and connecting must succeed.
        try {
            ucanaccessConnection = getUcanaccessConnection();
        } catch (Exception e) {
            // Unexpected here; print for diagnostics — the assertNotNull below will fail the test.
            e.printStackTrace();
        }
        assertNotNull(ucanaccessConnection);
    }
}
|
lmu-bioinformatics/ucanaccess
|
src/test/java/net/ucanaccess/test/PasswordTest.java
|
Java
|
apache-2.0
| 1,516 |
/*
* Copyright 2017-2019 University of Hildesheim, Software Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ssehub.kernel_haven.code_model;
import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.config.DefaultSettings;
import net.ssehub.kernel_haven.provider.AbstractCache;
import net.ssehub.kernel_haven.provider.AbstractProvider;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
/**
* The provider for the code model. This class serves as an intermediate between the analysis and the code model
* extractor.
*
* @author Adam
*/
/**
 * The provider for the code model. This class serves as an intermediate between the analysis
 * and the code model extractor.
 *
 * @author Adam
 */
public class CodeModelProvider extends AbstractProvider<SourceFile<?>> {

    @Override
    protected long getTimeout() {
        return config.getValue(DefaultSettings.CODE_PROVIDER_TIMEOUT);
    }

    /**
     * Resolves the configured code.extractor.files entries (relative to the source tree) into
     * a list of files to extract: plain files are taken as-is, directories are scanned
     * recursively for names matching the configured regex.
     *
     * @return the target files, relative to the source tree
     * @throws SetUpException if a configured entry does not exist
     */
    @Override
    protected @NonNull List<@NonNull File> getTargets() throws SetUpException {
        List<@NonNull File> result = new LinkedList<>();
        Pattern pattern = config.getValue(DefaultSettings.CODE_EXTRACTOR_FILE_REGEX);
        for (String relativeStr : config.getValue(DefaultSettings.CODE_EXTRACTOR_FILES)) {
            File relativeFile = new File(relativeStr);
            File absoluteFile = new File(config.getValue(DefaultSettings.SOURCE_TREE), relativeFile.getPath());
            if (absoluteFile.isFile()) {
                result.add(relativeFile);
            } else if (absoluteFile.isDirectory()) {
                readFilesFromDirectory(absoluteFile, pattern, result);
            } else {
                throw new SetUpException("Non-existing file specified in code.extractor.files: "
                        + relativeFile.getPath());
            }
        }
        return result;
    }

    /**
     * Finds all files in the given directory (recursively) that match the given
     * pattern. The files that match are added to result (relative to the source tree).
     *
     * @param directory
     *            The directory to search in.
     * @param pattern
     *            The pattern to check against.
     * @param result
     *            The list to add the found files to.
     */
    private void readFilesFromDirectory(File directory, Pattern pattern, List<File> result) {
        File[] children = directory.listFiles();
        if (children == null) {
            // FIX: listFiles() returns null for I/O errors (e.g. unreadable directories);
            // previously this caused a NullPointerException. Skip such directories.
            return;
        }
        for (File file : children) {
            if (file.isDirectory()) {
                readFilesFromDirectory(file, pattern, result);
            } else {
                Matcher m = pattern.matcher(file.getName());
                if (m.matches()) {
                    result.add(config.getValue(DefaultSettings.SOURCE_TREE).toPath()
                            .relativize(file.toPath()).toFile());
                }
            }
        }
    }

    @Override
    protected @NonNull AbstractCache<SourceFile<?>> createCache() {
        return new JsonCodeModelCache(config.getValue(DefaultSettings.CACHE_DIR),
                config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_COMPRESS));
    }

    @Override
    public boolean readCache() {
        return config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_READ);
    }

    @Override
    public boolean writeCache() {
        return config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_WRITE);
    }

    @Override
    public int getNumberOfThreads() {
        return config.getValue(DefaultSettings.CODE_EXTRACTOR_THREADS);
    }
}
|
KernelHaven/KernelHaven
|
src/net/ssehub/kernel_haven/code_model/CodeModelProvider.java
|
Java
|
apache-2.0
| 3,998 |
/*
* Copyright 2005-2008 The Kuali Foundation
*
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.edl.impl;
/**
 * Empty subclass of {@link TestEDLModelCompent}; adds no behavior of its own.
 * NOTE(review): presumably referenced by class name from test EDL configuration so a
 * distinct component type exists — confirm against the test configs.
 */
public class TestConfigProcessor extends TestEDLModelCompent {
}
|
sbower/kuali-rice-1
|
it/kew/src/test/java/org/kuali/rice/edl/impl/TestConfigProcessor.java
|
Java
|
apache-2.0
| 734 |
package org.apache.hadoop.hive.kafka.camus;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map;
/**
* The key for the mapreduce job to pull kafka. Contains offsets and the
* checksum.
*/
/**
 * The key for the mapreduce job to pull kafka. Contains offsets and the
 * checksum.
 */
public class KafkaKey implements WritableComparable<KafkaKey>, IKafkaKey {

  public static final Text SERVER = new Text("server");
  public static final Text SERVICE = new Text("service");
  // NOTE(review): shared mutable instance — callers must not modify it concurrently.
  public static KafkaKey DUMMY_KEY = new KafkaKey();

  private String leaderId = "";
  private int partition = 0;
  private long beginOffset = 0;
  private long offset = 0;
  private long checksum = 0;
  private String topic = "";
  private long time = 0;
  private String server = "";   // legacy field, kept for wire-format compatibility
  private String service = "";  // legacy field, kept for wire-format compatibility
  private MapWritable partitionMap = new MapWritable();

  /**
   * dummy empty constructor
   */
  public KafkaKey() {
    this("dummy", "0", 0, 0, 0, 0);
  }

  /** Copy constructor; note that {@code leaderId} is not copied (matches original behavior). */
  public KafkaKey(KafkaKey other) {
    this.partition = other.partition;
    this.beginOffset = other.beginOffset;
    this.offset = other.offset;
    this.checksum = other.checksum;
    this.topic = other.topic;
    this.time = other.time;
    this.server = other.server;
    this.service = other.service;
    this.partitionMap = new MapWritable(other.partitionMap);
  }

  public KafkaKey(String topic, String leaderId, int partition) {
    this.set(topic, leaderId, partition, 0, 0, 0);
  }

  public KafkaKey(String topic, String leaderId, int partition, long beginOffset, long offset) {
    this.set(topic, leaderId, partition, beginOffset, offset, 0);
  }

  public KafkaKey(String topic, String leaderId, int partition, long beginOffset, long offset, long checksum) {
    this.set(topic, leaderId, partition, beginOffset, offset, checksum);
  }

  /** Sets all key fields and stamps the key with the current wall-clock time. */
  public void set(String topic, String leaderId, int partition, long beginOffset, long offset, long checksum) {
    this.leaderId = leaderId;
    this.partition = partition;
    this.beginOffset = beginOffset;
    this.offset = offset;
    this.checksum = checksum;
    this.topic = topic;
    this.time = System.currentTimeMillis(); // if event can't be decoded,
                                            // this time will be used for
                                            // debugging.
  }

  /** Resets every field to its default/empty value. */
  public void clear() {
    leaderId = "";
    partition = 0;
    beginOffset = 0;
    offset = 0;
    checksum = 0;
    topic = "";
    time = 0;
    server = "";
    service = "";
    partitionMap = new MapWritable();
  }

  /** @return the server stored in the partition map. NOTE: NPEs if no server was ever set. */
  public String getServer() {
    return partitionMap.get(SERVER).toString();
  }

  public void setServer(String newServer) {
    partitionMap.put(SERVER, new Text(newServer));
  }

  /** @return the service stored in the partition map. NOTE: NPEs if no service was ever set. */
  public String getService() {
    return partitionMap.get(SERVICE).toString();
  }

  public void setService(String newService) {
    partitionMap.put(SERVICE, new Text(newService));
  }

  public long getTime() {
    return time;
  }

  public void setTime(long time) {
    this.time = time;
  }

  public String getTopic() {
    return topic;
  }

  public String getLeaderId() {
    return leaderId;
  }

  public int getPartition() {
    return this.partition;
  }

  public long getBeginOffset() {
    return this.beginOffset;
  }

  public void setOffset(long offset) {
    this.offset = offset;
  }

  public long getOffset() {
    return this.offset;
  }

  public long getChecksum() {
    return this.checksum;
  }

  /** @return the stored "message.size" entry from the partition map, or a 1024-byte estimate. */
  @Override
  public long getMessageSize() {
    Text key = new Text("message.size");
    if (this.partitionMap.containsKey(key))
      return ((LongWritable) this.partitionMap.get(key)).get();
    else
      return 1024; //default estimated size
  }

  public void setMessageSize(long messageSize) {
    Text key = new Text("message.size");
    put(key, new LongWritable(messageSize));
  }

  public void put(Writable key, Writable value) {
    this.partitionMap.put(key, value);
  }

  public void addAllPartitionMap(MapWritable partitionMap) {
    this.partitionMap.putAll(partitionMap);
  }

  public MapWritable getPartitionMap() {
    return partitionMap;
  }

  /** Deserializes the key; legacy server/service fields are migrated into the partition map on failure. */
  @Override
  public void readFields(DataInput in) throws IOException {
    this.leaderId = UTF8.readString(in);
    this.partition = in.readInt();
    this.beginOffset = in.readLong();
    this.offset = in.readLong();
    this.checksum = in.readLong();
    this.topic = in.readUTF();
    this.time = in.readLong();
    this.server = in.readUTF(); // left for legacy
    this.service = in.readUTF(); // left for legacy
    this.partitionMap = new MapWritable();
    try {
      this.partitionMap.readFields(in);
    } catch (IOException e) {
      // Old records have no serialized partition map; rebuild it from the legacy fields.
      this.setServer(this.server);
      this.setService(this.service);
    }
  }

  @Override
  public void write(DataOutput out) throws IOException {
    UTF8.writeString(out, this.leaderId);
    out.writeInt(this.partition);
    out.writeLong(this.beginOffset);
    out.writeLong(this.offset);
    out.writeLong(this.checksum);
    out.writeUTF(this.topic);
    out.writeLong(this.time);
    out.writeUTF(this.server); // left for legacy
    out.writeUTF(this.service); // left for legacy
    this.partitionMap.write(out);
  }

  /**
   * Orders keys by partition, then offset, then checksum.
   */
  @Override
  public int compareTo(KafkaKey o) {
    if (partition != o.partition) {
      // BUG FIX: this previously read "return partition = o.partition;", which *assigned*
      // the other key's partition to this key (corrupting it) and returned a value that
      // violates the Comparable sign contract. Compare the two values instead.
      return partition < o.partition ? -1 : 1;
    } else {
      if (offset > o.offset) {
        return 1;
      } else if (offset < o.offset) {
        return -1;
      } else {
        if (checksum > o.checksum) {
          return 1;
        } else if (checksum < o.checksum) {
          return -1;
        } else {
          return 0;
        }
      }
    }
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    builder.append("topic=");
    builder.append(topic);
    builder.append(" partition=");
    builder.append(partition);
    builder.append("leaderId=");
    builder.append(leaderId);
    builder.append(" server=");
    builder.append(server);
    builder.append(" service=");
    builder.append(service);
    builder.append(" beginOffset=");
    builder.append(beginOffset);
    builder.append(" offset=");
    builder.append(offset);
    builder.append(" msgSize=");
    builder.append(getMessageSize());
    // NOTE(review): "server" is appended twice; kept as-is for output compatibility.
    builder.append(" server=");
    builder.append(server);
    builder.append(" checksum=");
    builder.append(checksum);
    builder.append(" time=");
    builder.append(time);
    for (Map.Entry<Writable, Writable> e : partitionMap.entrySet()) {
      builder.append(" " + e.getKey() + "=");
      builder.append(e.getValue().toString());
    }
    return builder.toString();
  }
}
|
HiveKa/HiveKa
|
src/main/java/org/apache/hadoop/hive/kafka/camus/KafkaKey.java
|
Java
|
apache-2.0
| 6,733 |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.jstestdriver.output;
/**
* Escapes and formats a filename.
*
* @author Cory Smith (corbinrsmith@gmail.com)
*/
/**
 * Escapes and formats a filename: characters that are unsafe in file names are replaced
 * with 'a', the escaped path is truncated to a bounded length, and the result is inserted
 * into the given {@link String#format} template.
 *
 * @author Cory Smith (corbinrsmith@gmail.com)
 */
public class FileNameFormatter {

  /** Longest escaped path retained, to keep generated file names within OS limits. */
  private static final int MAX_ESCAPED_LENGTH = 200;

  /**
   * @param path   the path to escape
   * @param format a {@link String#format} template with one {@code %s} placeholder
   * @return the formatted name with unsafe characters escaped and length bounded
   */
  public String format(String path, String format) {
    // FIX: the original chain called .replace(":", "a") twice; the duplicate is removed.
    String escaped = path
        .replace('/', 'a')
        .replace('\\', 'a')
        .replace(">", "a")
        .replace(":", "a")
        .replace(";", "a")
        .replace("+", "a")
        .replace(",", "a")
        .replace("<", "a")
        .replace("?", "a")
        .replace("*", "a")
        .replace(" ", "a");
    return String.format(format,
        escaped.length() > MAX_ESCAPED_LENGTH ? escaped.substring(0, MAX_ESCAPED_LENGTH) : escaped);
  }
}
|
BladeRunnerJS/brjs-JsTestDriver
|
JsTestDriver/src/com/google/jstestdriver/output/FileNameFormatter.java
|
Java
|
apache-2.0
| 1,271 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.