code (stringlengths 3-1.04M) | repo_name (stringlengths 5-109) | path (stringlengths 6-306) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 3-1.04M)
---|---|---|---|---|---|
package com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.service;
import com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.entity.Person;
import com.walmart.labs.pcs.normalize.MongoDB.SpringBoot.repository.PersonRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* Created by pzhong1 on 1/23/15.
*/
@Service // assumption: this class is meant to be a component-scanned Spring bean so that @Autowired injection works
public class PersonService {
@Autowired
private PersonRepository personRepository;
public List<Person> getAllPersons(){
return personRepository.findAll();
}
public Person searchPerson(String id){
return personRepository.findOne(id);
}
public void insertPersonWithNameJohnAndRandomAge(Person person){
personRepository.save(person);
}
public void dropPersonCollection() {
personRepository.deleteAll();
}
}
| ArthurZhong/SparkStormKafkaTest | src/main/java/com/walmart/labs/pcs/normalize/MongoDB/SpringBoot/service/PersonService.java | Java | apache-2.0 | 925 |
package edu.wsu.weather.agweathernet.helpers;
import java.io.Serializable;
public class StationModel implements Serializable {
private static final long serialVersionUID = 1L;
private String unitId;
private String name;
private String county;
private String city;
private String state;
private String installationDate;
private String distance;
private boolean isFavourite;
// station details data
private String airTemp;
private String relHumid;
private String windSpeed;
private String precip;
public StationModel() {
}
public StationModel(String unitId, String name, String county,
String installationDate) {
this.setUnitId(unitId);
this.setName(name);
this.setCounty(county);
this.setInstallationDate(installationDate);
}
public String getUnitId() {
return unitId;
}
public void setUnitId(String unitId) {
this.unitId = unitId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCounty() {
return county;
}
public void setCounty(String county) {
this.county = county;
}
public String getInstallationDate() {
return installationDate;
}
public void setInstallationDate(String installationDate) {
this.installationDate = installationDate;
}
@Override
public String toString() {
return this.name + " " + this.county;
}
public boolean isFavourite() {
return isFavourite;
}
public void setFavourite(boolean isFavourite) {
this.isFavourite = isFavourite;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getDistance() {
return distance;
}
public void setDistance(String distance) {
this.distance = distance;
}
public String getAirTemp() {
return airTemp;
}
public void setAirTemp(String airTemp) {
this.airTemp = airTemp;
}
public String getWindSpeed() {
return windSpeed;
}
public void setWindSpeed(String windSpeed) {
this.windSpeed = windSpeed;
}
public String getPrecip() {
return precip;
}
public void setPrecip(String precip) {
this.precip = precip;
}
public String getRelHumid() {
return relHumid;
}
public void setRelHumid(String relHumid) {
this.relHumid = relHumid;
}
}
| levanlevi/AgWeatherNet | src/edu/wsu/weather/agweathernet/helpers/StationModel.java | Java | apache-2.0 | 2,366 |
package ca.uhn.fhir.jpa.term;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class VersionIndependentConcept {
private String mySystem;
private String myCode;
public VersionIndependentConcept(String theSystem, String theCode) {
setSystem(theSystem);
setCode(theCode);
}
public String getSystem() {
return mySystem;
}
public void setSystem(String theSystem) {
mySystem = theSystem;
}
public String getCode() {
return myCode;
}
public void setCode(String theCode) {
myCode = theCode;
}
}
| Gaduo/hapi-fhir | hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/VersionIndependentConcept.java | Java | apache-2.0 | 1,149 |
/**
* Copyright 2015-2017 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.benchmarks;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import zipkin.Annotation;
import zipkin.BinaryAnnotation;
import zipkin.Constants;
import zipkin.Endpoint;
import zipkin.TraceKeys;
import zipkin2.Span;
import zipkin.internal.V2SpanConverter;
import zipkin.internal.Util;
@Measurement(iterations = 5, time = 1)
@Warmup(iterations = 10, time = 1)
@Fork(3)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Thread)
@Threads(1)
public class Span2ConverterBenchmarks {
Endpoint frontend = Endpoint.create("frontend", 127 << 24 | 1);
Endpoint backend = Endpoint.builder()
.serviceName("backend")
.ipv4(192 << 24 | 168 << 16 | 99 << 8 | 101)
.port(9000)
.build();
zipkin.Span shared = zipkin.Span.builder()
.traceIdHigh(Util.lowerHexToUnsignedLong("7180c278b62e8f6a"))
.traceId(Util.lowerHexToUnsignedLong("216a2aea45d08fc9"))
.parentId(Util.lowerHexToUnsignedLong("6b221d5bc9e6496c"))
.id(Util.lowerHexToUnsignedLong("5b4185666d50f68b"))
.name("get")
.timestamp(1472470996199000L)
.duration(207000L)
.addAnnotation(Annotation.create(1472470996199000L, Constants.CLIENT_SEND, frontend))
.addAnnotation(Annotation.create(1472470996238000L, Constants.WIRE_SEND, frontend))
.addAnnotation(Annotation.create(1472470996250000L, Constants.SERVER_RECV, backend))
.addAnnotation(Annotation.create(1472470996350000L, Constants.SERVER_SEND, backend))
.addAnnotation(Annotation.create(1472470996403000L, Constants.WIRE_RECV, frontend))
.addAnnotation(Annotation.create(1472470996406000L, Constants.CLIENT_RECV, frontend))
.addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/api", frontend))
.addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/backend", backend))
.addBinaryAnnotation(BinaryAnnotation.create("clnt/finagle.version", "6.45.0", frontend))
.addBinaryAnnotation(BinaryAnnotation.create("srv/finagle.version", "6.44.0", backend))
.addBinaryAnnotation(BinaryAnnotation.address(Constants.CLIENT_ADDR, frontend))
.addBinaryAnnotation(BinaryAnnotation.address(Constants.SERVER_ADDR, backend))
.build();
zipkin.Span server = zipkin.Span.builder()
.traceIdHigh(Util.lowerHexToUnsignedLong("7180c278b62e8f6a"))
.traceId(Util.lowerHexToUnsignedLong("216a2aea45d08fc9"))
.parentId(Util.lowerHexToUnsignedLong("6b221d5bc9e6496c"))
.id(Util.lowerHexToUnsignedLong("5b4185666d50f68b"))
.name("get")
.addAnnotation(Annotation.create(1472470996250000L, Constants.SERVER_RECV, backend))
.addAnnotation(Annotation.create(1472470996350000L, Constants.SERVER_SEND, backend))
.addBinaryAnnotation(BinaryAnnotation.create(TraceKeys.HTTP_PATH, "/backend", backend))
.addBinaryAnnotation(BinaryAnnotation.create("srv/finagle.version", "6.44.0", backend))
.addBinaryAnnotation(BinaryAnnotation.address(Constants.CLIENT_ADDR, frontend))
.build();
Span server2 = Span.newBuilder()
.traceId("7180c278b62e8f6a216a2aea45d08fc9")
.parentId("6b221d5bc9e6496c")
.id("5b4185666d50f68b")
.name("get")
.kind(Span.Kind.SERVER)
.shared(true)
.localEndpoint(backend.toV2())
.remoteEndpoint(frontend.toV2())
.timestamp(1472470996250000L)
.duration(100000L)
.putTag(TraceKeys.HTTP_PATH, "/backend")
.putTag("srv/finagle.version", "6.44.0")
.build();
@Benchmark public List<Span> fromSpan_splitShared() {
return V2SpanConverter.fromSpan(shared);
}
@Benchmark public List<Span> fromSpan() {
return V2SpanConverter.fromSpan(server);
}
@Benchmark public zipkin.Span toSpan() {
return V2SpanConverter.toSpan(server2);
}
// Convenience main entry-point
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(".*" + Span2ConverterBenchmarks.class.getSimpleName() + ".*")
.build();
new Runner(opt).run();
}
}
| soundcloud/zipkin | benchmarks/src/main/java/zipkin/benchmarks/Span2ConverterBenchmarks.java | Java | apache-2.0 | 5,211 |
/*
* Copyright 2015 Adobe Systems Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ${package}.core.servlets;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.servlets.HttpConstants;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.api.resource.ValueMap;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Component;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Servlet that writes some sample content into the response. It is mounted for
* all resources of a specific Sling resource type. The
* {@link SlingSafeMethodsServlet} shall be used for HTTP methods that are
* idempotent. For write operations use the {@link SlingAllMethodsServlet}.
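*
* A hypothetical request/response sketch (the content path below is illustrative only; the
* servlet itself is registered for GET requests with the {@code txt} extension on resources of
* type {@code ${appsFolderName}/components/structure/page}):
* <pre>
* GET /content/mysite/en.txt
* Title = &lt;value of the page's jcr:title property&gt;
* </pre>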
*/
@Component(service=Servlet.class,
property={
Constants.SERVICE_DESCRIPTION + "=Simple Demo Servlet",
"sling.servlet.methods=" + HttpConstants.METHOD_GET,
"sling.servlet.resourceTypes="+ "${appsFolderName}/components/structure/page",
"sling.servlet.extensions=" + "txt"
})
public class SimpleServlet extends SlingSafeMethodsServlet {
private static final long serialVersionUID = 1L;
@Override
protected void doGet(final SlingHttpServletRequest req,
final SlingHttpServletResponse resp) throws ServletException, IOException {
final Resource resource = req.getResource();
resp.setContentType("text/plain");
resp.getWriter().write("Title = " + resource.adaptTo(ValueMap.class).get("jcr:title"));
}
}
| MyAccInt/aem-project-archetype | src/main/archetype/core/src/main/java/core/servlets/SimpleServlet.java | Java | apache-2.0 | 2,367 |
package com.comp.ninti.sportsmanager;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ListView;
import com.comp.ninti.adapter.LeaderBoardAdapter;
import com.comp.ninti.database.DbHandler;
import com.comp.ninti.general.core.Event;
public class LeaderBoard extends AppCompatActivity {
private Event event;
private DbHandler dbHandler;
private LeaderBoardAdapter leaderBoardAdapter;
private ListView listView;
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
super.onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
event = getIntent().getExtras().getParcelable("com.comp.ninti.general.core.Event");
Intent intent = new Intent();
intent.putExtra("com.comp.ninti.general.core.Event", event);
setResult(RESULT_CANCELED, intent);
setContentView(R.layout.activity_leader_board);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
listView = (ListView) findViewById(R.id.lvLeaderBoard);
listView.setTextFilterEnabled(true);
displayItems();
}
private void displayItems() {
dbHandler = new DbHandler(LeaderBoard.this, "", null, 1);
new Handler().post(new Runnable() {
@Override
public void run() {
leaderBoardAdapter = new LeaderBoardAdapter(
LeaderBoard.this,
dbHandler.getLeaderBoard(event.getId()),
0);
listView.setAdapter(leaderBoardAdapter);
}
});
dbHandler.close();
}
@Override
protected void onResume() {
super.onResume();
displayItems();
}
}
| Nintinugga/SportsManager | app/src/main/java/com/comp/ninti/sportsmanager/LeaderBoard.java | Java | apache-2.0 | 2,193 |
/**
* www.bplow.com
*/
package com.bplow.netconn.systemmng.domain;
/**
* @desc Role
* @author wangxiaolei
* @date May 8, 2016, 4:30:39 PM
*/
public class RoleDomain {
private String roleId;
private String userId;
private String roleName;
private String roleDesc;
public String getRoleId() {
return roleId;
}
public void setRoleId(String roleId) {
this.roleId = roleId;
}
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getRoleName() {
return roleName;
}
public void setRoleName(String roleName) {
this.roleName = roleName;
}
public String getRoleDesc() {
return roleDesc;
}
public void setRoleDesc(String roleDesc) {
this.roleDesc = roleDesc;
}
}
| ahwxl/ads | ads/src/main/java/com/bplow/netconn/systemmng/domain/RoleDomain.java | Java | apache-2.0 | 824 |
package it.breex.bus.impl.jms;
import it.breex.bus.event.AbstractResponseEvent;
import it.breex.bus.event.EventData;
import it.breex.bus.event.EventHandler;
import it.breex.bus.event.RequestEvent;
import it.breex.bus.impl.AbstractEventManager;
import java.util.UUID;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.Session;
public class JmsEventManager extends AbstractEventManager {
private final static String DEFAULT_REQUEST_QUEUE = "breexDefaulRequestQueue";
private final String nodeId = UUID.randomUUID().toString();
private final boolean transacted = false;
private final int acknowledgeMode = Session.AUTO_ACKNOWLEDGE;
private final Connection jmsConnection;
private final Session session;
private final Queue requestQueue;
private final MessageProducer requestMessageProducer;
private final Queue responseQueue;
private final MessageProducer responseMessageProducer;
public JmsEventManager(ConnectionFactory jmsConnectionFactory) {
try {
jmsConnection = jmsConnectionFactory.createConnection();
jmsConnection.start();
session = jmsConnection.createSession(transacted, acknowledgeMode);
requestQueue = session.createQueue(DEFAULT_REQUEST_QUEUE);
requestMessageProducer = session.createProducer(requestQueue);
responseQueue = session.createTemporaryQueue();
responseMessageProducer = session.createProducer(null);
session.createConsumer(responseQueue).setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
try {
EventData<?> eventData = (EventData<?>) ((ObjectMessage) message).getObject();
getLogger().debug("Event Response received. Event name: [{}], sender id: [{}]", eventData.getName(),
eventData.getSenderId());
//logger.debug("Event Response received. Event name: [{}], sender id: [{}]", eventData.eventId.eventName, eventData.eventId.nodeId);
AbstractResponseEvent responseEvent = new AbstractResponseEvent(eventData) {
};
processResponse(responseEvent, getResponseHandlers().remove(eventData.getId()));
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
});
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
public String getLocalNodeId() {
return nodeId;
}
@Override
protected <I, O> void prepareResponse(EventData<I> requestEventData, EventData<O> responseEventData) {
try {
Message responseMessage = session.createObjectMessage(responseEventData);
responseMessageProducer.send((Destination) requestEventData.getTransportData(), responseMessage);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
protected <I, O> void registerCallback(String eventName, EventHandler<RequestEvent<I, O>> eventHandler) {
getLogger().debug("Registering event. Event name: [{}]", eventName);
MessageConsumer eventConsumer;
try {
eventConsumer = session.createConsumer(requestQueue, "JMSCorrelationID='" + eventName + "'");
eventConsumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
EventData<I> requestEventData;
try {
requestEventData = (EventData<I>) ((ObjectMessage) message).getObject();
getLogger().debug("Received event. Event name: [{}] CorrelationID: [{}]", requestEventData.getName(),
message.getJMSCorrelationID());
processRequest(requestEventData);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
});
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
protected <I> void prepareRequest(EventData<I> eventData) {
try {
eventData.setTransportData(responseQueue);
ObjectMessage message = session.createObjectMessage(eventData);
message.setJMSCorrelationID(eventData.getName());
message.setJMSReplyTo(responseQueue);
requestMessageProducer.send(message);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
}
| breex-it/breex-bus | breex-bus-jms/src/main/java/it/breex/bus/impl/jms/JmsEventManager.java | Java | apache-2.0 | 4,233 |
/**
* Copyright 2013 Oak Ridge National Laboratory
* Author: James Horey <horeyjl@ornl.gov>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package gov.ornl.paja.storage;
/**
* Java libs.
**/
import java.util.Iterator;
import java.nio.ByteBuffer;
/**
* A log message is the thing that gets written to the log.
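* <p>
* Construction sketch (the log number, id, and payload values are purely illustrative):
* <pre>{@code
* LogMessage entry = new LogMessage(7, "txn-42".getBytes(), "committed".getBytes());
* byte[] payload = entry.getMsg();
* }</pre>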
*/
public class LogMessage {
private int logNum; // Current log ID.
private byte[] id; // ID of the log message.
private byte[] msg; // Actual log message
/**
* @param logNum Each log message has a unique log number
* @param id Application defined identification label
* @param msg Actual log message
*/
public LogMessage(int logNum, byte[] id, byte[] msg) {
this.logNum = logNum;
this.id = id;
this.msg = msg;
}
/**
* Get/set the log message ID.
*/
public void setID(byte[] id) {
this.id = id;
}
public byte[] getID() {
return id;
}
/**
* Get/set the log message.
*/
public void setMsg(byte[] msg) {
this.msg = msg;
}
public byte[] getMsg() {
return msg;
}
/**
* Get/set the log message num.
*/
public void setNum(int i) {
logNum = i;
}
public int getNum() {
return logNum;
}
}
| jhorey/Paja | src/gov/ornl/paja/storage/LogMessage.java | Java | apache-2.0 | 1,764 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package michid.jsonjerk;
import michid.jsonjerk.JsonValue.JsonArray;
import michid.jsonjerk.JsonValue.JsonAtom;
import michid.jsonjerk.JsonValue.JsonObject;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Utility class for parsing JSON objects and arrays into {@link JsonObject}s
* and {@link JsonArray}s, respectively. In contrast to {@link FullJsonParser},
* this implementation resolves nested structures lazily. That, is it does a
* level order traverse of the JSON tree.
* <p/>
* The parser looks for 'hints' in the JSON text to speed up parsing: when it
* encounters an integer value with the key ":size" in an object, that value
* is used for the size of the entire object (including sub-objects).
*
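* A minimal usage sketch; how the {@code JsonTokenizer} is obtained is an assumption of the
* example and not shown here:
* <pre>{@code
* // JsonTokenizer tokenizer = ...; // e.g. over {"a":1,"nested":{":size":42,"b":2}}
* JsonObject root = LevelOrderJsonParser.parseObject(tokenizer);
* JsonValue nested = root.get("nested"); // the nested object is parsed only when accessed
* }</pre>
*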
* @see FullJsonParser
*/
public final class LevelOrderJsonParser {
private LevelOrderJsonParser() { }
/**
* Parse a JSON object from {@code tokenizer}
* @param tokenizer
* @return a {@code JsonObject}
* @throws ParseException
*/
public static JsonObject parseObject(JsonTokenizer tokenizer) {
ObjectHandler objectHandler = new ObjectHandler();
new JsonParser(objectHandler).parseObject(tokenizer);
return objectHandler.getObject();
}
/**
* Parse a JSON array from {@code tokenizer}
* @param tokenizer
* @return a {@code JsonArray}
* @throws ParseException
*/
public static JsonArray parseArray(JsonTokenizer tokenizer) {
ArrayHandler arrayHandler = new ArrayHandler();
new JsonParser(arrayHandler).parseArray(tokenizer);
return arrayHandler.getArray();
}
/**
* This implementation of a {@code JsonHandler} builds up a {@code JsonObject}
* from its constituents. Nested objects are not fully parsed though, but a
* reference to the parser is kept which is only invoked when that nested object
* is actually accessed.
*/
public static class ObjectHandler extends JsonHandler {
private final JsonObject object = new JsonObject(new LinkedHashMap<String, JsonValue>());
@Override
public void atom(Token key, Token value) {
object.put(key.text(), new JsonAtom(value));
}
@Override
public void object(JsonParser parser, Token key, JsonTokenizer tokenizer) {
object.put(key.text(), new DeferredObjectValue(tokenizer.copy()));
tokenizer.setPos(getNextPairPos(tokenizer.copy()));
}
@Override
public void array(JsonParser parser, Token key, JsonTokenizer tokenizer) {
object.put(key.text(), parseArray(tokenizer));
}
public JsonObject getObject() {
return object;
}
}
/**
* This implementation of a {@code JsonHandler} builds up a {@code JsonArray}
* from its constituents. Nested objects are not fully parsed though, but a
* reference to the parser is kept which is only invoked when that nested object
* is actually accessed.
*/
public static class ArrayHandler extends JsonHandler {
private final JsonArray array = new JsonArray(new ArrayList<JsonValue>());
@Override
public void atom(Token key, Token value) {
array.add(new JsonAtom(value));
}
@Override
public void object(JsonParser parser, Token key, JsonTokenizer tokenizer) {
array.add(new DeferredObjectValue(tokenizer.copy()));
tokenizer.setPos(getNextPairPos(tokenizer.copy()));
}
@Override
public void array(JsonParser parser, Token key, JsonTokenizer tokenizer) {
array.add(parseArray(tokenizer));
}
public JsonArray getArray() {
return array;
}
}
//------------------------------------------< private >---
private static class BreakException extends RuntimeException{
private static final BreakException BREAK = new BreakException();
}
private static int getNextPairPos(JsonTokenizer tokenizer) {
SkipObjectHandler skipObjectHandler = new SkipObjectHandler(tokenizer.pos());
try {
new JsonParser(skipObjectHandler).parseObject(tokenizer);
}
catch (BreakException e) {
return skipObjectHandler.newPos;
}
return tokenizer.pos();
}
private static class DeferredObjectValue extends JsonObject {
private final JsonTokenizer tokenizer;
public DeferredObjectValue(JsonTokenizer tokenizer) {
super(null);
this.tokenizer = tokenizer;
}
@Override
public void put(String key, JsonValue value) {
throw new IllegalStateException("Cannot add value");
}
@Override
public JsonValue get(String key) {
return value().get(key);
}
@Override
public Map<String, JsonValue> value() {
return parseObject(tokenizer.copy()).value();
}
@Override
public String toString() {
return "<deferred>";
}
}
private static class SkipObjectHandler extends JsonHandler {
private final int startPos;
private int newPos;
public SkipObjectHandler(int startPos) {
this.startPos = startPos;
}
@Override
public void atom(Token key, Token value) {
if (key != null && ":size".equals(key.text()) && Token.Type.NUMBER == value.type()) {
newPos = startPos + Integer.parseInt(value.text());
throw BreakException.BREAK;
}
}
}
}
| mduerig/json-jerk | src/main/java/michid/jsonjerk/LevelOrderJsonParser.java | Java | apache-2.0 | 6,473 |
/**
* For JavaDocs.
* @author dgagarsky
* @since 01.12.2016
*/
package ru.job4j;
| degauhta/dgagarsky | chapter_001/src/test/java/ru/job4j/package-info.java | Java | apache-2.0 | 81 |
/*
* Copyright 2005-2007 Maarten Billemont
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lyndir.lhunath.opal.gui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.*;
/**
* <i>{@link ListenerAction}</i>: an {@link AbstractAction} that forwards {@link #actionPerformed(ActionEvent)} calls to a configured {@link ActionListener}.<br> <br>
*
* @author lhunath
*/
public class ListenerAction extends AbstractAction {
private final ActionListener listener;
/**
* Create a new {@link ListenerAction} instance.
*
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final ActionListener listener) {
this.listener = listener;
}
/**
* Create a new {@link ListenerAction} instance.
*
* @param name The name of the action.
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final String name, final ActionListener listener) {
super( name );
this.listener = listener;
}
/**
* Create a new {@link ListenerAction} instance.
*
* @param name The name of the action.
* @param command The string that will identify the action that must be taken.
* @param icon The icon of the action.
* @param listener The listener that will be notified of this action.
*/
public ListenerAction(final String name, final String command, final Icon icon, final ActionListener listener) {
super( name, icon );
this.listener = listener;
setActionCommand( command );
}
/**
* Specify an action command string for this action.
*
* @param command The string that will identify the action that must be taken.
*/
public void setActionCommand(final String command) {
putValue( ACTION_COMMAND_KEY, command );
}
/**
* Specify an action command string for this action.
*
* @return The string that will identify the action that must be taken.
*/
public String getActionCommand() {
return getValue( ACTION_COMMAND_KEY ) == null? null: getValue( ACTION_COMMAND_KEY ).toString();
}
/**
* {@inheritDoc}
*/
@Override
public void actionPerformed(final ActionEvent e) {
if (listener != null)
listener.actionPerformed( e );
}
}
| Lyndir/Opal | discontinued/opal-geo/src/main/java/com/lyndir/lhunath/opal/gui/ListenerAction.java | Java | apache-2.0 | 2,900 |
/*
* Copyright 2008-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jpa.repository.config;
import static org.junit.Assert.*;
import org.springframework.test.context.ContextConfiguration;
/**
* Integration test to test {@link org.springframework.core.type.filter.TypeFilter} integration into namespace.
*
* @author Oliver Gierke
*/
@ContextConfiguration(locations = "classpath:config/namespace-autoconfig-typefilter-context.xml")
public class TypeFilterConfigTests extends AbstractRepositoryConfigTests {
/*
* (non-Javadoc)
*
* @see
* org.springframework.data.jpa.repository.config.AbstractRepositoryConfigTests
* #testContextCreation()
*/
@Override
public void testContextCreation() {
assertNotNull(userRepository);
assertNotNull(roleRepository);
assertNull(auditableUserRepository);
}
}
| sdw2330976/Research-spring-data-jpa | spring-data-jpa-1.7.1.RELEASE/src/test/java/org/springframework/data/jpa/repository/config/TypeFilterConfigTests.java | Java | apache-2.0 | 1,407 |
package org.drools.rule;
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This exception is thrown when an invalid package (i.e. one that has errors)
* is attempted to be added to a RuleBase.
* The package and builder should be interrogated to show the specific errors.
*
* @author Michael Neale
*/
public class InvalidRulePackage extends RuntimeException {
private static final long serialVersionUID = 400L;
public InvalidRulePackage(final String summary) {
super( summary );
}
}
| bobmcwhirter/drools | drools-core/src/main/java/org/drools/rule/InvalidRulePackage.java | Java | apache-2.0 | 1,064 |
package com.twu.biblioteca.service.impl;
import com.twu.biblioteca.mapper.BookListMapper;
import com.twu.biblioteca.mapper.MyBatisUtil;
import com.twu.biblioteca.model.Book;
import com.twu.biblioteca.service.BookListService;
import org.apache.ibatis.session.SqlSession;
import java.util.ArrayList;
public class BookListServiceImpl implements BookListService {
private SqlSession sqlSession;
private BookListMapper bookListMapper;
public BookListServiceImpl() {
this.sqlSession = MyBatisUtil.getSqlSessionFactory().openSession();
this.bookListMapper = sqlSession.getMapper(BookListMapper.class);
}
public BookListServiceImpl(BookListMapper bookListMapper) {
this.bookListMapper = bookListMapper;
}
@Override
public ArrayList<Book> getBookList() {
return bookListMapper.getBookList();
}
}
| niuwanlu/twu-biblioteca-niuwanlu-tdd | src/main/java/com/twu/biblioteca/service/impl/BookListServiceImpl.java | Java | apache-2.0 | 863 |
/*
* Copyright 2015 Thomas Hoffmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.wifiAutoOff;
import android.app.IntentService;
import android.content.Intent;
import com.google.android.gms.location.FusedLocationProviderApi;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingEvent;
import com.google.android.gms.maps.model.LatLng;
public class GeoFenceService extends IntentService {
public GeoFenceService() {
super("WiFiAutomaticGeoFenceService");
}
@Override
protected void onHandleIntent(final Intent intent) {
if (intent == null) return;
if (intent.hasExtra(FusedLocationProviderApi.KEY_LOCATION_CHANGED)) {
android.location.Location loc = (android.location.Location) intent.getExtras()
.get(FusedLocationProviderApi.KEY_LOCATION_CHANGED);
if (BuildConfig.DEBUG) Logger.log("Location update received " + loc);
Database db = Database.getInstance(this);
if (db.inRangeOfLocation(loc)) {
sendBroadcast(new Intent(this, Receiver.class)
.setAction(Receiver.LOCATION_ENTERED_ACTION));
}
db.close();
} else {
GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent);
// First check for errors
if (geofencingEvent.hasError()) {
// Get the error code with a static method
// Log the error
if (BuildConfig.DEBUG) Logger.log("Location Services error: " +
Integer.toString(geofencingEvent.getErrorCode()));
} else {
// Test that a valid transition was reported
if (geofencingEvent.getGeofenceTransition() == Geofence.GEOFENCE_TRANSITION_ENTER) {
Database db = Database.getInstance(this);
for (Geofence gf : geofencingEvent.getTriggeringGeofences()) {
if (BuildConfig.DEBUG) Logger.log("geofence entered: " + gf.getRequestId());
String[] data = gf.getRequestId().split("@");
LatLng ll = new LatLng(Double.parseDouble(data[0]),
Double.parseDouble(data[1]));
String name = db.getNameForLocation(ll);
if (name != null) {
sendBroadcast(new Intent(this, Receiver.class)
.setAction(Receiver.LOCATION_ENTERED_ACTION)
.putExtra(Receiver.EXTRA_LOCATION_NAME, name));
break;
}
}
db.close();
}
}
}
}
}
| j4velin/WiFi-Automatic | src/play/java/de/j4velin/wifiAutoOff/GeoFenceService.java | Java | apache-2.0 | 3,340 |
/*
* Copyright 2015-2020 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.analysis.individual.qc;
import org.junit.Test;
import org.opencb.biodata.models.clinical.qc.MendelianErrorReport;
import org.opencb.biodata.models.clinical.qc.RelatednessReport;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.avro.IssueEntry;
import org.opencb.biodata.models.variant.avro.IssueType;
import org.opencb.opencga.analysis.family.qc.IBDComputation;
import org.opencb.opencga.core.common.JacksonUtils;
import org.opencb.opencga.core.exceptions.ToolException;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Paths;
import java.util.*;
import static org.opencb.opencga.storage.core.variant.VariantStorageBaseTest.getResourceUri;
public class IndividualQcUtilsTest {
@Test
public void buildRelatednessReport() throws ToolException, IOException {
URI resourceUri = getResourceUri("ibd.genome");
File file = Paths.get(resourceUri.getPath()).toFile();
List<RelatednessReport.RelatednessScore> relatednessReport = IBDComputation.parseRelatednessScores(file);
System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(relatednessReport));
}
@Test
public void parseMendelianError() throws IOException {
URI resourceUri = getResourceUri("mendelian.error.variants.json");
File file = Paths.get(resourceUri.getPath()).toFile();
List<Variant> variants = Arrays.asList(JacksonUtils.getDefaultNonNullObjectMapper().readValue(file, Variant[].class));
System.out.println(variants.size());
MendelianErrorReport mendelianErrorReport = buildMendelianErrorReport(variants.iterator(), variants.size());
System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(mendelianErrorReport));
// List<Variant> variants = JacksonUtils.getDefaultNonNullObjectMapper().readerFor(Variant.class).readValue(path.toFile());
// System.out.println(variants.size());
}
@Test
public void parseKaryotypicSexThresholds() throws IOException {
URI resourceUri = getResourceUri("karyotypic_sex_thresholds.json");
File file = Paths.get(resourceUri.getPath()).toFile();
Map<String, Double> thresholds = JacksonUtils.getDefaultNonNullObjectMapper().readerFor(Map.class).readValue(file);
System.out.println(JacksonUtils.getDefaultNonNullObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(thresholds));
}
private MendelianErrorReport buildMendelianErrorReport(Iterator iterator, long numVariants) {
// Create auxiliary map
// sample chrom error count
Map<String, Map<String, Map<String, Integer>>> counter = new HashMap<>();
int numErrors = 0;
while (iterator.hasNext()) {
Variant variant = (Variant) iterator.next();
// Get sampleId and error code from variant issues
boolean foundError = false;
for (IssueEntry issue : variant.getStudies().get(0).getIssues()) {
if (IssueType.MENDELIAN_ERROR == issue.getType() || IssueType.DE_NOVO == issue.getType()) {
foundError = true;
String sampleId = issue.getSample().getSampleId();
String errorCode = issue.getSample().getData().get(0);
if (!counter.containsKey(sampleId)) {
counter.put(sampleId, new HashMap<>());
}
if (!counter.get(sampleId).containsKey(variant.getChromosome())) {
counter.get(sampleId).put(variant.getChromosome(), new HashMap<>());
}
int val = 0;
if (counter.get(sampleId).get(variant.getChromosome()).containsKey(errorCode)) {
val = counter.get(sampleId).get(variant.getChromosome()).get(errorCode);
}
counter.get(sampleId).get(variant.getChromosome()).put(errorCode, val + 1);
}
}
if (foundError) {
numErrors++;
}
}
// Create mendelian error report from auxiliary map
MendelianErrorReport meReport = new MendelianErrorReport();
meReport.setNumErrors(numErrors);
for (String sampleId : counter.keySet()) {
MendelianErrorReport.SampleAggregation sampleAgg = new MendelianErrorReport.SampleAggregation();
int numSampleErrors = 0;
for (String chrom : counter.get(sampleId).keySet()) {
int numChromErrors = counter.get(sampleId).get(chrom).values().stream().mapToInt(Integer::intValue).sum();
MendelianErrorReport.SampleAggregation.ChromosomeAggregation chromAgg = new MendelianErrorReport.SampleAggregation.ChromosomeAggregation();
chromAgg.setChromosome(chrom);
chromAgg.setNumErrors(numChromErrors);
chromAgg.setErrorCodeAggregation(counter.get(sampleId).get(chrom));
// Update sample aggregation
sampleAgg.getChromAggregation().add(chromAgg);
numSampleErrors += numChromErrors;
}
sampleAgg.setSample(sampleId);
sampleAgg.setNumErrors(numSampleErrors);
sampleAgg.setRatio(1.0d * numSampleErrors / numVariants);
meReport.getSampleAggregation().add(sampleAgg);
}
return meReport;
}
}
| opencb/opencga | opencga-analysis/src/test/java/org/opencb/opencga/analysis/individual/qc/IndividualQcUtilsTest.java | Java | apache-2.0 | 6,198 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.redundancy;
import com.google.common.collect.ImmutableSet;
import com.intellij.codeInspection.ex.InspectionElementsMergerBase;
import com.intellij.util.ArrayUtilRt;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import java.util.Map;
import java.util.Set;
public class RedundantStringOperationMerger extends InspectionElementsMergerBase {
private static final String OLD_MERGER_NAME = "RedundantStringOperation";
private static final Set<String> OLD_SOURCE_NAMES = ImmutableSet.of("StringToString", "SubstringZero", "ConstantStringIntern");
@NotNull
@Override
public String getMergedToolName() {
return "StringOperationCanBeSimplified";
}
@Override
protected Element getSourceElement(@NotNull Map<String, Element> inspectionElements, @NotNull String sourceToolName) {
if (inspectionElements.containsKey(sourceToolName)) {
return inspectionElements.get(sourceToolName);
}
if (sourceToolName.equals(OLD_MERGER_NAME)) {//need to merge initial tools to get merged redundant string operations
return new InspectionElementsMergerBase(){
@NotNull
@Override
public String getMergedToolName() {
return OLD_MERGER_NAME;
}
@Override
public String @NotNull [] getSourceToolNames() {
return ArrayUtilRt.toStringArray(OLD_SOURCE_NAMES);
}
@Override
public Element merge(@NotNull Map<String, Element> inspectionElements) {
return super.merge(inspectionElements);
}
@Override
protected boolean writeMergedContent(@NotNull Element toolElement) {
return true;
}
}.merge(inspectionElements);
}
else if (OLD_SOURCE_NAMES.contains(sourceToolName)) {
Element merged = inspectionElements.get(OLD_MERGER_NAME);
if (merged != null) { // RedundantStringOperation already replaced the content
Element clone = merged.clone();
clone.setAttribute("class", sourceToolName);
return clone;
}
}
return null;
}
@Override
public String @NotNull [] getSourceToolNames() {
return new String[] {
"StringToString",
"SubstringZero",
"ConstantStringIntern",
"StringConstructor",
OLD_MERGER_NAME
};
}
@Override
public String @NotNull [] getSuppressIds() {
return new String[] {
"StringToString", "RedundantStringToString",
"SubstringZero", "ConstantStringIntern",
"RedundantStringConstructorCall", "StringConstructor", OLD_MERGER_NAME
};
}
}
| leafclick/intellij-community | plugins/InspectionGadgets/src/com/siyeh/ig/redundancy/RedundantStringOperationMerger.java | Java | apache-2.0 | 2,723 |
package com.app.annotation.aspect;
/**
* Created by baixiaokang on 17/1/31.
*/
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Permission {
String[] value();
}
| AndroidAdu/material-News | lib/src/main/java/com/app/annotation/aspect/Permission.java | Java | apache-2.0 | 363 |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.module.impl;
import com.intellij.configurationStore.RenameableStateStorageManager;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.ide.plugins.ContainerDescriptor;
import com.intellij.ide.plugins.IdeaPluginDescriptorImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.components.impl.stores.IComponentStore;
import com.intellij.openapi.components.impl.stores.ModuleStore;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleComponent;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.impl.scopes.ModuleScopeProviderImpl;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ex.ProjectEx;
import com.intellij.openapi.roots.ExternalProjectSystemRegistry;
import com.intellij.openapi.roots.ProjectModelElement;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.serviceContainer.ComponentManagerImpl;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import com.intellij.util.xmlb.annotations.Property;
import kotlin.Unit;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
public class ModuleImpl extends ComponentManagerImpl implements ModuleEx {
private static final Logger LOG = Logger.getInstance(ModuleImpl.class);
@NotNull private final Project myProject;
@Nullable protected VirtualFilePointer myImlFilePointer;
private volatile boolean isModuleAdded;
private String myName;
private final ModuleScopeProvider myModuleScopeProvider;
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project, @NotNull String filePath) {
this(name, project);
myImlFilePointer = VirtualFilePointerManager.getInstance().create(
VfsUtilCore.pathToUrl(filePath), this,
new VirtualFilePointerListener() {
@Override
public void validityChanged(@NotNull VirtualFilePointer @NotNull [] pointers) {
if (myImlFilePointer == null) return;
VirtualFile virtualFile = myImlFilePointer.getFile();
if (virtualFile != null) {
((ModuleStore)getStore()).setPath(virtualFile.toNioPath(), virtualFile, false);
ModuleManager.getInstance(myProject).incModificationCount();
}
}
});
}
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project, @Nullable VirtualFilePointer virtualFilePointer) {
this(name, project);
myImlFilePointer = virtualFilePointer;
}
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project) {
super((ComponentManagerImpl)project);
registerServiceInstance(Module.class, this, ComponentManagerImpl.fakeCorePluginDescriptor);
myProject = project;
myModuleScopeProvider = new ModuleScopeProviderImpl(this);
myName = name;
}
@Override
public void init(@Nullable Runnable beforeComponentCreation) {
// do not measure (activityNamePrefix method not overridden by this class)
// because there are a lot of modules and no need to measure each one
registerComponents();
if (!isPersistent()) {
registerService(IComponentStore.class,
NonPersistentModuleStore.class,
ComponentManagerImpl.fakeCorePluginDescriptor,
true, ServiceDescriptor.PreloadMode.FALSE);
}
if (beforeComponentCreation != null) {
beforeComponentCreation.run();
}
createComponents(null);
}
private boolean isPersistent() {
return myImlFilePointer != null;
}
@Override
protected void setProgressDuringInit(@NotNull ProgressIndicator indicator) {
// Component loading progress is not reported for module, because at this stage minimal reporting unit it is the module itself.
// Stage "Loading modules" progress reported for each loaded module and module component count doesn't matter.
}
@Override
public final boolean isDisposed() {
// in case of light project in tests when it's temporarily disposed, the module should be treated as disposed too.
//noinspection TestOnlyProblems
return super.isDisposed() || ((ProjectEx)myProject).isLight() && myProject.isDisposed();
}
@Override
protected boolean isComponentSuitable(@NotNull ComponentConfig componentConfig) {
if (!super.isComponentSuitable(componentConfig)) {
return false;
}
Map<String, String> options = componentConfig.options;
if (options == null || options.isEmpty()) {
return true;
}
for (String optionName : options.keySet()) {
if ("workspace".equals(optionName) || "overrides".equals(optionName)) {
continue;
}
// we cannot filter using module options because at this moment module file data could be not loaded
String message = "Don't specify " + optionName + " in the component registration, transform component to service and implement your logic in your getInstance() method";
if (ApplicationManager.getApplication().isUnitTestMode()) {
LOG.error(message);
}
else {
LOG.warn(message);
}
}
return true;
}
@Override
@Nullable
public VirtualFile getModuleFile() {
if (myImlFilePointer == null) {
return null;
}
return myImlFilePointer.getFile();
}
@Override
public void rename(@NotNull String newName, boolean notifyStorage) {
myName = newName;
if (notifyStorage) {
((RenameableStateStorageManager)getStore().getStorageManager()).rename(newName + ModuleFileType.DOT_DEFAULT_EXTENSION);
}
}
protected @NotNull IComponentStore getStore() {
return Objects.requireNonNull(getService(IComponentStore.class));
}
@Override
public boolean canStoreSettings() {
return !(getStore() instanceof NonPersistentModuleStore);
}
@Override
@NotNull
public Path getModuleNioFile() {
if (!isPersistent()) {
return Paths.get("");
}
return getStore().getStorageManager().expandMacro(StoragePathMacros.MODULE_FILE);
}
@Override
public synchronized void dispose() {
isModuleAdded = false;
super.dispose();
}
@NotNull
@Override
protected ContainerDescriptor getContainerDescriptor(@NotNull IdeaPluginDescriptorImpl pluginDescriptor) {
return pluginDescriptor.moduleContainerDescriptor;
}
@Override
public void projectOpened() {
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
try {
//noinspection deprecation
component.projectOpened();
}
catch (Exception e) {
LOG.error(e);
}
return Unit.INSTANCE;
});
}
@Override
public void projectClosed() {
//noinspection deprecation
List<ModuleComponent> components = new ArrayList<>();
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
components.add(component);
return Unit.INSTANCE;
});
for (int i = components.size() - 1; i >= 0; i--) {
try {
//noinspection deprecation
components.get(i).projectClosed();
}
catch (Throwable e) {
LOG.error(e);
}
}
}
@Override
@NotNull
public Project getProject() {
return myProject;
}
@Override
@NotNull
public String getName() {
return myName;
}
@Override
public boolean isLoaded() {
return isModuleAdded;
}
@Override
public void moduleAdded() {
isModuleAdded = true;
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
//noinspection deprecation
component.moduleAdded();
return Unit.INSTANCE;
});
}
@Override
public void setOption(@NotNull String key, @Nullable String value) {
DeprecatedModuleOptionManager manager = getOptionManager();
if (value == null) {
if (manager.state.options.remove(key) != null) {
manager.incModificationCount();
}
}
else if (!value.equals(manager.state.options.put(key, value))) {
manager.incModificationCount();
}
}
@NotNull
private DeprecatedModuleOptionManager getOptionManager() {
//noinspection ConstantConditions
return ((Module)this).getService(DeprecatedModuleOptionManager.class);
}
@Override
public String getOptionValue(@NotNull String key) {
return getOptionManager().state.options.get(key);
}
@NotNull
@Override
public GlobalSearchScope getModuleScope() {
return myModuleScopeProvider.getModuleScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleScope(boolean includeTests) {
return myModuleScopeProvider.getModuleScope(includeTests);
}
@NotNull
@Override
public GlobalSearchScope getModuleWithLibrariesScope() {
return myModuleScopeProvider.getModuleWithLibrariesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependenciesScope() {
return myModuleScopeProvider.getModuleWithDependenciesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleContentScope() {
return myModuleScopeProvider.getModuleContentScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleContentWithDependenciesScope() {
return myModuleScopeProvider.getModuleContentWithDependenciesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependenciesAndLibrariesScope(boolean includeTests) {
return myModuleScopeProvider.getModuleWithDependenciesAndLibrariesScope(includeTests);
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependentsScope() {
return myModuleScopeProvider.getModuleWithDependentsScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleTestsWithDependentsScope() {
return myModuleScopeProvider.getModuleTestsWithDependentsScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleRuntimeScope(boolean includeTests) {
return myModuleScopeProvider.getModuleRuntimeScope(includeTests);
}
@Override
public void clearScopesCache() {
myModuleScopeProvider.clearCache();
}
@Override
public String toString() {
if (myName == null) return "Module (not initialized)";
return "Module: '" + getName() + "'" + (isDisposed() ? " (disposed)" : "");
}
@Override
public long getOptionsModificationCount() {
return getOptionManager().getModificationCount();
}
@ApiStatus.Internal
@State(name = "DeprecatedModuleOptionManager", useLoadedStateAsExisting = false /* doesn't make sense to check it */)
public static class DeprecatedModuleOptionManager extends SimpleModificationTracker implements PersistentStateComponent<DeprecatedModuleOptionManager.State>,
ProjectModelElement {
private final Module module;
DeprecatedModuleOptionManager(@NotNull Module module) {
this.module = module;
}
@Override
@Nullable
public ProjectModelExternalSource getExternalSource() {
if (state.options.size() > 1 || state.options.size() == 1 && !state.options.containsKey(Module.ELEMENT_TYPE) /* unrealistic case, but just to be sure */) {
return null;
}
return ExternalProjectSystemRegistry.getInstance().getExternalSource(module);
}
static final class State {
@Property(surroundWithTag = false)
@MapAnnotation(surroundKeyWithTag = false, surroundValueWithTag = false, surroundWithTag = false, entryTagName = "option")
public final Map<String, String> options = new HashMap<>();
}
private State state = new State();
@Nullable
@Override
public State getState() {
return state;
}
@Override
public void loadState(@NotNull State state) {
this.state = state;
}
}
}
| jwren/intellij-community | platform/lang-impl/src/com/intellij/openapi/module/impl/ModuleImpl.java | Java | apache-2.0 | 12,736 |
/*
* Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.mirror.declaration;
import java.lang.annotation.Annotation;
import java.util.Collection;
import com.sun.mirror.type.*;
import com.sun.mirror.util.*;
/**
* Represents the declaration of a program element such as a package,
* class, or method. Each declaration represents a static, language-level
* construct (and not, for example, a runtime construct of the virtual
* machine), and typically corresponds one-to-one with a particular
* fragment of source code.
*
* <p> Declarations should be compared using the {@link #equals(Object)}
* method. There is no guarantee that any particular declaration will
* always be represented by the same object.
*
* @deprecated All components of this API have been superseded by the
* standardized annotation processing API. The replacement for the
* functionality of this interface is {@link
* javax.lang.model.element.Element}.
*
* @author Joseph D. Darcy
* @author Scott Seligman
*
* @see Declarations
* @see TypeMirror
* @since 1.5
*/
@Deprecated
@SuppressWarnings("deprecation")
public interface Declaration {
/**
* Tests whether an object represents the same declaration as this.
*
* @param obj the object to be compared with this declaration
* @return <tt>true</tt> if the specified object represents the same
* declaration as this
*/
boolean equals(Object obj);
/**
* Returns the text of the documentation ("javadoc") comment of
* this declaration.
*
* @return the documentation comment of this declaration, or <tt>null</tt>
* if there is none
*/
String getDocComment();
/**
* Returns the annotations that are directly present on this declaration.
*
* @return the annotations directly present on this declaration;
* an empty collection if there are none
*/
Collection<AnnotationMirror> getAnnotationMirrors();
/**
* Returns the annotation of this declaration having the specified
* type. The annotation may be either inherited or directly
* present on this declaration.
*
* <p> The annotation returned by this method could contain an element
* whose value is of type <tt>Class</tt>.
* This value cannot be returned directly: information necessary to
* locate and load a class (such as the class loader to use) is
* not available, and the class might not be loadable at all.
* Attempting to read a <tt>Class</tt> object by invoking the relevant
* method on the returned annotation
* will result in a {@link MirroredTypeException},
* from which the corresponding {@link TypeMirror} may be extracted.
* Similarly, attempting to read a <tt>Class[]</tt>-valued element
* will result in a {@link MirroredTypesException}.
*
* <blockquote>
* <i>Note:</i> This method is unlike
* others in this and related interfaces. It operates on run-time
* reflective information -- representations of annotation types
* currently loaded into the VM -- rather than on the mirrored
* representations defined by and used throughout these
* interfaces. It is intended for callers that are written to
* operate on a known, fixed set of annotation types.
* </blockquote>
*
* @param <A> the annotation type
* @param annotationType the <tt>Class</tt> object corresponding to
* the annotation type
* @return the annotation of this declaration having the specified type
*
* @see #getAnnotationMirrors()
*/
<A extends Annotation> A getAnnotation(Class<A> annotationType);
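    // Illustrative sketch (not part of the original API documentation): in an
    // annotation processor, a Class-valued element is typically read like this,
    // assuming a hypothetical annotation mirror "myAnnotation" with a value() element:
    //
    //   TypeMirror valueType;
    //   try {
    //       myAnnotation.value();            // throws at processing time
    //       valueType = null;                // not reached
    //   } catch (MirroredTypeException e) {
    //       valueType = e.getTypeMirror();   // usable TypeMirror instead
    //   }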
/**
* Returns the modifiers of this declaration, excluding annotations.
* Implicit modifiers, such as the <tt>public</tt> and <tt>static</tt>
* modifiers of interface members, are included.
*
* @return the modifiers of this declaration in undefined order;
* an empty collection if there are none
*/
Collection<Modifier> getModifiers();
/**
* Returns the simple (unqualified) name of this declaration.
* The name of a generic type does not include any reference
* to its formal type parameters.
* For example, the simple name of the interface declaration
* {@code java.util.Set<E>} is <tt>"Set"</tt>.
* If this declaration represents the empty package, an empty
* string is returned.
* If it represents a constructor, the simple name of its
* declaring class is returned.
*
* @return the simple name of this declaration
*/
String getSimpleName();
/**
* Returns the source position of the beginning of this declaration.
* Returns <tt>null</tt> if the position is unknown or not applicable.
*
* <p> This source position is intended for use in providing
* diagnostics, and indicates only approximately where a declaration
* begins.
*
* @return the source position of the beginning of this declaration,
* or null if the position is unknown or not applicable
*/
SourcePosition getPosition();
/**
* Applies a visitor to this declaration.
*
* @param v the visitor operating on this declaration
*/
void accept(DeclarationVisitor v);
}
| haikuowuya/android_system_code | src/com/sun/mirror/declaration/Declaration.java | Java | apache-2.0 | 5,514 |
package com.xsolla.android.sdk.data.model;
import java.util.List;
public class XHoldSubscriptionStatus {
private String status;
private List<XMessage> errors;
private XApi api;
public String getStatus() {
return status;
}
public List<XMessage> getErrors() {
return errors;
}
public String getErrorMsg() {
StringBuilder sb = new StringBuilder();
for (XMessage message : errors) {
sb.append(message.getMessage()).append("\n");
}
        if (sb.length() > 0) {
            sb.deleteCharAt(sb.length() - 1);
        }
return sb.toString();
}
@Override
public String toString() {
        return "XHoldSubscriptionStatus{" +
"status='" + status + '\'' +
", errors=" + errors +
", api=" + api +
'}';
}
}
| xsolla/xsolla-sdk-android | xsollasdk/src/main/java/com/xsolla/android/sdk/data/model/XHoldSubscriptionStatus.java | Java | apache-2.0 | 827 |
package br.eti.arthurgregorio.fulljeearch.domain.security;
/**
*
* @author Arthur
*/
public interface ApplicationRoles {
public final String USER = "Usuario";
public final String ADMINISTRATOR = "Administrador";
}
| arthurgregorio/exemplos | FullJeeArch/src/main/java/br/eti/arthurgregorio/fulljeearch/domain/security/ApplicationRoles.java | Java | apache-2.0 | 228 |
package com.almende.dialog.example.agent;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import com.almende.dialog.Settings;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.Question;
import com.almende.util.ParallelInit;
import com.almende.util.twigmongo.QueryResultIterator;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore;
import com.almende.util.twigmongo.TwigCompatibleMongoDatastore.RootFindCommand;
import com.almende.util.twigmongo.annotations.Id;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
@Path("yesno")
public class YesNoAgent {
static final ObjectMapper om =ParallelInit.getObjectMapper();
private static final String URL = "http://"+Settings.HOST+"/yesno/";
private static final String SOUNDURL = "http://ask4604.ask46.customers.luna.net/rest/";
private static final Logger log = Logger
.getLogger("DialogHandler");
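    /**
     * Builds a closed (yes/no) question. Question and answer texts are exposed as
     * URLs; for audio clients they point to pre-recorded wav files instead. Each
     * answer carries a callback URL that encodes the phone number and the answer.
     */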
public Question getQuestion(int question_no, String preferred_medium, String phonenumber) {
String questionURL = URL+"questions/"+question_no;
String answerYesURL = URL+"answers/0";
String answerNoURL = URL+"answers/1";
if (preferred_medium != null && preferred_medium.startsWith("audio")){
questionURL = this.getAudioFile(question_no);
answerYesURL= SOUNDURL+"14.wav";
answerNoURL= SOUNDURL+"14.wav";
}
Question question=new Question();
question.setRequester(URL+"id/");
question.setType("closed");
question.setQuestion_text(questionURL);
question.setAnswers(new ArrayList<Answer>(Arrays.asList(
new Answer(answerYesURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=yes"),
new Answer(answerNoURL, URL+"questions/"+question_no+"?preferred_medium="+preferred_medium+"&pn="+phonenumber+"&answer=no"))));
return question;
}
@GET
@Path("/id/")
public Response getId(@QueryParam("preferred_language") String preferred_language){
ObjectNode node= om.createObjectNode();
node.put("url", URL);
node.put("nickname", "YesNo");
return Response.ok(node.toString()).build();
}
@GET
@Produces("application/json")
public Response firstQuestion(@QueryParam("preferred_medium") String preferred_medium, @QueryParam("remoteAddress") String responder, @QueryParam("requester") String requester){
int questionNo=0;
if(requester.contains("live") || requester.contains("0107421217")){
questionNo=1;
}
try {
responder = URLDecoder.decode(responder, "UTF-8");
} catch (Exception ex) {
log.severe(ex.getMessage());
}
Question question = getQuestion(questionNo, preferred_medium, responder);
return Response.ok(question.toJSON()).build();
}
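    /**
     * Records the caller's answer by adding the phone number to the
     * "Group.<question_no>.<answer>" group, then responds with a closing
     * thank-you message.
     */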
@Path("/questions/{question_no}")
@POST
@Produces("application/json")
@Consumes("*/*")
public Response answerQuestion(@PathParam("question_no") String question_no, @QueryParam("preferred_medium") String preferred_medium,
@QueryParam("pn") String phonenumber, @QueryParam("answer") String answer){
Group group = this.getGroup("Group."+question_no+"."+answer);
group.addMember(phonenumber);
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
datastore.store(group);
int responseQuestion=99;
String questionURL = URL+"questions/"+responseQuestion;
if (preferred_medium != null && preferred_medium.startsWith("audio")){
questionURL = this.getAudioFile(responseQuestion);
}
Question question=new Question();
question.setRequester(URL+"id/");
question.setType("comment");
question.setQuestion_text(questionURL);
return Response.ok( question.toJSON() ).build();
}
@Path("/questions/{question_no}")
@GET
@Produces("text/plain")
@Consumes("*/*")
public Response getQuestionText(@PathParam("question_no") String question_no ){
Integer questionNo = Integer.parseInt(question_no);
String result = "";
        // These messages are now static but should be loaded from the Liferay database.
switch (questionNo){
case 0: result="Press 1 if you are available, press 2 if you are unavailable."; break;
case 1: result="Are you available?"; break;
case 99: result="Thank you for your input"; break;
default: result="Sorry, for some strange reason I don't have that question text available...";
}
return Response.ok(result).build();
}
@Path("/answers/{answer_no}")
@GET
@Produces("text/plain")
@Consumes("*/*")
public Response getAnswerText(@PathParam("answer_no") String answer_no, @QueryParam("preferred_medium") String prefered_mimeType){
Integer answerNo = Integer.parseInt(answer_no);
String result="";
// These messages can be static, because they are always the same.
switch (answerNo){
case 0: result="Yes"; break;
case 1: result="No"; break;
default: result="Sorry, for some strange reason I don't have that answer text available...";
}
return Response.ok(result).build();
}
    // This URL presents the results
@Path("result")
@GET
public Response getResults() {
String result="";
ArrayList<Group> groups = (ArrayList<Group>) this.getAllGroups();
try {
result = om.writeValueAsString(groups);
} catch(Exception ex) {
ex.printStackTrace();
}
return Response.ok( result ).build();
}
    // These functions should get their data from the Liferay database.
// These are the audio files linked to the questions
public String getAudioFile(int question_no) {
switch(question_no) {
case 0: return SOUNDURL+"571.wav";
case 1: return SOUNDURL+"572.wav";
case 99: return SOUNDURL+"567.wav";
default: return SOUNDURL+"529.wav";
}
}
    // These two functions handle group management.
public Group getGroup(String id) {
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
Group group = datastore.load(Group.class, id);
if(group!=null)
return group;
group = new Group();
group.setId(id);
return group;
}
public List<Group> getAllGroups() {
TwigCompatibleMongoDatastore datastore = new TwigCompatibleMongoDatastore();
RootFindCommand<Group> command = datastore.find()
.type(Group.class);
QueryResultIterator<Group> it = command.now();
List<Group> groups = new ArrayList<Group>();
while (it.hasNext()) {
groups.add(it.next());
}
return groups;
}
}
@SuppressWarnings("serial")
class Group implements Serializable {
public Group() {
this.members=new HashSet<String>();
}
public String getId(){
return id;
}
public void setId(String id){
this.id=id;
}
public Set<String> getMembers() {
return this.members;
}
public void addMember(String member) {
this.members.add(member);
}
@Id private String id=null;
private Set<String> members=null;
}
| almende/dialog | dialoghandler/src/main/java/com/almende/dialog/example/agent/YesNoAgent.java | Java | apache-2.0 | 7,159 |
package com.suscipio_solutions.consecro_mud.Commands;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMParms;
@SuppressWarnings("rawtypes")
public class NoFollow extends Follow
{
public NoFollow(){}
private final String[] access=I(new String[]{"NOFOLLOW","NOFOL"});
@Override public String[] getAccessWords(){return access;}
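	// With no arguments this toggles whether the mob accepts new followers.
	// With a name argument (or the internal "UNFOLLOW" form) it detaches the
	// named follower instead.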
@Override
public boolean execute(MOB mob, Vector commands, int metaFlags)
throws java.io.IOException
{
if((commands.size()>1)&&(commands.elementAt(0) instanceof String))
{
if(((String)commands.elementAt(0)).equalsIgnoreCase("UNFOLLOW"))
{
unfollow(mob,((commands.size()>1)&&(commands.elementAt(1) instanceof String)&&(((String)commands.elementAt(1)).equalsIgnoreCase("QUIETLY"))));
return false;
}
MOB M=mob.fetchFollower(CMParms.combine(commands,1));
if((M==null)&&(mob.location()!=null))
{
M=mob.location().fetchInhabitant(CMParms.combine(commands,1));
if(M!=null)
mob.tell(L("@x1 is not following you!",M.name(mob)));
else
mob.tell(L("There is noone here called '@x1' following you!",CMParms.combine(commands,1)));
return false;
}
if((mob.location()!=null)&&(M!=null)&&(M.amFollowing()==mob))
{
nofollow(M,true,false);
return true;
}
mob.tell(L("There is noone called '@x1' following you!",CMParms.combine(commands,1)));
return false;
}
if(!mob.isAttribute(MOB.Attrib.NOFOLLOW))
{
mob.setAttribute(MOB.Attrib.NOFOLLOW,true);
//unfollow(mob,false);
mob.tell(L("You are no longer accepting new followers."));
}
else
{
mob.setAttribute(MOB.Attrib.NOFOLLOW,false);
mob.tell(L("You are now accepting new followers."));
}
return false;
}
@Override public boolean canBeOrdered(){return true;}
}
| ConsecroMUD/ConsecroMUD | com/suscipio_solutions/consecro_mud/Commands/NoFollow.java | Java | apache-2.0 | 1,837 |
/**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.ir.researcher;
import edu.ur.ir.FileSystem;
import edu.ur.ir.FileSystemType;
import edu.ur.persistent.CommonPersistent;
/**
 * Represents a link (URL) belonging to a researcher. It associates the
 * link with the researcher and, optionally, a parent researcher folder.
*
* @author Sharmila Ranganathan
*
*/
public class ResearcherLink extends CommonPersistent implements FileSystem{
/** Eclipse generated id */
private static final long serialVersionUID = 3144484183634385274L;
/** Link */
private String url;
/** researcher folder the link belongs to. */
private ResearcherFolder parentFolder;
/** Researcher the link belongs to */
private Researcher researcher;
/** represents the file system type for this researcher link */
private FileSystemType fileSystemType = FileSystemType.RESEARCHER_LINK;
/**
* Package protected constructor.
*/
	ResearcherLink(){}
/**
* Create a researcher link with a null researcher folder. This means this
* is a root researcher link.
*
	 * @param researcher the researcher the link belongs to
	 * @param link the link URL
*/
ResearcherLink(Researcher researcher, String link)
{
setResearcher(researcher);
setUrl(link);
}
/**
* Create a link between a folder and link.
*
* @param link - link to create a link with
* @param parentFolder - folder the link is in.
*/
ResearcherLink(Researcher researcher, ResearcherFolder parentFolder, String link)
{
if(link == null)
{
throw new IllegalStateException("link cannot be null");
}
setResearcher(researcher);
setUrl(link);
setParentFolder(parentFolder);
}
/**
	 * Returns the path for this link.
*
* The path is the path of the parent folder
*
* @return
*/
public String getPath()
{
String path = null;
if(parentFolder == null)
{
path = PATH_SEPERATOR;
}
else
{
path = parentFolder.getFullPath();
}
return path;
}
/**
* Overridden to string method.
*
* @see java.lang.Object#toString()
*/
public String toString()
{
StringBuffer sb = new StringBuffer("[ id = ");
sb.append(id);
sb.append( " path = ");
sb.append(getPath());
sb.append( " parent Folder = ");
sb.append(parentFolder);
sb.append(" name = ");
sb.append(name);
sb.append(" link = ");
sb.append(url);
sb.append("]");
return sb.toString();
}
/**
	 * Get the full path of this link. If there is
* no parent folder the path is just the name of
* the link.
*
* @return the full path.
*/
public String getFullPath()
{
return getPath() + getName();
}
/**
* Hash code for a researcher link.
*
* @see java.lang.Object#hashCode()
*/
public int hashCode()
{
int value = 0;
value += parentFolder == null ? 0 : parentFolder.hashCode();
value += getName() == null ? 0 : getName().hashCode();
value += researcher == null ? 0 : researcher.hashCode();
return value;
}
/**
* Equals method for a researcher link.
*
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object o)
{
if (this == o) return true;
if (!(o instanceof ResearcherLink)) return false;
final ResearcherLink other = (ResearcherLink) o;
if( (other.getName() != null && !other.getName().equals(getName())) ||
(other.getName() == null && getName() != null ) ) return false;
if( (other.getResearcher() != null && !other.getResearcher().equals(getResearcher())) ||
(other.getResearcher() == null && getResearcher() != null ) ) return false;
if( (other.getFullPath() != null && !other.getFullPath().equals(getFullPath())) ||
(other.getFullPath() == null && getFullPath() != null ) ) return false;
return true;
}
/**
* Returns the name of the link.
*
* @see edu.ur.simple.type.NameAware#getName()
*/
public String getName() {
return name;
}
/**
* Returns the description of the link.
*
* @see edu.ur.simple.type.DescriptionAware#getDescription()
*/
public String getDescription() {
return description;
}
/* (non-Javadoc)
* @see edu.ur.ir.FileSystem#getFileSystemType()
*/
public FileSystemType getFileSystemType() {
return fileSystemType;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public ResearcherFolder getParentFolder() {
return parentFolder;
}
public void setParentFolder(ResearcherFolder parentFolder) {
this.parentFolder = parentFolder;
}
public Researcher getResearcher() {
return researcher;
}
public void setResearcher(Researcher researcher) {
this.researcher = researcher;
}
}
| nate-rcl/irplus | ir_core/src/edu/ur/ir/researcher/ResearcherLink.java | Java | apache-2.0 | 5,358 |
package org.dbflute.erflute.db.impl.mysql;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dbflute.erflute.editor.model.dbimport.DBObject;
import org.dbflute.erflute.editor.model.dbimport.PreImportFromDBManager;
public class MySQLPreTableImportManager extends PreImportFromDBManager {
@Override
protected List<DBObject> importObjects(String[] types, String dbObjectType) throws SQLException {
final List<DBObject> list = new ArrayList<>();
ResultSet resultSet = null;
if (schemaList.isEmpty()) {
schemaList.add(null);
}
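        // MySQL Connector/J 8.x maps databases to catalogs by default, so for
        // newer drivers the database name is passed as the catalog.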
final String catalog = (8 <= metaData.getDriverMajorVersion()) ? dbSetting.getDatabase() : null;
for (final String schemaPattern : schemaList) {
try {
resultSet = metaData.getTables(catalog, schemaPattern, null, types);
while (resultSet.next()) {
final String schema = resultSet.getString("TABLE_SCHEM");
final String name = resultSet.getString("TABLE_NAME");
if (DBObject.TYPE_TABLE.equals(dbObjectType)) {
try {
getAutoIncrementColumnName(con, schema, name);
} catch (final SQLException e) {
e.printStackTrace();
// テーブル情報が取得できない場合(他のユーザの所有物などの場合)、
// このテーブルは使用しない。
continue;
}
}
final DBObject dbObject = new DBObject(schema, name, dbObjectType);
list.add(dbObject);
}
} finally {
if (resultSet != null) {
resultSet.close();
resultSet = null;
}
}
}
return list;
}
}
| dbflute-session/erflute | src/org/dbflute/erflute/db/impl/mysql/MySQLPreTableImportManager.java | Java | apache-2.0 | 2,037 |
package org.gradle.test.performance.mediummonolithicjavaproject.p36;
import org.junit.Test;
import static org.junit.Assert.*;
public class Test730 {
Production730 objectUnderTest = new Production730();
@Test
public void testProperty0() {
String value = "value";
objectUnderTest.setProperty0(value);
assertEquals(value, objectUnderTest.getProperty0());
}
@Test
public void testProperty1() {
String value = "value";
objectUnderTest.setProperty1(value);
assertEquals(value, objectUnderTest.getProperty1());
}
@Test
public void testProperty2() {
String value = "value";
objectUnderTest.setProperty2(value);
assertEquals(value, objectUnderTest.getProperty2());
}
@Test
public void testProperty3() {
String value = "value";
objectUnderTest.setProperty3(value);
assertEquals(value, objectUnderTest.getProperty3());
}
@Test
public void testProperty4() {
String value = "value";
objectUnderTest.setProperty4(value);
assertEquals(value, objectUnderTest.getProperty4());
}
@Test
public void testProperty5() {
String value = "value";
objectUnderTest.setProperty5(value);
assertEquals(value, objectUnderTest.getProperty5());
}
@Test
public void testProperty6() {
String value = "value";
objectUnderTest.setProperty6(value);
assertEquals(value, objectUnderTest.getProperty6());
}
@Test
public void testProperty7() {
String value = "value";
objectUnderTest.setProperty7(value);
assertEquals(value, objectUnderTest.getProperty7());
}
@Test
public void testProperty8() {
String value = "value";
objectUnderTest.setProperty8(value);
assertEquals(value, objectUnderTest.getProperty8());
}
@Test
public void testProperty9() {
String value = "value";
objectUnderTest.setProperty9(value);
assertEquals(value, objectUnderTest.getProperty9());
}
} | oehme/analysing-gradle-performance | my-lib/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p36/Test730.java | Java | apache-2.0 | 2,107 |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.common.metrics.impl;
import com.yahoo.athenz.common.metrics.Metric;
public class NoOpMetric implements Metric {
/**
* Constructs a new NoOpMetric object in which all methods are stubs.
* No metrics are recorded with this implementation.
*/
public NoOpMetric() {
}
@Override
public void increment(String metric) {
}
@Override
public void increment(String metric, String domainName) {
}
@Override
public void increment(String metric, String domainName, int count) {
}
@Override
public Object startTiming(String metric, String domainName) {
return null;
}
@Override
public void stopTiming(Object timerMetric) {
}
@Override
public void flush() {
}
@Override
public void quit() {
}
}
| tatyano/athenz | libs/java/server_common/src/main/java/com/yahoo/athenz/common/metrics/impl/NoOpMetric.java | Java | apache-2.0 | 1,436 |
package ru.job4j.polymorphism;
/**
* Created on 01.09.2017.
*
* @author Aleks Sidorenko (alek.sidorenko1979@gmail.com).
* @version $Id$.
* @since 0.1.
*/
public class StubInput implements Input {
/**
     * Predefined answers to be returned in order.
*/
private String[] answers;
/**
     * Index of the next answer to return.
*/
private int position = 0;
/**
* Constructor.
     * @param answers predefined answers to return, in order.
*/
public StubInput(String[] answers) {
this.answers = answers;
}
/**
     * Returns the next stubbed answer; the question text is ignored.
     * @param question the question being asked (ignored).
     * @return the next predefined answer.
*/
public String ask(String question) {
return answers[position++];
}
}
| AlSidorenko/Junior | chapter_002/src/main/java/ru/job4j/polymorphism/StubInput.java | Java | apache-2.0 | 747 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.attribute;
import io.undertow.server.HttpServerExchange;
/**
* The thread name
*
* @author Stuart Douglas
*/
public class ThreadNameAttribute implements ExchangeAttribute {
public static final String THREAD_NAME_SHORT = "%I";
public static final String THREAD_NAME = "%{THREAD_NAME}";
public static final ExchangeAttribute INSTANCE = new ThreadNameAttribute();
private ThreadNameAttribute() {
}
@Override
public String readAttribute(final HttpServerExchange exchange) {
return Thread.currentThread().getName();
}
@Override
public void writeAttribute(final HttpServerExchange exchange, final String newValue) throws ReadOnlyAttributeException {
throw new ReadOnlyAttributeException("Thread name", newValue);
}
public static final class Builder implements ExchangeAttributeBuilder {
@Override
public String name() {
return "Thread name";
}
@Override
public ExchangeAttribute build(final String token) {
if (token.equals(THREAD_NAME) || token.equals(THREAD_NAME_SHORT)) {
return ThreadNameAttribute.INSTANCE;
}
return null;
}
}
}
| emag/codereading-undertow | core/src/main/java/io/undertow/attribute/ThreadNameAttribute.java | Java | apache-2.0 | 1,955 |
package org.techniche.technothlon.katana.tcd;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Looper;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.techniche.technothlon.katana.R;
import org.techniche.technothlon.katana.db.TCDDatabase;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Loads TechnoCoupDoeil (TCD) questions: downloads new questions from the
 * server gateway, caches them in the local SQLite database and exposes them
 * through the in-memory {@link #ITEMS} list and {@link #ITEM_MAP} map.
*/
public class TCDContent {
/**
     * List of loaded questions in display order (newest first).
*/
public static List<TCDQuestionMini> ITEMS = new ArrayList<TCDQuestionMini>();
/**
     * Map of loaded questions, keyed by unique ID.
*/
public static Map<String, TCDQuestion> ITEM_MAP = new HashMap<String, TCDQuestion>();
private static String url = "http://localhost/technothlon/technocoupdoeil_app_gateway/android/?technocoupdoeil=fjalkfq2045rudacnavsofu0aswd988q29ra&lastFetchId=";
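    /**
     * Downloads new questions from the gateway, starting from the last fetch id
     * stored in shared preferences, inserts them into the local database and
     * advances the stored fetch id. A "reset" response clears the local database
     * and re-downloads. Returns 0 on success, 1 when no network is available,
     * 2 on an IO error and 3 on a malformed response.
     */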
private static int download(Context context) {
SharedPreferences sharedPref = context.getSharedPreferences(
context.getString(R.string.preference_file_key), Context.MODE_PRIVATE);
long lastFetchID = sharedPref.getLong(context.getString(R.string.tcd_fetch_id), 0);
Log.d("Pref - log", lastFetchID + " from shared pref");
ConnectivityManager connMgr = (ConnectivityManager)
context.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
if (networkInfo != null && networkInfo.isConnected()) {
try {
JSONObject json = new JSONObject(downloadUrl(url + lastFetchID));
if (json.getString("status").equals("success")) {
TCDDatabase db = new TCDDatabase(context);
JSONArray questions = json.getJSONArray("questions");
lastFetchID = json.getLong("lastFetchId");
int count = json.getInt("questions_count"), lastID;
for (int i = 0; i < count; i++) {
JSONObject q = questions.getJSONObject(i);
JSONObject links = q.getJSONObject("links");
lastID = q.getInt("uniqueId");
db.insert(
lastID,
q.getString("id"),
q.getString("color"),
q.getString("title"),
q.getString("question"),
links.getString("facebook"),
links.getString("google"),
links.getString("tumblr"),
links.getString("answer"),
q.getString("by"),
q.getString("time"),
q.getString("answer")
);
Log.d("Database - log", lastID + " loaded in database");
}
db.close();
SharedPreferences.Editor edit = sharedPref.edit();
edit.putLong(context.getString(R.string.tcd_fetch_id), lastFetchID);
edit.commit();
} else if (json.getString("status").equals("reset")) {
TCDDatabase db = new TCDDatabase(context);
db.reset();
db.close();
SharedPreferences.Editor edit = sharedPref.edit();
edit.putLong(context.getString(R.string.tcd_fetch_id), 0);
edit.commit();
download(context);
}
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Completed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 0;
} catch (JSONException e) {
e.printStackTrace();
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 3;
} catch (IOException e) {
e.printStackTrace();
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "Sync Failed.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 2;
}
} else {
final Context ct = context;
new Thread() {
@Override
public void run() {
Looper.prepare();
Toast.makeText(ct, "No network connection available.", Toast.LENGTH_SHORT).show();
Looper.loop();
}
}.start();
return 1;
}
}
private static String downloadUrl(String myurl) throws IOException {
InputStream is = null;
        // Read the full response body as a string.
try {
URL url = new URL(myurl);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setReadTimeout(10000 /* milliseconds */);
conn.setConnectTimeout(15000 /* milliseconds */);
conn.setRequestMethod("GET");
conn.setDoInput(true);
// Starts the query
conn.connect();
int response = conn.getResponseCode();
Log.d("TCD latest downloads", "The response is: " + response);
int size = conn.getContentLength();
Log.d("TCD latest downloads", "The content-length is: " + size);
is = conn.getInputStream();
// Convert the InputStream into a string
return readTextResponse(is);
// Makes sure that the InputStream is closed after the app is
// finished using it.
} finally {
if (is != null) {
is.close();
}
}
}
private static String readTextResponse(InputStream inputStream) throws IOException {
Reader in = new InputStreamReader(inputStream);
BufferedReader bufferedreader = new BufferedReader(in);
StringBuilder stringBuilder = new StringBuilder();
String stringReadLine;
while ((stringReadLine = bufferedreader.readLine()) != null) {
stringBuilder.append(stringReadLine);
}
return stringBuilder.toString();
}
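    /**
     * Reloads ITEMS and ITEM_MAP from the local database, newest questions first.
     * When data is already cached, newly found questions are prepended to the list.
     */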
public static void load(Context context) {
        boolean update = !ITEMS.isEmpty();
TCDDatabase helper = new TCDDatabase(context);
SQLiteDatabase db = helper.getReadableDatabase();
assert db != null;
Cursor c = db.rawQuery("SELECT * FROM " + TCDDatabase.Contracts.NAME + " ORDER BY " + TCDDatabase.Contracts.FIELD_TIME + " DESC, " + TCDDatabase.Contracts.FIELD_ID + " DESC", null);
Log.d("DB", c.getCount() + " object in database");
c.moveToFirst();
while (!c.isAfterLast()) {
addItem(new TCDQuestion(
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ID)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_DISPLAY_ID)),
c.getInt(c.getColumnIndex(TCDDatabase.Contracts.FIELD_COLOR)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TITLE)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_QUESTION)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_FACEBOOK)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_GOOGLE)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TUMBLR)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER_URL)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_BY)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_ANSWER)),
c.getString(c.getColumnIndex(TCDDatabase.Contracts.FIELD_TIME))
), update);
c.moveToNext();
}
c.close();
db.close();
}
private static void addItem(TCDQuestion item, boolean update) {
if (!ITEM_MAP.containsKey(item.uniqueId)) {
if (update) ITEMS.add(0, (new TCDQuestionMini(item.uniqueId)));
else ITEMS.add((new TCDQuestionMini(item.uniqueId)));
ITEM_MAP.put(item.uniqueId, item);
}
}
public abstract static class TCDLoader extends AsyncTask<Object, Integer, Integer> {
@Override
protected Integer doInBackground(Object[] params) {
int d = 4;
try {
d = download((Context) params[0]);
} catch (Exception e) {
e.printStackTrace();
} finally {
load((Context) params[0]);
}
return d;
}
@Override
protected void onPostExecute(Integer o) {
finished(o);
}
public abstract void finished(int result);
}
/**
* A dummy item representing a piece of content.
*/
public static class TCDQuestion {
public String id;
public String question;
public String facebook;
public String google;
public String tumblr;
public String answer_url;
public String by;
public String answer;
public String title;
public java.util.Date date = null;
public String dateString = "";
public int color = R.drawable.tcd_background_1;
public String uniqueId;
private String status;
private boolean ret = false;
public TCDQuestion(String uniqueId, String id, int color, String title, String question, String facebook, String google, String tumblr,
String answer_url, String by, String answer, String status) {
this.uniqueId = uniqueId;
this.id = id;
this.title = title;
this.question = question;
this.facebook = facebook;
this.google = google;
this.tumblr = tumblr;
this.answer_url = answer_url;
this.by = by;
this.color = getBackground(color);
this.answer = answer;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
try {
this.date = sdf.parse(status);
} catch (ParseException e) {
e.printStackTrace();
}
sdf = new SimpleDateFormat("yyyy-MM-dd");
assert this.date != null;
this.dateString = sdf.format(this.date);
this.status = getStatus();
}
private int getBackground(int color) {
switch (color) {
case 10:
return R.drawable.tcd_background_2;
case 20:
return R.drawable.tcd_background_3;
case 30:
return R.drawable.tcd_background_4;
case 40:
return R.drawable.tcd_background_5;
case 50:
return R.drawable.tcd_background_6;
default:
return R.drawable.tcd_background_1;
}
}
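        /**
         * Returns a human-readable relative age such as "about 5 minutes ago" or
         * "yesterday"; once the question is older than four days the plain date
         * string is cached and returned instead.
         */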
public String getStatus() {
if (ret) return status;
long seconds = Math.abs(((new Date()).getTime() - date.getTime()) / 1000);
if (seconds < 60) status = "about " + seconds + " seconds ago";
else if (seconds < 3600) status = "about " + (seconds / 60) + " minutes ago";
else if (seconds < 86400) status = "about " + (seconds / 3600) + " hours ago";
else if (seconds < 172800) status = "yesterday";
else if (seconds < 345600) status = (seconds / 86400) + " days ago";
else {
ret = true;
status = dateString;
}
return status;
}
}
public static class TCDHolder {
public TextView id, title, question, status;
}
public static class TCDQuestionMini {
public String id;
public TCDQuestionMini(String id) {
this.id = id;
}
}
}
| znck/technothlon-android-app | katana/src/main/java/org/techniche/technothlon/katana/tcd/TCDContent.java | Java | apache-2.0 | 13,524 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.scan;
import java.lang.annotation.Annotation;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashSet;
import java.util.Set;
import org.apache.camel.impl.DefaultPackageScanClassResolver;
import org.apache.camel.spring.scan.a.ScanTargetOne;
import org.apache.camel.spring.scan.b.ScanTargetTwo;
import org.apache.camel.spring.scan.c.ScanTargetThree;
import org.junit.Before;
import org.junit.Test;
public class DefaultPackageScanClassResolverTest extends org.apache.camel.spring.scan.ScanTestSupport {
private DefaultPackageScanClassResolver resolver;
private Set<Class<? extends Annotation>> annotations = new HashSet<>();
private String scanPackage = "org.apache.camel.spring.scan";
@Before
public void setUp() throws Exception {
super.setUp();
resolver = new DefaultPackageScanClassResolver();
annotations.add(org.apache.camel.spring.scan.ScannableOne.class);
annotations.add(org.apache.camel.spring.scan.ScannableTwo.class);
}
@Test
    public void testAcceptableSchema() {
assertFalse("We should not accept the test by default!", resolver.isAcceptableScheme("test://test"));
resolver.setAcceptableSchemes("test:;test2:");
assertTrue("We should accept the test:!", resolver.isAcceptableScheme("test://test"));
assertTrue("We should accept the test2:!", resolver.isAcceptableScheme("test2://test"));
}
@Test
public void testFindByAnnotationWithoutExtraFilters() {
Set<Class<?>> scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);
scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableTwo.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetThree.class);
}
@Test
public void testFindByAnnotationsWithoutExtraFilters() {
Set<Class<?>> scanned = resolver.findAnnotated(annotations, scanPackage);
validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
}
@Test
public void testFindImplementationsWithoutExtraFilters() {
Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class);
}
@Test
public void testFindByAnnotationWithIncludePackageFilter() {
filter.addIncludePattern(scanPackage + ".b.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetTwo.class);
scanned = resolver.findAnnotated(ScannableTwo.class, scanPackage);
validateMatchingSetContains(scanned);
}
@Test
public void testFindByAnnotationsWithIncludePackageFilter() {
filter.addIncludePattern(scanPackage + ".b.*");
filter.addIncludePattern(scanPackage + ".c.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findAnnotated(annotations, "org.apache.camel.spring.scan");
validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
}
@Test
public void testFindByAnnotationWithExcludePackageFilter() {
filter.addExcludePattern(scanPackage + ".b.*");
filter.addExcludePattern(scanPackage + ".c.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findAnnotated(ScannableOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetOne.class);
scanned = resolver.findAnnotated(org.apache.camel.spring.scan.ScannableTwo.class, scanPackage);
validateMatchingSetContains(scanned);
}
@Test
public void testFindByAnnotationsWithExcludePackageFilter() {
filter.addExcludePattern(scanPackage + ".a.*");
Set<Class<?>> scanned = resolver.findAnnotated(annotations, "org.apache.camel.spring.scan");
validateMatchingSetContains(scanned, ScanTargetTwo.class, ScanTargetThree.class);
}
@Test
public void testFindByFilterWithIncludePackageFilter() {
filter.addIncludePattern(scanPackage + ".**.ScanTarget*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findByFilter(filter, "org.apache.camel.spring.scan");
validateMatchingSetContains(scanned, ScanTargetOne.class, ScanTargetTwo.class, ScanTargetThree.class);
}
@Test
public void testFindImplementationsWithIncludePackageFilter() {
filter.addIncludePattern(scanPackage + ".b.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetTwo.class);
}
@Test
public void testFindImplementationsWithExcludePackageFilter() {
filter.addExcludePattern(scanPackage + ".a.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findImplementations(ScanTargetOne.class, scanPackage);
validateMatchingSetContains(scanned, ScanTargetTwo.class);
}
@Test
    // Need to run "mvn clean install" to create the jar file when running from the IDE
public void testFindByFilterPackageInJarUrl() throws Exception {
ClassLoader savedClassLoader = null;
try {
savedClassLoader = Thread.currentThread().getContextClassLoader();
// build a mock URLClassLoader
URL url = getClass().getResource("/package_scan_test.jar");
URL urls[] = {new URL("jar:" + url.toString() + "!/")};
URLClassLoader classLoader = new URLClassLoader(urls, savedClassLoader);
Thread.currentThread().setContextClassLoader(classLoader);
// recreate resolver since we mess with context class loader
resolver = new DefaultPackageScanClassResolver();
filter.addIncludePattern("a.*.c.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findByFilter(filter, "a.b.c");
assertEquals(1, scanned.size());
assertEquals("class a.b.c.Test", scanned.iterator().next().toString());
} finally {
if (savedClassLoader != null) {
Thread.currentThread().setContextClassLoader(savedClassLoader);
}
}
}
@Test
    // Need to run "mvn clean install" to create the test jar file when running from the IDE
public void testFindByFilterPackageInJarUrlWithPlusChars() throws Exception {
ClassLoader savedClassLoader = null;
try {
savedClassLoader = Thread.currentThread().getContextClassLoader();
URL url = getClass().getResource("/package+scan+test.jar");
URL urls[] = {new URL("jar:" + url.toString() + "!/")};
URLClassLoader classLoader = new URLClassLoader(urls, savedClassLoader);
Thread.currentThread().setContextClassLoader(classLoader);
// recreate resolver since we mess with context class loader
resolver = new DefaultPackageScanClassResolver();
filter.addIncludePattern("a.*.c.*");
resolver.addFilter(filter);
Set<Class<?>> scanned = resolver.findByFilter(filter, "a.b.c");
assertEquals(1, scanned.size());
assertEquals("class a.b.c.Test", scanned.iterator().next().toString());
} finally {
if (savedClassLoader != null) {
Thread.currentThread().setContextClassLoader(savedClassLoader);
}
}
}
}
| punkhorn/camel-upstream | components/camel-spring/src/test/java/org/apache/camel/spring/scan/DefaultPackageScanClassResolverTest.java | Java | apache-2.0 | 8,636 |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.snmp.ctl;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfiguration;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfigurationFactory;
import com.btisystems.pronx.ems.core.snmp.ISnmpSession;
import com.btisystems.pronx.ems.core.snmp.ISnmpSessionFactory;
import com.google.common.collect.Maps;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.alarm.Alarm;
import org.onosproject.alarm.AlarmId;
import org.onosproject.alarm.DefaultAlarm;
import java.io.IOException;
import static org.junit.Assert.*;
/**
* DefaultSnmpController test class.
*/
public class DefaultSnmpControllerTest {
ISnmpSessionFactory mockSnmpSessionFactory = new MockISnmpSessionFactory();
DefaultSnmpController snmpController = new DefaultSnmpController();
DefaultSnmpDevice device = new DefaultSnmpDevice("1.1.1.1", 1, "test", "test");
ISnmpSession snmpSession = new ISnmpSessionAdapter();
long time = System.currentTimeMillis();
DefaultAlarm alarm = new DefaultAlarm.Builder(
AlarmId.alarmId(device.deviceId(), Long.toString(time)),
device.deviceId(), "SNMP alarm retrieval failed",
Alarm.SeverityLevel.CRITICAL,
time).build();
@Before
public void setUp() {
snmpController.factoryMap = Maps.newHashMap();
snmpController.factoryMap.put(1, mockSnmpSessionFactory);
}
@Test
public void testActivate() {
snmpController.activate(null);
        assertTrue("Snmp session factory map should contain at least one factory object",
snmpController.factoryMap.size() > 0);
}
@Test
public void testDeactivate() {
snmpController.deactivate();
assertEquals("Device map should be clear", 0, snmpController.getDevices().size());
assertEquals("Session map should be clear", 0, snmpController.sessionMap.size());
}
@Test
public void addDevice() {
snmpController.addDevice(device);
assertEquals("Controller should contain device", device, snmpController.getDevice(device.deviceId()));
}
/**
     * Tests session creation, and retrieval from the map once the session already exists.
*/
@Test
public void getNotExistingSession() throws Exception {
addDevice();
assertEquals("Session should be created", snmpSession, snmpController.getSession(device.deviceId()));
assertEquals("Map should contain session", 1, snmpController.snmpDeviceMap.size());
assertEquals("Session should be fetched from map", snmpSession, snmpController.getSession(device.deviceId()));
}
@Test
public void removeDevice() {
addDevice();
snmpController.removeDevice(device.deviceId());
        assertNull("Device should not be present", snmpController.getDevice(device.deviceId()));
}
@Test
public void walkFailedAlarm() {
assertEquals("Alarms should be equals", alarm, snmpController.buildWalkFailedAlarm(device.deviceId()));
}
public class MockISnmpSessionFactory implements ISnmpSessionFactory {
@Override
public ISnmpSession createSession(ISnmpConfiguration configuration, String ipAddress) throws IOException {
new ISnmpSessionAdapter();
return snmpSession;
}
@Override
public ISnmpSession createSession(String ipAddress, String community)
throws IOException {
return snmpSession;
}
@Override
public ISnmpSession createSession(String ipAddress, String community,
String factoryName,
ISnmpConfigurationFactory.AccessType accessType)
throws IOException {
return snmpSession;
}
}
}
| opennetworkinglab/onos | protocols/snmp/ctl/src/test/java/org/onosproject/snmp/ctl/DefaultSnmpControllerTest.java | Java | apache-2.0 | 4,414 |
package com.zaaach.citypicker.db;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Environment;
import com.zaaach.citypicker.model.City;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* author Bro0cL on 2016/1/26.
*/
public class DBManager {
private static final String ASSETS_NAME = "china_cities.db";
private static final String DB_NAME = "china_cities.db";
private static final String TABLE_NAME = "city";
private static final String NAME = "name";
private static final String PINYIN = "pinyin";
private static final int BUFFER_SIZE = 1024;
private String DB_PATH;
private Context mContext;
public DBManager(Context context) {
this.mContext = context;
DB_PATH = File.separator + "data"
+ Environment.getDataDirectory().getAbsolutePath() + File.separator
+ context.getPackageName() + File.separator + "databases" + File.separator;
}
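    /**
     * Copies the bundled city database from the app assets into the app's private
     * databases directory if it does not exist there yet.
     */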
@SuppressWarnings("ResultOfMethodCallIgnored")
public void copyDBFile(){
File dir = new File(DB_PATH);
if (!dir.exists()){
dir.mkdirs();
}
File dbFile = new File(DB_PATH + DB_NAME);
if (!dbFile.exists()){
InputStream is;
OutputStream os;
try {
is = mContext.getResources().getAssets().open(ASSETS_NAME);
os = new FileOutputStream(dbFile);
byte[] buffer = new byte[BUFFER_SIZE];
int length;
while ((length = is.read(buffer, 0, buffer.length)) > 0){
os.write(buffer, 0, length);
}
os.flush();
os.close();
is.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
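    /**
     * Loads every city (name and pinyin) from the database, sorted by the first
     * letter of the pinyin.
     */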
public List<City> getAllCities(){
SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + DB_NAME, null);
Cursor cursor = db.rawQuery("select * from " + TABLE_NAME, null);
List<City> result = new ArrayList<>();
City city;
while (cursor.moveToNext()){
String name = cursor.getString(cursor.getColumnIndex(NAME));
String pinyin = cursor.getString(cursor.getColumnIndex(PINYIN));
city = new City(name, pinyin);
result.add(city);
}
cursor.close();
db.close();
Collections.sort(result, new CityComparator());
return result;
}
public List<City> searchCity(final String keyword){
SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + DB_NAME, null);
Cursor cursor = db.rawQuery("select * from " + TABLE_NAME +" where name like \"%" + keyword
+ "%\" or pinyin like \"%" + keyword + "%\"", null);
List<City> result = new ArrayList<>();
City city;
while (cursor.moveToNext()){
String name = cursor.getString(cursor.getColumnIndex(NAME));
String pinyin = cursor.getString(cursor.getColumnIndex(PINYIN));
city = new City(name, pinyin);
result.add(city);
}
cursor.close();
db.close();
Collections.sort(result, new CityComparator());
return result;
}
/**
* sort by a-z
*/
private class CityComparator implements Comparator<City>{
@Override
public int compare(City lhs, City rhs) {
String a = lhs.getPinyin().substring(0, 1);
String b = rhs.getPinyin().substring(0, 1);
return a.compareTo(b);
}
}
}
| weiwenqiang/GitHub | SelectWidget/city/CityPicker/citypicker/src/main/java/com/zaaach/citypicker/db/DBManager.java | Java | apache-2.0 | 3,876 |
package com.huawei.esdk.sms.north.http.common;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import com.huawei.esdk.platform.common.utils.ESDKIOUtils;
import com.huawei.esdk.platform.common.utils.help.DocumentBuilderFactories;
import com.huawei.esdk.sms.north.http.bean.PlaceHolderBean;
public abstract class AbstractXMLProcessor implements IXMLProcessor
{
private static Logger LOGGER = Logger.getLogger(AbstractXMLProcessor.class);
@Override
public List<PlaceHolderBean> processClasspathXMLFile(String fileName)
throws ParserConfigurationException, SAXException, IOException
{
String xmlContent = ESDKIOUtils.getClasspathFileContent(fileName);
return parseXML(xmlContent);
}
@Override
public List<PlaceHolderBean> processXML(String xmlContent)
throws ParserConfigurationException, SAXException, IOException
{
return parseXML(xmlContent);
}
protected List<PlaceHolderBean> parseXML(String xmlAsString)
throws ParserConfigurationException, SAXException, IOException
{
DocumentBuilderFactory dbFactory = DocumentBuilderFactories.newSecurityInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document doc = dBuilder.parse(new InputSource(new ByteArrayInputStream(xmlAsString.getBytes("utf-8"))));
doc.getDocumentElement().normalize();
Element rootElement = doc.getDocumentElement();
List<PlaceHolderBean> result = new ArrayList<PlaceHolderBean>();
return parseNode(rootElement, result);
}
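    /**
     * Recursively walks the DOM tree. Elements without child elements are passed
     * to {@link #processElement(Element)} and any resulting placeholder is collected.
     */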
protected List<PlaceHolderBean> parseNode(Node nNode, List<PlaceHolderBean> placerHolders)
{
StringBuilder sb = new StringBuilder();
if (LOGGER.isDebugEnabled())
{
sb.append("Current Node :").append(nNode.getNodeName());
sb.append("|Node Type:").append(nNode.getNodeType());
sb.append("|Node Value:").append(nNode.getNodeValue());
sb.append("|Text Value:" + nNode.getTextContent());
LOGGER.debug(sb.toString());
}
if (nNode.getNodeType() == Node.ELEMENT_NODE)
{
Element eElement = (Element)nNode;
if (hasSubElement(nNode))
{
NodeList nList = nNode.getChildNodes();
Node nodeItem;
for (int temp = 0; temp < nList.getLength(); temp++)
{
nodeItem = nList.item(temp);
parseNode(nodeItem, placerHolders);
}
}
else
{
if (LOGGER.isDebugEnabled())
{
sb.delete(0, sb.length());
sb.append("Tag Name:").append(eElement.getTagName());
sb.append("|Node Name:").append(eElement.getNodeName());
sb.append("|Node Value:").append(eElement.getNodeValue());
sb.append("|Text Content:").append(eElement.getTextContent());
LOGGER.debug(sb.toString());
}
//It's the element which hasn't child element and should be processed
PlaceHolderBean placeHolder = processElement(eElement);
if (null != placeHolder)
{
placerHolders.add(placeHolder);
}
}
}
return placerHolders;
}
private boolean hasSubElement(Node node)
{
if (null == node || Node.ELEMENT_NODE != node.getNodeType())
{
return false;
}
NodeList nList = node.getChildNodes();
Node nodeItem;
for (int temp = 0; temp < nList.getLength(); temp++)
{
nodeItem = nList.item(temp);
if (Node.ELEMENT_NODE == nodeItem.getNodeType())
{
return true;
}
}
return false;
}
protected abstract PlaceHolderBean processElement(Element element);
}
| eSDK/esdk_sms | source/esdk_sms_neadp_http/src/main/java/com/huawei/esdk/sms/north/http/common/AbstractXMLProcessor.java | Java | apache-2.0 | 4,493 |
package com.example;
/**
* Created by Nish on 2/21/15.
*/
public interface Movable {
public void moveLeft();
public void moveRight();
}
| nishtahir/Mektory-BeginnersAndroid | Week2/mygame/src/main/java/com/example/Movable.java | Java | apache-2.0 | 147 |
package ch.unibe.scg.regex;
import static java.util.Collections.singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import ch.unibe.scg.regex.ParserProvider.Node;
import ch.unibe.scg.regex.ParserProvider.Node.Basic;
import ch.unibe.scg.regex.ParserProvider.Node.Group;
import ch.unibe.scg.regex.ParserProvider.Node.NonGreedyStar;
import ch.unibe.scg.regex.ParserProvider.Node.Optional;
import ch.unibe.scg.regex.ParserProvider.Node.Plus;
import ch.unibe.scg.regex.ParserProvider.Node.PositiveSet;
import ch.unibe.scg.regex.ParserProvider.Node.SetItem;
import ch.unibe.scg.regex.ParserProvider.Node.Simple;
import ch.unibe.scg.regex.ParserProvider.Node.Star;
import ch.unibe.scg.regex.ParserProvider.Node.Union;
import ch.unibe.scg.regex.TNFA.Builder;
import ch.unibe.scg.regex.Transition.Priority;
/**
* Not thread-safe! Use only from one thread at a time!
*
* @author nes
*/
class RegexToNFA {
final InputRangeCleanup inputRangeCleanup = new InputRangeCleanup();
TNFA convert(final Node node) {
Collection<InputRange> allInputRanges = new ArrayList<>();
allInputRanges.add(InputRange.ANY); // All regexes contain this implicitly.
findRanges(node, allInputRanges);
final Builder builder = Builder.make(allInputRanges);
builder.registerCaptureGroup(builder.captureGroupMaker.entireMatch);
final MiniAutomaton m =
makeInitialMiniAutomaton(builder, builder.captureGroupMaker.entireMatch);
final MiniAutomaton a = make(m, builder, node, builder.captureGroupMaker.entireMatch);
final State endTagger = builder.makeState();
builder.addEndTagTransition(a.finishing, endTagger, builder.captureGroupMaker.entireMatch,
Priority.NORMAL);
builder.setAsAccepting(endTagger);
return builder.build();
}
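  /** Recursively collects every InputRange that appears in the parse tree. */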
private void findRanges(Node n, Collection<InputRange> out) {
if (n instanceof Node.SetItem) {
out.add(((SetItem) n).inputRange);
}
for (Node c : n.getChildren()) {
findRanges(c, out);
}
}
static class MiniAutomaton {
final Collection<State> finishing;
final Collection<State> initial;
MiniAutomaton(final Collection<State> initial, final Collection<State> finishing) {
if (initial.iterator().next() == null) {
assert false;
}
this.initial = initial;
this.finishing = finishing;
}
MiniAutomaton(final Collection<State> initial, final State finishing) {
this(initial, singleton(finishing));
}
@Override
public String toString() {
return "" + initial + " -> " + finishing;
}
}
MiniAutomaton make(final MiniAutomaton last, final Builder builder, final Node node,
CaptureGroup captureGroup) {
MiniAutomaton ret;
if (node instanceof Node.Any) {
ret = makeAny(last, builder);
} else if (node instanceof Node.Char) {
ret = makeChar(last, builder, (Node.Char) node);
} else if (node instanceof Node.Simple) {
ret = makeSimple(last, builder, (Node.Simple) node, captureGroup);
} else if (node instanceof Node.Optional) {
ret = makeOptional(last, builder, (Node.Optional) node, captureGroup);
} else if (node instanceof Node.NonGreedyStar) {
ret = makeNonGreedyStar(last, builder, (Node.NonGreedyStar) node, captureGroup);
} else if (node instanceof Node.Star) {
ret = makeStar(last, builder, (Star) node, captureGroup);
} else if (node instanceof Node.Plus) {
ret = makePlus(last, builder, (Node.Plus) node, captureGroup);
} else if (node instanceof Node.Group) {
ret = makeGroup(last, builder, (Node.Group) node, captureGroup);
} else if (node instanceof Node.Eos) {
ret = makeEos(last, builder);
    } else if (node instanceof Node.PositiveSet) {
ret = makePositiveSet(last, builder, (Node.PositiveSet) node);
} else if (node instanceof Node.Union) {
ret = makeUnion(last, builder, (Node.Union) node, captureGroup);
} else {
throw new AssertionError("Unknown node type: " + node);
}
assert !ret.initial.contains(null);
assert !ret.finishing.contains(null);
return ret;
}
MiniAutomaton makeAny(final MiniAutomaton last, final Builder builder) {
final State a = builder.makeState();
builder.addUntaggedTransition(InputRange.ANY, last.finishing, a);
return new MiniAutomaton(last.finishing, a);
}
MiniAutomaton makeChar(final MiniAutomaton last, final Builder b, final Node.Char character) {
final State a = b.makeState();
final MiniAutomaton ret = new MiniAutomaton(last.finishing, a);
b.addUntaggedTransition(character.inputRange, ret.initial, a);
return ret;
}
MiniAutomaton makeEos(final MiniAutomaton last, final Builder builder) {
final State a = builder.makeState();
builder.addUntaggedTransition(InputRange.EOS, last.finishing, a);
return new MiniAutomaton(last.finishing, a);
}
MiniAutomaton makeGroup(final MiniAutomaton last, final Builder builder, final Group group,
CaptureGroup parentCaptureGroup) {
final CaptureGroup cg = builder.makeCaptureGroup(parentCaptureGroup);
builder.registerCaptureGroup(cg);
final State startGroup = builder.makeState();
builder.addStartTagTransition(last.finishing, startGroup, cg, Priority.NORMAL);
final MiniAutomaton startGroupAutomaton = new MiniAutomaton(singleton(startGroup), singleton(startGroup));
final MiniAutomaton body = make(startGroupAutomaton, builder, group.body, cg);
final State endTag = builder.makeState();
builder.addEndTagTransition(body.finishing, endTag, cg, Priority.NORMAL);
return new MiniAutomaton(last.finishing, endTag);
}
MiniAutomaton makeInitialMiniAutomaton(final Builder builder, CaptureGroup entireMatch) {
final State init = builder.makeInitialState();
final State startTagger = builder.makeState();
builder.addStartTagTransition(singleton(init), startTagger, entireMatch, Priority.NORMAL);
return new MiniAutomaton(singleton(init), singleton(startTagger));
}
MiniAutomaton makeOptional(final MiniAutomaton last, final Builder builder,
final Optional optional, CaptureGroup captureGroup) {
final MiniAutomaton ma = make(last, builder, optional.elementary, captureGroup);
final List<State> f = new ArrayList<>(last.finishing);
f.addAll(ma.finishing);
return new MiniAutomaton(last.finishing, f);
}
MiniAutomaton makePlus(final MiniAutomaton last, final Builder builder, final Plus plus,
CaptureGroup captureGroup) {
final MiniAutomaton inner = make(last, builder, plus.elementary, captureGroup);
Collection<State> out = singleton(builder.makeState());
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, out, Priority.LOW);
final MiniAutomaton ret = new MiniAutomaton(last.finishing, out);
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing,
inner.initial, Priority.NORMAL);
return ret;
}
MiniAutomaton makeUnion(MiniAutomaton last, Builder builder, Union union,
CaptureGroup captureGroup) {
MiniAutomaton left = make(last, builder, union.left, captureGroup);
MiniAutomaton right = make(last, builder, union.right, captureGroup);
Collection<State> out = singleton(builder.makeState());
builder.makeUntaggedEpsilonTransitionFromTo(left.finishing, out, Priority.NORMAL);
builder.makeUntaggedEpsilonTransitionFromTo(right.finishing, out, Priority.LOW);
return new MiniAutomaton(last.finishing, out);
}
MiniAutomaton makePositiveSet(final MiniAutomaton last, final Builder builder,
final PositiveSet set) {
final List<SetItem> is = set.items;
final SortedSet<InputRange> ranges = new TreeSet<>();
for (final SetItem i : is) {
ranges.add(i.inputRange);
}
final List<InputRange> rangesList = new ArrayList<>(ranges);
final List<InputRange> cleanedRanges = inputRangeCleanup.cleanUp(rangesList);
final State a = builder.makeState();
for (InputRange range : cleanedRanges) {
builder.addUntaggedTransition(range, last.finishing, a);
}
return new MiniAutomaton(last.finishing, a);
}
MiniAutomaton makeSimple(final MiniAutomaton last, final Builder b, final Simple simple,
CaptureGroup captureGroup) {
final List<? extends Basic> bs = simple.basics;
MiniAutomaton lm = last;
for (final Basic e : bs) {
lm = make(lm, b, e, captureGroup);
}
return new MiniAutomaton(last.finishing, lm.finishing);
}
MiniAutomaton makeNonGreedyStar(MiniAutomaton last, Builder builder, NonGreedyStar nonGreedyStar,
CaptureGroup captureGroup) {
// Make start state and connect.
State start = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);
// Make inner machine.
MiniAutomaton innerLast = new MiniAutomaton(last.finishing, start);
final MiniAutomaton inner = make(innerLast, builder, nonGreedyStar.elementary, captureGroup);
// Connect inner machine back to start.
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.LOW);
// Make and connect `out` state.
State out = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.NORMAL);
return new MiniAutomaton(last.finishing, out);
}
MiniAutomaton makeStar(final MiniAutomaton last, final Builder builder, final Star star,
CaptureGroup captureGroup) {
// Make start state and connect.
State start = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);
// Make inner machine.
MiniAutomaton innerLast = new MiniAutomaton(singleton(start), start);
final MiniAutomaton inner = make(innerLast, builder, star.elementary, captureGroup);
// Connect inner machine back to start.
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.NORMAL);
// Make and connect `out` state.
State out = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.LOW);
return new MiniAutomaton(last.finishing, out);
}
}
| nes1983/tree-regex | src/ch/unibe/scg/regex/RegexToNFA.java | Java | apache-2.0 | 10,402 |
/*
* Copyright (c) 2015 TextGlass
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
public class TransformerIsNumber implements Transformer {
@Override
public String transform(String input) throws Exception {
try {
Double.parseDouble(input);
} catch(NumberFormatException nfe) {
throw new Exception(nfe.toString());
}
return input;
}
@Override
public String toString() {
return "TransformerIsNumber";
}
}
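
/*
 * Illustrative usage sketch, not part of the original TextGlass reference client.
 * The example class name and the sample inputs are invented; only the public
 * transform(String) method of the class above is exercised.
 */
class TransformerIsNumberExample {
	public static void main(String[] args) {
		TransformerIsNumber transformer = new TransformerIsNumber();
		try {
			// Numeric input is returned unchanged.
			System.out.println(transformer.transform("42.5"));
		} catch (Exception unexpected) {
			throw new IllegalStateException(unexpected);
		}
		try {
			// Non-numeric input is rejected with an Exception wrapping the NumberFormatException.
			transformer.transform("not-a-number");
		} catch (Exception expected) {
			System.out.println("rejected: " + expected.getMessage());
		}
	}
}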
| TextGlass/reference | client/src/TransformerIsNumber.java | Java | apache-2.0 | 972 |
/*
* Copyright (c) 2017 Martin Pfeffer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pepperonas.materialdialog.adapter;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ResolveInfo;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.pepperonas.materialdialog.R;
import com.pepperonas.materialdialog.utils.Utils;
import java.util.List;
/**
* @author Martin Pfeffer (pepperonas)
*/
public class ShareAdapter extends BaseAdapter {
private Object[] items;
private LayoutInflater mInflater;
private Context mCtx;
private Typeface mTypeface;
public ShareAdapter(@NonNull Context context) {
this.mInflater = LayoutInflater.from(context);
Intent sendIntent = new Intent(android.content.Intent.ACTION_SEND);
sendIntent.setType("text/plain");
        List<ResolveInfo> activities = context.getPackageManager().queryIntentActivities(sendIntent, 0);
items = activities.toArray();
mCtx = context;
}
public ShareAdapter(@NonNull Context context, Typeface typeface) {
this.mInflater = LayoutInflater.from(context);
Intent sendIntent = new Intent(android.content.Intent.ACTION_SEND);
sendIntent.setType("text/plain");
        List<ResolveInfo> activities = context.getPackageManager().queryIntentActivities(sendIntent, 0);
items = activities.toArray();
mCtx = context;
mTypeface = typeface;
}
public int getCount() {
return items.length;
}
public Object getItem(int position) {
return items[position];
}
public long getItemId(int position) {
return position;
}
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
if (convertView == null) {
convertView = mInflater.inflate(R.layout.custom_list_item_share_app, null);
holder = new ViewHolder();
holder.logo = (ImageView) convertView.findViewById(R.id.iv_simple_list_item_share_app);
holder.name = (TextView) convertView.findViewById(R.id.tv_simple_list_item_share_app);
if (mTypeface != null) {
holder.name.setTypeface(mTypeface);
}
convertView.setTag(holder);
} else {
holder = (ViewHolder) convertView.getTag();
}
holder.name.setText(((ResolveInfo) items[position]).activityInfo
.applicationInfo.loadLabel(mCtx.getPackageManager()).toString());
holder.logo.setImageDrawable(((ResolveInfo) items[position]).activityInfo
.applicationInfo.loadIcon(mCtx.getPackageManager()));
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT);
layoutParams.setMargins(
Utils.dp2px(mCtx, 16),
Utils.dp2px(mCtx, 4),
Utils.dp2px(mCtx, 4),
Utils.dp2px(mCtx, 4));
holder.logo.setLayoutParams(layoutParams);
return convertView;
}
static class ViewHolder {
TextView name;
ImageView logo;
}
} | pepperonas/MaterialDialog | library/src/main/java/com/pepperonas/materialdialog/adapter/ShareAdapter.java | Java | apache-2.0 | 3,990 |
package com.siqisoft.stone.admin.dict.controller;
import java.util.List;
import org.siqisource.stone.dict.model.Dict;
import org.siqisource.stone.dict.service.DictService;
import org.siqisource.stone.orm.condition.Condition;
import org.siqisource.stone.ui.AjaxResponse;
import org.siqisource.stone.ui.Notify;
import org.siqisource.stone.ui.easyui.PagedRows;
import org.siqisource.stone.ui.easyui.Paging;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.siqisoft.stone.admin.dict.service.DictConditionBuilder;
@Controller
public class DictController {
@Autowired
DictService service;
@RequestMapping("/dict/DictList.do")
public String list(Model model) {
return "dict/DictList";
}
@RequestMapping("/dict/dictListData.do")
@ResponseBody
public PagedRows<Dict> listData(DictQueryForm dictQueryForm, Paging paging) {
Condition condition = DictConditionBuilder.listCondition(dictQueryForm);
int count = service.count(condition);
List<Dict> dictList = service.list(condition, paging.getRowBounds());
return new PagedRows<Dict>(count, dictList);
}
@RequestMapping("/dict/DictRead.do")
public String read(String code, Model model) {
Dict dict = service.read(code);
model.addAttribute("dict", dict);
return "dict/DictRead";
}
@RequestMapping("/dict/DictAddInit.do")
public String addInit(Dict dict, Model model) {
return "dict/DictAdd";
}
@RequestMapping("/dict/DictAdd.do")
public String add(Dict dict, Model model) {
service.insert(dict);
return this.read(dict.getCode(), model);
}
@RequestMapping("/dict/dictDelete.do")
@ResponseBody
public AjaxResponse delete(String[] codeList, Model model) {
		// TODO: check whether the dictionary entries are still referenced before deleting them
		if (codeList == null || codeList.length == 0) {
			return new Notify("No records selected for deletion");
		}
		service.deleteBatch(codeList);
		return new Notify("Successfully deleted " + codeList.length + " records");
}
@RequestMapping("/dict/DictEditInit.do")
public String editInit(String code, Model model) {
Dict dict = service.read(code);
model.addAttribute("dict", dict);
return "dict/DictEdit";
}
@RequestMapping("/dict/DictEdit.do")
public String edit(Dict dict, Model model) {
service.update(dict);
return this.read(dict.getCode(), model);
}
}
| ylyxf/stone-sdk | src/main/java/com/siqisoft/stone/admin/dict/controller/DictController.java | Java | apache-2.0 | 2,471 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.listeners;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiElement;
/**
 * Refactorings invoke {@link #getListener(com.intellij.psi.PsiElement)} on every registered
 * {@linkplain RefactoringElementListenerProvider} before a particular element is subjected to a refactoring.
 *
 * @author dsl
 */
public interface RefactoringElementListenerProvider {
ExtensionPointName<RefactoringElementListenerProvider> EP_NAME = ExtensionPointName.create("com.intellij.refactoring.elementListenerProvider");
  /**
   * Returns a listener for the given element, or {@code null} if this provider is not interested in it.
   * Invoked inside a read action.
   */
@javax.annotation.Nullable
RefactoringElementListener getListener(PsiElement element);
}
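
/*
 * Illustrative sketch, not part of the IntelliJ sources: the minimal shape of a provider
 * registered under the "com.intellij.refactoring.elementListenerProvider" extension point.
 * The class name is invented. Returning null from getListener() means "no listener is needed
 * for this element"; a real provider would return a RefactoringElementListener that updates
 * whatever external data refers to the element being refactored.
 */
class NoOpRefactoringElementListenerProvider implements RefactoringElementListenerProvider {
  @Override
  public RefactoringElementListener getListener(PsiElement element) {
    return null; // not interested in any element
  }
}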
| consulo/consulo | modules/base/lang-api/src/main/java/com/intellij/refactoring/listeners/RefactoringElementListenerProvider.java | Java | apache-2.0 | 1,347 |
/*
* Copyright 2016 Shredder121.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.shredder121.gh_event_api.handler.pull_request;
/**
* The handler interface for receiving {@code pull_request} events.
*
* @author Shredder121
*/
@FunctionalInterface
public interface PullRequestHandler {
void handle(PullRequestPayload payload);
}
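
/*
 * Illustrative sketch, not part of the gh-event-api sources. Because PullRequestHandler is a
 * functional interface, a handler can be supplied as a lambda; the class name and the logging
 * statement below are invented for demonstration only.
 */
class LoggingPullRequestHandlerExample {

    static final PullRequestHandler HANDLER =
            payload -> System.out.println("received pull_request payload: " + payload);
}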
| johnktims/gh-event-api | src/main/java/com/github/shredder121/gh_event_api/handler/pull_request/PullRequestHandler.java | Java | apache-2.0 | 873 |
package com.nguyenmanhtuan.benhandientu;
import android.app.Activity;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.HashMap;
import java.util.Locale;
import com.nguyenmanhtuan.utils.DatabaseHandler;
public class RegisteredActivity extends Activity {
private Locale myLocale;
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_registered);
DatabaseHandler db = new DatabaseHandler(getApplicationContext());
        HashMap<String, String> user = db.getUserDetails();
/**
* Displays the registration details in Text view
**/
final TextView fname = (TextView) findViewById(R.id.fname);
final TextView lname = (TextView) findViewById(R.id.lname);
final TextView uname = (TextView) findViewById(R.id.uname);
final TextView email = (TextView) findViewById(R.id.email);
final TextView address = (TextView) findViewById(R.id.tvadd);
final TextView phonenumber = (TextView) findViewById(R.id.tvphone);
final TextView birthyear = (TextView) findViewById(R.id.tvBirthyear);
final TextView created_at = (TextView) findViewById(R.id.regat);
fname.setText(user.get("fname"));
lname.setText(user.get("lname"));
uname.setText(user.get("uname"));
email.setText(user.get("email"));
address.setText(user.get("address"));
phonenumber.setText(user.get("phonenumber"));
birthyear.setText(user.get("birthyear"));
created_at.setText(user.get("created_at"));
Button login = (Button) findViewById(R.id.login);
login.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Intent myIntent = new Intent(view.getContext(), LoginActivity.class);
startActivityForResult(myIntent, 0);
finish();
}
});
}
public void setLocale(String lang) {
myLocale = new Locale(lang);
Resources res = getResources();
DisplayMetrics dm = res.getDisplayMetrics();
Configuration conf = res.getConfiguration();
conf.locale = myLocale;
res.updateConfiguration(conf, dm);
Intent refresh = new Intent(this, RegisteredActivity.class);
startActivity(refresh);
}
}
| techmaster-prj/BenhAnDienTu | BenhAnDienTu/src/com/nguyenmanhtuan/benhandientu/RegisteredActivity.java | Java | apache-2.0 | 2,788 |
package com.vertabelo.mobileorm.myplaces.orm.gen;
public class AddressViewDAOImpl
extends com.vertabelo.mobileorm.myplaces.orm.runtime.dao.BaseDAO<AddressView>
implements AddressViewDAO {
public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource) {
super(dataSource);
}
public AddressViewDAOImpl(com.vertabelo.mobileorm.myplaces.orm.runtime.util.SQLiteDataSource dataSource,
com.vertabelo.mobileorm.myplaces.orm.runtime.util.DAOMonitor daoMonitor) {
super(dataSource, daoMonitor);
}
@Override
public Class<AddressView> getPojoClass() {
return POJO_CLASS;
}
@Override
public com.vertabelo.mobileorm.myplaces.orm.runtime.query.TableExpression getTableExpression() {
return TABLE_EXPRESSION;
}
@Override
public com.vertabelo.mobileorm.myplaces.orm.runtime.util.ResultSetHandler getResultSetHandler() {
return RESULT_SET_HANDLER;
}
@Override
public java.util.List<AddressView> getAddressViewList() {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.orderBy(orderBy);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection asc) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.orderBy(orderBy, asc);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
query.orderBy(orderBy);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public java.util.List<AddressView> getAddressViewList(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp orderBy, com.vertabelo.mobileorm.myplaces.orm.runtime.query.OrderByDirection asc) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION);
query.setWhere(where);
query.orderBy(orderBy, asc);
com.vertabelo.mobileorm.myplaces.orm.runtime.dao.SelectObjectListResult<AddressView>
selectObjectListResult = select(query, RESULT_SET_HANDLER);
return selectObjectListResult.getObjectList();
}
@Override
public Long getCount() {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT",
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK));
java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList();
if (list.size() > 1) {
throw new RuntimeException("More than one object returned");
} else if (list.size() == 1) {
return list.get(0);
} else {
throw new RuntimeException("Cannot retrieve count() method result");
}
}
@Override
public Long getCount(com.vertabelo.mobileorm.myplaces.orm.runtime.query.LExp where) {
com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery query =
new com.vertabelo.mobileorm.myplaces.orm.runtime.query.SelectQuery(TABLE_EXPRESSION,
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.fun("COUNT",
com.vertabelo.mobileorm.myplaces.orm.runtime.query.AExp.ASTERISK));
query.setWhere(where);
java.util.List<Long> list = select(query, new com.vertabelo.mobileorm.myplaces.orm.runtime.util.handlers.LongResultSetHandler()).getObjectList();
if (list.size() > 1) {
throw new RuntimeException("More than one object returned");
} else if (list.size() == 1) {
return list.get(0);
} else {
throw new RuntimeException("Cannot retrieve count() method result");
}
}
}
| Vertabelo/mobiorm-demo-android | app/src/main/java/com/vertabelo/mobileorm/myplaces/orm/gen/AddressViewDAOImpl.java | Java | apache-2.0 | 6,557 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.operator;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.SubEvent;
import org.apache.flink.cep.nfa.NFA;
import org.apache.flink.cep.nfa.compiler.NFACompiler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OperatorSnapshotUtil;
import org.apache.flink.streaming.util.migration.MigrationTestUtil;
import org.apache.flink.streaming.util.migration.MigrationVersion;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;
import static org.apache.flink.cep.operator.CepOperatorTestUtilities.getKeyedCepOpearator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests for checking whether CEP operator can restore from snapshots that were done
* using previous Flink versions.
*
* <p>For regenerating the binary snapshot file of previous versions you have to run the
* {@code write*()} method on the corresponding Flink release-* branch.
*/
@RunWith(Parameterized.class)
public class CEPMigrationTest {
/**
* TODO change this to the corresponding savepoint version to be written (e.g. {@link MigrationVersion#v1_3} for 1.3)
* TODO and remove all @Ignore annotations on write*Snapshot() methods to generate savepoints
*/
private final MigrationVersion flinkGenerateSavepointVersion = null;
private final MigrationVersion migrateVersion;
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
	public static Collection<MigrationVersion> parameters() {
return Arrays.asList(MigrationVersion.v1_3, MigrationVersion.v1_4, MigrationVersion.v1_5);
}
public CEPMigrationTest(MigrationVersion migrateVersion) {
this.migrateVersion = migrateVersion;
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeAfterBranchingPatternSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<Event>(startEvent, 1));
harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
harness
.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
harness.processElement(new StreamRecord<Event>(middleEvent2, 3));
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-after-branching-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testRestoreAfterBranchingPattern() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
final Event endEvent = new Event(42, "end", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-after-branching-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4));
harness.processElement(new StreamRecord<>(endEvent, 5));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and 2 results
assertEquals(3, result.size());
Object resultObject1 = result.poll();
assertTrue(resultObject1 instanceof StreamRecord);
StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
assertTrue(resultRecord1.getValue() instanceof Map);
Object resultObject2 = result.poll();
assertTrue(resultObject2 instanceof StreamRecord);
StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
assertTrue(resultRecord2.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap1 =
(Map<String, List<Event>>) resultRecord1.getValue();
assertEquals(startEvent, patternMap1.get("start").get(0));
assertEquals(middleEvent1, patternMap1.get("middle").get(0));
assertEquals(endEvent, patternMap1.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap2 =
(Map<String, List<Event>>) resultRecord2.getValue();
assertEquals(startEvent, patternMap2.get("start").get(0));
assertEquals(middleEvent2, patternMap2.get("middle").get(0));
assertEquals(endEvent, patternMap2.get("end").get(0));
// and now go for a checkpoint with the new serializers
final Event startEvent1 = new Event(42, "start", 2.0);
final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
final Event endEvent1 = new Event(42, "end", 2.0);
harness.processElement(new StreamRecord<Event>(startEvent1, 21));
harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
// simulate snapshot/restore with some elements in internal sorting queue
OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
harness.close();
harness = new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
harness.setup();
harness.initializeState(snapshot);
harness.open();
harness.processElement(new StreamRecord<>(endEvent1, 25));
harness.processWatermark(new Watermark(50));
result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject3 = result.poll();
assertTrue(resultObject3 instanceof StreamRecord);
StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
assertTrue(resultRecord3.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap3 =
(Map<String, List<Event>>) resultRecord3.getValue();
assertEquals(startEvent1, patternMap3.get("start").get(0));
assertEquals(middleEvent3, patternMap3.get("middle").get(0));
assertEquals(endEvent1, patternMap3.get("end").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeStartingNewPatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<Event>(startEvent1, 1));
harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
harness
.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-starting-new-pattern-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testRestoreStartingNewPatternAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final Event startEvent2 = new Event(42, "start", 5.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
final Event endEvent = new Event(42, "end", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-starting-new-pattern-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(startEvent2, 5));
harness.processElement(new StreamRecord<Event>(middleEvent2, 6));
harness.processElement(new StreamRecord<>(endEvent, 7));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and 3 results
assertEquals(4, result.size());
Object resultObject1 = result.poll();
assertTrue(resultObject1 instanceof StreamRecord);
StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
assertTrue(resultRecord1.getValue() instanceof Map);
Object resultObject2 = result.poll();
assertTrue(resultObject2 instanceof StreamRecord);
StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
assertTrue(resultRecord2.getValue() instanceof Map);
Object resultObject3 = result.poll();
assertTrue(resultObject3 instanceof StreamRecord);
StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
assertTrue(resultRecord3.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap1 =
(Map<String, List<Event>>) resultRecord1.getValue();
assertEquals(startEvent1, patternMap1.get("start").get(0));
assertEquals(middleEvent1, patternMap1.get("middle").get(0));
assertEquals(endEvent, patternMap1.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap2 =
(Map<String, List<Event>>) resultRecord2.getValue();
assertEquals(startEvent1, patternMap2.get("start").get(0));
assertEquals(middleEvent2, patternMap2.get("middle").get(0));
assertEquals(endEvent, patternMap2.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap3 =
(Map<String, List<Event>>) resultRecord3.getValue();
assertEquals(startEvent2, patternMap3.get("start").get(0));
assertEquals(middleEvent2, patternMap3.get("middle").get(0));
assertEquals(endEvent, patternMap3.get("end").get(0));
// and now go for a checkpoint with the new serializers
final Event startEvent3 = new Event(42, "start", 2.0);
final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
final Event endEvent1 = new Event(42, "end", 2.0);
harness.processElement(new StreamRecord<Event>(startEvent3, 21));
harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
// simulate snapshot/restore with some elements in internal sorting queue
OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
harness.close();
harness = new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
harness.setup();
harness.initializeState(snapshot);
harness.open();
harness.processElement(new StreamRecord<>(endEvent1, 25));
harness.processWatermark(new Watermark(50));
result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject4 = result.poll();
assertTrue(resultObject4 instanceof StreamRecord);
StreamRecord<?> resultRecord4 = (StreamRecord<?>) resultObject4;
assertTrue(resultRecord4.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap4 =
(Map<String, List<Event>>) resultRecord4.getValue();
assertEquals(startEvent3, patternMap4.get("start").get(0));
assertEquals(middleEvent3, patternMap4.get("middle").get(0));
assertEquals(endEvent1, patternMap4.get("end").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeSinglePatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-single-pattern-afterwards-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testSinglePatternAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-single-pattern-afterwards-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(startEvent1, 5));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject = result.poll();
assertTrue(resultObject instanceof StreamRecord);
StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
assertTrue(resultRecord.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap =
(Map<String, List<Event>>) resultRecord.getValue();
assertEquals(startEvent1, patternMap.get("start").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<>(startEvent1, 5));
harness.processWatermark(new Watermark(6));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testAndOrSubtypeConditionsAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-conditions-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
final Event endEvent = new SubEvent(42, "end", 1.0, 2.0);
harness.processElement(new StreamRecord<>(endEvent, 9));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject = result.poll();
assertTrue(resultObject instanceof StreamRecord);
StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
assertTrue(resultRecord.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap =
(Map<String, List<Event>>) resultRecord.getValue();
assertEquals(startEvent1, patternMap.get("start").get(0));
assertEquals(endEvent, patternMap.get("start").get(1));
} finally {
harness.close();
}
}
private static class SinglePatternNFAFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private SinglePatternNFAFactory() {
this(false);
}
private SinglePatternNFAFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter())
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class NFAComplexConditionsFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private NFAComplexConditionsFactory() {
this(false);
}
private NFAComplexConditionsFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start")
.subtype(SubEvent.class)
.where(new MiddleFilter())
.or(new SubEventEndFilter())
.times(2)
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class NFAFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private NFAFactory() {
this(false);
}
private NFAFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter())
.followedByAny("middle")
.subtype(SubEvent.class)
.where(new MiddleFilter())
.followedByAny("end")
.where(new EndFilter())
// add a window timeout to test whether timestamps of elements in the
// priority queue in CEP operator are correctly checkpointed/restored
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class StartFilter extends SimpleCondition<Event> {
private static final long serialVersionUID = 5726188262756267490L;
@Override
public boolean filter(Event value) throws Exception {
return value.getName().equals("start");
}
}
private static class MiddleFilter extends SimpleCondition<SubEvent> {
private static final long serialVersionUID = 6215754202506583964L;
@Override
public boolean filter(SubEvent value) throws Exception {
return value.getVolume() > 5.0;
}
}
private static class EndFilter extends SimpleCondition<Event> {
private static final long serialVersionUID = 7056763917392056548L;
@Override
public boolean filter(Event value) throws Exception {
return value.getName().equals("end");
}
}
private static class SubEventEndFilter extends SimpleCondition<SubEvent> {
private static final long serialVersionUID = 7056763917392056548L;
@Override
public boolean filter(SubEvent value) throws Exception {
return value.getName().equals("end");
}
}
}
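
/*
 * Editor's note, not part of the Flink sources: a sketch of how the binary savepoints read by the
 * testRestore*() methods are produced, based on the class Javadoc and the @Ignore'd write*Snapshot()
 * methods above. The Maven command is an assumption (Surefire's -Dtest filter), not something this
 * file prescribes.
 *
 *   1. Check out the matching release branch (e.g. release-1.3) and set
 *      flinkGenerateSavepointVersion to that MigrationVersion.
 *   2. Remove the @Ignore annotations and run a writer, for example:
 *        mvn test -Dtest=CEPMigrationTest#writeAfterBranchingPatternSnapshot
 *   3. Copy the generated src/test/resources/cep-migration-*-flink<version>-snapshot files onto the
 *      current branch, where the restore tests load them via OperatorSnapshotUtil.
 */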
| zhangminglei/flink | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/operator/CEPMigrationTest.java | Java | apache-2.0 | 24,021 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.generation;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.intention.AddAnnotationPsiFix;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_EXTERNAL;
import static com.intellij.codeInsight.AnnotationUtil.CHECK_TYPE;
/**
* @author anna
*/
public interface OverrideImplementsAnnotationsHandler {
ExtensionPointName<OverrideImplementsAnnotationsHandler> EP_NAME = ExtensionPointName.create("com.intellij.overrideImplementsAnnotationsHandler");
/**
* Returns annotations which should be copied from a source to an implementation (by default, no annotations are copied).
*/
default String[] getAnnotations(@NotNull PsiFile file) {
//noinspection deprecation
return getAnnotations(file.getProject());
}
/**
* @deprecated Use {@link #getAnnotations(PsiFile)}
*/
@Deprecated
String[] getAnnotations(Project project);
@Deprecated
@NotNull
default String[] annotationsToRemove(Project project, @NotNull String fqName) {
return ArrayUtil.EMPTY_STRING_ARRAY;
}
/** Perform post processing on the annotations, such as deleting or renaming or otherwise updating annotations in the override */
default void cleanup(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
}
static void repeatAnnotationsFromSource(PsiModifierListOwner source, @Nullable PsiElement targetClass, PsiModifierListOwner target) {
Module module = ModuleUtilCore.findModuleForPsiElement(targetClass != null ? targetClass : target);
GlobalSearchScope moduleScope = module != null ? GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module) : null;
Project project = target.getProject();
JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
for (String annotation : each.getAnnotations(target.getContainingFile())) {
if (moduleScope != null && facade.findClass(annotation, moduleScope) == null) continue;
int flags = CHECK_EXTERNAL | CHECK_TYPE;
if (AnnotationUtil.isAnnotated(source, annotation, flags) && !AnnotationUtil.isAnnotated(target, annotation, flags)) {
each.transferToTarget(annotation, source, target);
}
}
}
for (OverrideImplementsAnnotationsHandler each : EP_NAME.getExtensionList()) {
each.cleanup(source, targetClass, target);
}
}
default void transferToTarget(String annotation, PsiModifierListOwner source, PsiModifierListOwner target) {
PsiModifierList modifierList = target.getModifierList();
assert modifierList != null : target;
PsiAnnotation srcAnnotation = AnnotationUtil.findAnnotation(source, annotation);
PsiNameValuePair[] valuePairs = srcAnnotation != null ? srcAnnotation.getParameterList().getAttributes() : PsiNameValuePair.EMPTY_ARRAY;
AddAnnotationPsiFix.addPhysicalAnnotation(annotation, valuePairs, modifierList);
}
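
  /*
   * Illustrative sketch, not part of the IntelliJ sources: a minimal handler showing the contract.
   * The nested class name and the choice of annotation are invented; an implementation only has to
   * say which annotation FQNs should be repeated on the generated override, while copying and
   * cleanup are covered by the default methods above.
   */
  class NotNullCopyingHandlerExample implements OverrideImplementsAnnotationsHandler {
    @Override
    public String[] getAnnotations(Project project) {
      return new String[]{"org.jetbrains.annotations.NotNull"};
    }
  }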
} | paplorinc/intellij-community | java/java-impl/src/com/intellij/codeInsight/generation/OverrideImplementsAnnotationsHandler.java | Java | apache-2.0 | 3,542 |
package com.chisw.work.addressbook.test;
import com.chisw.work.addressbook.Data.GroupData;
import com.chisw.work.addressbook.Data.Groups;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestGroupModification extends TestBase {
@BeforeMethod
public void checkPreconditions() {
if (app.db().groups().size() == 0) {
app.goTo().groupPage();
app.groups().createGroupInBeforeMethod();
}
}
@Test
public void checkGroupModification() {
Groups before = app.db().groups();
GroupData modifiedGroup = before.iterator().next();
GroupData group = new GroupData()
.withId(modifiedGroup.getId()).withGroupName("test 258").withGroupLogo("Logo 123").withGroupComment("Comment 12345");
app.goTo().groupPage();
app.groups().modifyGroup(group);
        assertThat(app.groups().count(), equalTo(before.size()));
Groups after = app.db().groups();
assertThat(after, equalTo(before.withoutAdded(modifiedGroup).withAdded(group)));
verifyGroupsListInUi();
}
}
| Tarrest/java_home | addressbook/src/test/java/com/chisw/work/addressbook/test/TestGroupModification.java | Java | apache-2.0 | 1,230 |
package io.omengye.common.utils.constants;
public class Constants {
private Constants(){}
public static final String RESULT_FLAG = "flag";
}
| omengye/ws | common/src/main/java/io/omengye/common/utils/constants/Constants.java | Java | apache-2.0 | 153 |
/*
* Created on Mar 29, 2009
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright @2013 the original author or authors.
*/
package org.fest.assertions.api;
import static org.fest.test.ExpectedException.none;
import org.fest.test.ExpectedException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
* Tests for {@link LongAssert#isNull()}.
*
* @author Yvonne Wang
*/
public class LongAssert_isNull_Test {
@Rule
public ExpectedException thrown = none();
private LongAssert assertions;
private Long actual;
@Before
public void setUp() {
actual = null;
assertions = new LongAssert(actual);
}
@Test
public void should_pass_if_actual_is_null() {
assertions.isNull();
}
@Test
public void should_fail_if_actual_is_not_null() {
thrown.expect(AssertionError.class);
    actual = new Long(6L);
assertions = new LongAssert(actual);
assertions.isNull();
}
}
| alexruiz/fest-assert-2.x | src/test/java/org/fest/assertions/api/LongAssert_isNull_Test.java | Java | apache-2.0 | 1,440 |
/*******************************************************************************
* Copyright 2006 - 2012 Vienna University of Technology,
* Department of Software Technology and Interactive Systems, IFS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This work originates from the Planets project, co-funded by the European Union under the Sixth Framework Programme.
******************************************************************************/
package eu.scape_project.planning.model.transform;
import java.io.Serializable;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.ManyToOne;
import eu.scape_project.planning.model.ChangeLog;
import eu.scape_project.planning.model.IChangesHandler;
import eu.scape_project.planning.model.ITouchable;
import eu.scape_project.planning.model.Values;
import eu.scape_project.planning.model.values.INumericValue;
import eu.scape_project.planning.model.values.IOrdinalValue;
import eu.scape_project.planning.model.values.TargetValues;
import eu.scape_project.planning.model.values.Value;
import eu.scape_project.planning.validation.ValidationError;
/**
* Implements basic transformation functionality, i.e. aggregation over {@link Values} and
* common properties of transformers.
* @author Hannes Kulovits
*/
@Entity
@Inheritance
@DiscriminatorColumn(name = "type")
public abstract class Transformer implements ITransformer, Serializable, ITouchable
{
private static final long serialVersionUID = -3708795251848706848L;
@Id
@GeneratedValue
protected int id;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
@ManyToOne(cascade=CascadeType.ALL)
private ChangeLog changeLog = new ChangeLog();
/**
* Transforms all the values in the list of the provided {@link Values}.
* According to the type of each {@link Value}, either
* {@link ITransformer#transform(INumericValue)} or {@link ITransformer#transform(IOrdinalValue)}
* is called.
* @param values List of values to be transformed
* @return {@link TargetValues}, which contains a list of all transformed values corresponding to the provided input
*/
public TargetValues transformValues(Values values) {
TargetValues result = new TargetValues();
for (Value v : values.getList()) {
if (v instanceof INumericValue) {
result.add(transform((INumericValue) v));
} else {
result.add(transform((IOrdinalValue) v));
}
}
return result;
}
public ChangeLog getChangeLog() {
return this.changeLog;
}
public void setChangeLog(ChangeLog value) {
changeLog = value;
}
public boolean isChanged() {
return changeLog.isAltered();
}
public void touch(String username) {
getChangeLog().touch(username);
}
public void touch() {
getChangeLog().touch();
}
/**
* @see ITouchable#handleChanges(IChangesHandler)
*/
public void handleChanges(IChangesHandler h){
h.visit(this);
}
/**
     * If this Transformer is not correctly configured, this method adds
     * an appropriate error message to the given list and returns false.
     *
     * @param errors list to which validation errors are added
     * @return true if this transformer is correctly configured
*/
public abstract boolean isTransformable(List<ValidationError> errors);
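    /**
     * @return a copy of this transformer, provided by the concrete subclasses
     */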
public abstract Transformer clone();
}
| openpreserve/plato | plato-model/src/main/java/eu/scape_project/planning/model/transform/Transformer.java | Java | apache-2.0 | 4,334 |
/*
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.core.execution.workflow;
/*
* StepFirstWorkflowStrategyTests.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 3/25/11 9:30 AM
*
*/
import java.io.File;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.dtolabs.rundeck.core.common.*;
import com.dtolabs.rundeck.core.execution.*;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.tools.ant.BuildListener;
import org.junit.Assert;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import com.dtolabs.rundeck.core.execution.dispatch.Dispatchable;
import com.dtolabs.rundeck.core.execution.dispatch.DispatcherResult;
import com.dtolabs.rundeck.core.execution.service.NodeExecutorResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.FailureReason;
import com.dtolabs.rundeck.core.execution.workflow.steps.NodeDispatchStepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.StepExecutionResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepException;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionItem;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutionService;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepExecutor;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResult;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.NodeStepResultImpl;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandBase;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ExecCommandExecutionItem;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandBase;
import com.dtolabs.rundeck.core.execution.workflow.steps.node.impl.ScriptFileCommandExecutionItem;
import com.dtolabs.rundeck.core.tools.AbstractBaseTest;
import com.dtolabs.rundeck.core.utils.FileUtils;
import com.dtolabs.rundeck.core.utils.NodeSet;
public class TestStepFirstWorkflowStrategy extends AbstractBaseTest {
Framework testFramework;
String testnode;
private static final String TEST_PROJECT = "StepFirstWorkflowStrategyTests";
public TestStepFirstWorkflowStrategy(String name) {
super(name);
}
public static Test suite() {
return new TestSuite(TestStepFirstWorkflowStrategy.class);
}
protected void setUp() {
super.setUp();
testFramework = getFrameworkInstance();
testnode=testFramework.getFrameworkNodeName();
final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().createFrameworkProject(
TEST_PROJECT,
generateProjectResourcesFile(
new File("src/test/resources/com/dtolabs/rundeck/core/common/test-nodes1.xml")
)
);
}
protected void tearDown() throws Exception {
super.tearDown();
File projectdir = new File(getFrameworkProjectsBase(), TEST_PROJECT);
FileUtils.deleteDir(projectdir);
}
public static void main(String args[]) {
junit.textui.TestRunner.run(suite());
}
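    /**
     * Minimal {@link NodeStepExecutionItem} stub with a configurable node step type,
     * used to exercise the workflow executor's step dispatching.
     */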
static class testWorkflowCmdItem extends BaseExecutionItem implements NodeStepExecutionItem {
private String type;
int flag=-1;
@Override
public String toString() {
return "testWorkflowCmdItem{" +
"type='" + type + '\'' +
", flag=" + flag +
'}';
}
@Override
public String getNodeStepType() {
return type;
}
public String getType() {
return "NodeDispatch";
}
}
/*static class testWorkflowJobCmdItem extends testWorkflowCmdItem implements IWorkflowJobItem {
private String jobIdentifier;
public String getJobIdentifier() {
return jobIdentifier;
}
}*/
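    /**
     * No-op {@link ExecutionListenerOverride}; it logs nothing and exists only to
     * satisfy the execution context in these tests.
     */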
static class testListener implements ExecutionListenerOverride {
public boolean isTerse() {
return false;
}
public String getLogFormat() {
return null;
}
public void log(int i, String s) {
}
@Override
public void event(String eventType, String message, Map eventMeta) {
}
public FailedNodesListener getFailedNodesListener() {
return null;
}
public void beginStepExecution(ExecutionContext context, StepExecutionItem item) {
}
public void finishStepExecution(StatusResult result, ExecutionContext context, StepExecutionItem item) {
}
public void beginNodeExecution(ExecutionContext context, String[] command, INodeEntry node) {
}
public void finishNodeExecution(NodeExecutorResult result, ExecutionContext context, String[] command,
INodeEntry node) {
}
public void beginNodeDispatch(ExecutionContext context, StepExecutionItem item) {
}
public void beginNodeDispatch(ExecutionContext context, Dispatchable item) {
}
public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, StepExecutionItem item) {
}
public void finishNodeDispatch(DispatcherResult result, ExecutionContext context, Dispatchable item) {
}
public void beginFileCopyFileStream(ExecutionContext context, InputStream input, INodeEntry node) {
}
public void beginFileCopyFile(ExecutionContext context, File input, INodeEntry node) {
}
public void beginFileCopyScriptContent(ExecutionContext context, String input, INodeEntry node) {
}
public void finishFileCopy(String result, ExecutionContext context, INodeEntry node) {
}
public void beginExecuteNodeStep(ExecutionContext context, NodeStepExecutionItem item, INodeEntry node) {
}
public void finishExecuteNodeStep(NodeStepResult result, ExecutionContext context, StepExecutionItem item,
INodeEntry node) {
}
public BuildListener getBuildListener() {
return null;
}
public ExecutionListenerOverride createOverride() {
return this;
}
public void setTerse(boolean terse) {
}
public void setLogFormat(String format) {
}
public void setFailedNodesListener(FailedNodesListener listener) {
}
}
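    /**
     * {@link NodeStepExecutor} mock that records every execution item, context and node
     * it is invoked with and returns canned results from {@code resultList}
     * (or throws a NodeStepException when {@code shouldThrowException} is set).
     */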
static class testInterpreter implements NodeStepExecutor {
List<StepExecutionItem> executionItemList = new ArrayList<StepExecutionItem>();
List<ExecutionContext> executionContextList = new ArrayList<ExecutionContext>();
List<INodeEntry> nodeEntryList = new ArrayList<INodeEntry>();
int index = 0;
List<NodeStepResult> resultList = new ArrayList<NodeStepResult>();
boolean shouldThrowException = false;
public NodeStepResult executeNodeStep(StepExecutionContext executionContext,
NodeStepExecutionItem executionItem, INodeEntry iNodeEntry) throws
NodeStepException {
executionItemList.add(executionItem);
executionContextList.add(executionContext);
nodeEntryList.add(iNodeEntry);
if (shouldThrowException) {
throw new NodeStepException("testInterpreter test exception",null,iNodeEntry.getNodename());
}
// System.out.println("return index: (" + index + ") in size: " + resultList.size());
return resultList.get(index++);
}
}
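    /** Failure reason attached to unsuccessful canned {@link testResult} instances. */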
static enum Reason implements FailureReason{
Test
}
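    /**
     * Canned {@link NodeStepResultImpl} whose {@code flag} identifies which step
     * produced it in the assertions below.
     */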
static class testResult extends NodeStepResultImpl {
boolean success;
int flag;
INodeEntry node;
testResult(boolean success, int flag) {
super(null,success?null: TestStepFirstWorkflowStrategy.Reason.Test,success?null:"test failure",null);
this.success = success;
this.flag = flag;
}
@Override
public Exception getException() {
return null;
}
public boolean isSuccess() {
return success;
}
@Override
public String toString() {
return "testResult{" +
"success=" + success +
", flag=" + flag +
'}';
}
public INodeEntry getNode() {
return node;
}
}
public void testExecuteWorkflow() throws Exception {
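        //sanity check: the generated test project resources contain the expected two nodes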
final IRundeckProject frameworkProject = testFramework.getFrameworkProjectMgr().getFrameworkProject(
TEST_PROJECT);
final INodeSet nodes = frameworkProject.getNodeSet();
assertNotNull(nodes);
assertEquals(2, nodes.getNodes().size());
}
public void testExecuteWorkflow_empty() throws Exception {
//test empty workflow
final NodeSet nodeset = new NodeSet();
final WorkflowImpl workflow = new WorkflowImpl(new ArrayList<StepExecutionItem>(), 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet())
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock);
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(0, interpreterMock.executionItemList.size());
}
public void testExecuteWorkflow_undefined_item() throws Exception {
//test undefined workflow item
final NodeSet nodeset = new NodeSet();
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
commands.add(new testWorkflowCmdItem());
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset.nodeSelectorWithDefaultAll())
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(nodeset.nodeSelectorWithDefaultAll(),
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock);
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.out);
}
assertFalse(result.isSuccess());
assertEquals(0, interpreterMock.executionItemList.size());
assertNotNull("threw exception: " + result.getException(), result.getException());
assertTrue("threw exception: " + result.getException(),
result.getException() instanceof NullPointerException);
assertEquals("threw exception: " + result.getException(),
"provider name was null for Service: WorkflowNodeStep",
result.getException().getMessage());
}
public void testExecuteWorkflow_scriptExec() throws Exception {
//test script exec item
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ScriptFileCommandBase(){
@Override
public String getScript() {
return "a command";
}
};
commands.add(testWorkflowCmdItem);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock);
        //set return result
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(1, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) executionItem1;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
assertEquals(1, interpreterMock.executionContextList.size());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals("expected " + nodeset + ", but was " + executionContext.getNodeSelector(), nodeset,
executionContext.getNodeSelector());
}
public void testExecuteWorkflow_commandexec() throws Exception {
//test command exec item
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "command"};
}
};
commands.add(testWorkflowCmdItem);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock);
        //set return result
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(1, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(2, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("command", execItem.getCommand()[1]);
assertEquals(1, interpreterMock.executionContextList.size());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
public void testExecuteWorkflowThreeItems() throws Exception{
{
//test workflow of three successful items
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2","command"};
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
@Override
public String getServerScriptFilePath() {
return "/some/file/path";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "2"};
}
};
commands.add(testWorkflowCmdItemScript2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results
interpreterMock.resultList.add(new testResult(true, 0));
interpreterMock.resultList.add(new testResult(true, 1));
interpreterMock.resultList.add(new testResult(true, 2));
final WorkflowExecutionResult result = strategy.executeWorkflow(context,executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(3, test1.size());
for (final int i : new int[]{0, 1, 2}) {
final StepExecutionResult interpreterResult = test1.get(i);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertTrue(val.isSuccess());
assertEquals(i, val.flag);
}
assertEquals(3, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + item3.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem2 = (ScriptFileCommandExecutionItem) item3;
assertNull(scriptItem2.getScript());
assertNull(scriptItem2.getScriptAsStream());
assertEquals("/some/file/path", scriptItem2.getServerScriptFilePath());
assertNotNull(scriptItem2.getArgs());
assertEquals(2, scriptItem2.getArgs().length);
assertEquals("-testargs", scriptItem2.getArgs()[0]);
assertEquals("2", scriptItem2.getArgs()[1]);
assertEquals(3, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1, 2}) {
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals("item "+i,TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull("item " + i, executionContext.getDataContext());
assertNotNull("item " + i, executionContext.getDataContext().get("node"));
assertEquals("item " + i,0, executionContext.getLoglevel());
assertEquals("item " + i,"user1", executionContext.getUser());
assertEquals("item " + i,nodeset, executionContext.getNodeSelector());
}
}
}
public void testWorkflowFailNoKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=false
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
@Override
public String getServerScriptFilePath() {
return "/some/file/path";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "2"};
}
};
commands.add(testWorkflowCmdItemScript2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(false);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results, fail on second item
interpreterMock.resultList.add(new testResult(true, 0));
interpreterMock.resultList.add(new testResult(false, 1));
interpreterMock.resultList.add(new testResult(true, 2));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (null != result.getException()) {
result.getException().printStackTrace(System.out);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
StepExecutionResult result1 = result.getResultSet().get(1);
final DispatcherResult executionResult = NodeDispatchStepExecutor.extractDispatcherResult(result1);
assertNotNull(executionResult.getResults());
assertEquals(1, executionResult.getResults().size());
assertNotNull(executionResult.getResults().get(testnode));
final StatusResult testnode1 = executionResult.getResults().get(testnode);
assertNotNull(testnode1);
assertTrue(testnode1 instanceof testResult);
testResult failResult = (testResult) testnode1;
assertEquals(1, failResult.flag);
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(2, test1.size());
for (final int i : new int[]{0, 1}) {
final StepExecutionResult interpreterResult = test1.get(i);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(i, val.flag);
if(0==i){
assertTrue(val.isSuccess());
}else{
assertFalse(val.isSuccess());
}
}
assertEquals(2, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
assertNotNull(scriptItem.getArgs());
assertEquals(2, scriptItem.getArgs().length);
assertEquals("-testargs", scriptItem.getArgs()[0]);
assertEquals("1",scriptItem.getArgs()[1]);
assertEquals(2, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1}) {
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
public void testWorkflowFailYesKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=true
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final StepExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() {
@Override
public String getServerScriptFilePath() {
return "/some/file/path";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "2"};
}
};
commands.add(testWorkflowCmdItemScript2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(true);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", interpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results, fail on second item
interpreterMock.resultList.add(new testResult(true, 0));
interpreterMock.resultList.add(new testResult(false, 1));
interpreterMock.resultList.add(new testResult(true, 2));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(3, test1.size());
for (final int i : new int[]{0, 1, 2}) {
final StepExecutionResult interpreterResult = test1.get(i);
assertTrue(NodeDispatchStepExecutor.isWrappedDispatcherResult(interpreterResult));
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(i, val.flag);
if (1 == i) {
assertFalse(val.isSuccess());
} else {
assertTrue(val.isSuccess());
}
}
assertEquals(3, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final StepExecutionItem item2 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2;
assertEquals("a command", scriptItem.getScript());
assertNull(scriptItem.getScriptAsStream());
assertNull(scriptItem.getServerScriptFilePath());
assertNotNull(scriptItem.getArgs());
assertEquals(2, scriptItem.getArgs().length);
assertEquals("-testargs", scriptItem.getArgs()[0]);
assertEquals("1",scriptItem.getArgs()[1]);
final StepExecutionItem item3 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + item2.getClass().getName(),
item2 instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem scriptItem3 = (ScriptFileCommandExecutionItem) item3;
assertEquals("/some/file/path", scriptItem3.getServerScriptFilePath());
assertNull(scriptItem3.getScript());
assertNull(scriptItem3.getScriptAsStream());
assertNotNull(scriptItem3.getArgs());
assertEquals(2, scriptItem3.getArgs().length);
assertEquals("-testargs", scriptItem3.getArgs()[0]);
assertEquals("2", scriptItem3.getArgs()[1]);
assertEquals(3, interpreterMock.executionContextList.size());
for (final int i : new int[]{0, 1}) {
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
public void testFailureHandlerItemNoKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=false, and a failureHandler
final boolean KEEPGOING_TEST = false;
final boolean STEP_0_RESULT = false;
final boolean STEP_1_RESULT = true;
final boolean HANDLER_RESULT = true;
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "failure handler script";
}
@Override
public String[] getArgs() {
return new String[]{"failure","script","args"};
}
};
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "a command";
}
@Override
public String[] getArgs() {
return new String[]{"-testargs", "1"};
}
};
commands.add(testWorkflowCmdItemScript);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(KEEPGOING_TEST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter handlerInterpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", handlerInterpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results, fail on second item
interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
StepExecutionResult result1 = result.getResultSet().get(0);
final DispatcherResult executionResult
= NodeDispatchStepExecutor.extractDispatcherResult(result1);
assertNotNull(executionResult.getResults());
assertEquals(1, executionResult.getResults().size());
assertNotNull(executionResult.getResults().get(testnode));
final StatusResult testnode1 = executionResult.getResults().get(testnode);
assertNotNull(testnode1);
assertTrue(testnode1 instanceof testResult);
testResult failResult = (testResult) testnode1;
assertEquals(0, failResult.flag);
assertEquals(1, result.getResultSet().size());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
assertEquals(1, test1.size());
final int i =0;
final StepExecutionResult interpreterResult = test1.get(i);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(i, val.flag);
assertFalse(val.isSuccess());
assertEquals(1, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
assertEquals(1, interpreterMock.executionContextList.size());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(i);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
//check handler item was executed
assertEquals(1, handlerInterpreterMock.executionItemList.size());
final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItemX.getClass().getName(),
executionItemX instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
assertNotNull(execItemX.getScript());
assertNotNull(execItemX.getArgs());
assertEquals("failure handler script", execItemX.getScript());
assertEquals(3, execItemX.getArgs().length);
assertEquals("failure", execItemX.getArgs()[0]);
assertEquals("script", execItemX.getArgs()[1]);
assertEquals("args", execItemX.getArgs()[2]);
assertEquals(1, handlerInterpreterMock.executionContextList.size());
final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(i);
assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
assertNotNull(executionContextX.getDataContext());
assertNotNull(executionContextX.getDataContext().get("node"));
assertEquals(0, executionContextX.getLoglevel());
assertEquals("user1", executionContextX.getUser());
assertEquals(nodeset, executionContextX.getNodeSelector());
}
}
public void testFailureHandlerItemYesKeepgoing() throws Exception{
{
//test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that fails
final boolean KEEPGOING_TEST = true;
final boolean STEP_0_RESULT = false;
final boolean STEP_1_RESULT = true;
final boolean HANDLER_RESULT = false;
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "failure handler script";
}
@Override
public String[] getArgs() {
return new String[]{"failure","script","args"};
}
@Override
public String toString() {
return "testHandlerItem";
}
};
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem";
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "3", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem2";
}
};
commands.add(testWorkflowCmdItem2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(KEEPGOING_TEST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter handlerInterpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", handlerInterpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results
interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertFalse(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
System.out.println("results: "+test1);
assertEquals(2, interpreterMock.executionItemList.size());
assertEquals(2, interpreterMock.executionContextList.size());
//check handler item was executed
assertEquals(1, handlerInterpreterMock.executionItemList.size());
assertEquals(1, handlerInterpreterMock.executionContextList.size());
assertEquals(2, test1.size());
int resultIndex =0;
int stepNum=0;
{
//first step result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(0, val.flag);
assertFalse(val.isSuccess());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("2", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
resultIndex=1;
//
// {
// //failure handler result
// final StepExecutionResult interpreterResult = test1.get(resultIndex);
// final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
// assertEquals(1, dr.getResults().size());
// final NodeStepResult nrs = dr.getResults().values().iterator().next();
// assertTrue("unexpected class: " + nrs.getClass(),
// nrs instanceof testResult);
// testResult val = (testResult) nrs;
// assertEquals(0, val.flag);
// assertFalse(val.isSuccess());
//
// final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum);
// assertTrue("wrong class: " + executionItemX.getClass().getName(),
// executionItemX instanceof ScriptFileCommandExecutionItem);
// ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
// assertNotNull(execItemX.getScript());
// assertNotNull(execItemX.getArgs());
// assertEquals("failure handler script", execItemX.getScript());
// assertEquals(3, execItemX.getArgs().length);
// assertEquals("failure", execItemX.getArgs()[0]);
// assertEquals("script", execItemX.getArgs()[1]);
// assertEquals("args", execItemX.getArgs()[2]);
//
//
// final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum);
// assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
// assertNull(executionContextX.getDataContext());
// assertEquals(0, executionContextX.getLoglevel());
// assertEquals("user1", executionContextX.getUser());
// assertEquals(nodeset, executionContextX.getNodeSelector());
// assertNull(executionContextX.getArgs());
// }
// resultIndex=2;
stepNum = 1;
{
//second step result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(1, val.flag);
assertTrue(val.isSuccess());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("3", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
public void testFailureHandlerItemYesKeepgoingHandlerSuccess() throws Exception {
{
//test a workflow with a failing item (1), with keepgoing=true, and a failureHandler that succeeds
final boolean KEEPGOING_TEST = true;
final boolean STEP_0_RESULT = false;
final boolean STEP_1_RESULT = true;
final boolean HANDLER_RESULT = true;
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final StepExecutionItem testHandlerItem = new ScriptFileCommandBase() {
@Override
public String getScript() {
return "failure handler script";
}
@Override
public String[] getArgs() {
return new String[]{"failure","script","args"};
}
@Override
public String toString() {
return "testHandlerItem";
}
};
final StepExecutionItem testWorkflowCmdItem = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "2", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem";
}
};
commands.add(testWorkflowCmdItem);
final StepExecutionItem testWorkflowCmdItem2 = new ExecCommandBase() {
@Override
public String[] getCommand() {
return new String[]{"a", "3", "command"};
}
@Override
public StepExecutionItem getFailureHandler() {
return testHandlerItem;
}
@Override
public String toString() {
return "testWorkflowCmdItem2";
}
};
commands.add(testWorkflowCmdItem2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
workflow.setKeepgoing(KEEPGOING_TEST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter handlerInterpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
// interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, interpreterMock);
interpreterService.registerInstance("exec", interpreterMock);
interpreterService.registerInstance("script", handlerInterpreterMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return results
interpreterMock.resultList.add(new testResult(STEP_0_RESULT, 0));
interpreterMock.resultList.add(new testResult(STEP_1_RESULT, 1));
handlerInterpreterMock.resultList.add(new testResult(HANDLER_RESULT, 0));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertTrue(result.isSuccess());
assertNull("threw exception: " + result.getException(), result.getException());
assertNotNull(result.getResultSet());
final List<StepExecutionResult> test1 = result.getResultSet();
System.err.println("results: "+test1);
assertEquals(2, test1.size());
assertEquals(2, interpreterMock.executionItemList.size());
assertEquals(2, interpreterMock.executionContextList.size());
//check handler item was executed
assertEquals(1, handlerInterpreterMock.executionItemList.size());
assertEquals(1, handlerInterpreterMock.executionContextList.size());
int resultIndex =0;
int stepNum=0;
{
//failure handler result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(0, val.flag);
assertTrue(val.isSuccess());
final StepExecutionItem executionItemX = handlerInterpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItemX.getClass().getName(),
executionItemX instanceof ScriptFileCommandExecutionItem);
ScriptFileCommandExecutionItem execItemX = (ScriptFileCommandExecutionItem) executionItemX;
assertNotNull(execItemX.getScript());
assertNotNull(execItemX.getArgs());
assertEquals("failure handler script", execItemX.getScript());
assertEquals(3, execItemX.getArgs().length);
assertEquals("failure", execItemX.getArgs()[0]);
assertEquals("script", execItemX.getArgs()[1]);
assertEquals("args", execItemX.getArgs()[2]);
final ExecutionContext executionContextX = handlerInterpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContextX.getFrameworkProject());
assertNotNull(executionContextX.getDataContext());
assertNotNull(executionContextX.getDataContext().get("node"));
assertEquals(0, executionContextX.getLoglevel());
assertEquals("user1", executionContextX.getUser());
assertEquals(nodeset, executionContextX.getNodeSelector());
}
resultIndex=1;
stepNum = 1;
{
//second step result
final StepExecutionResult interpreterResult = test1.get(resultIndex);
final DispatcherResult dr = NodeDispatchStepExecutor.extractDispatcherResult(interpreterResult);
assertEquals(1, dr.getResults().size());
final NodeStepResult nrs = dr.getResults().values().iterator().next();
assertTrue("unexpected class: " + nrs.getClass(),
nrs instanceof testResult);
testResult val = (testResult) nrs;
assertEquals(1, val.flag);
assertTrue(val.isSuccess());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(stepNum);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof ExecCommandExecutionItem);
ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1;
assertNotNull(execItem.getCommand());
assertEquals(3, execItem.getCommand().length);
assertEquals("a", execItem.getCommand()[0]);
assertEquals("3", execItem.getCommand()[1]);
assertEquals("command", execItem.getCommand()[2]);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(stepNum);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
}
public void testGenericItem() throws Exception{
{
//test jobref item
final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName());
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final testWorkflowCmdItem item = new testWorkflowCmdItem();
item.type = "my-type";
commands.add(item);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("my-type", interpreterMock);
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return result
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(1, interpreterMock.executionItemList.size());
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, interpreterMock.executionContextList.size());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(nodeset, executionContext.getNodeSelector());
}
}
public void testMultipleNodes() throws Exception{
{
//test jobref item
final NodeSet nodeset = new NodeSet();
nodeset.createInclude().setName(".*");
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final testWorkflowCmdItem item = new testWorkflowCmdItem();
item.type = "my-type";
commands.add(item);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("my-type", interpreterMock);
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return result node 1
interpreterMock.resultList.add(new NodeStepResultImpl(null));
            //set return result node 2
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(2, interpreterMock.executionItemList.size());
assertEquals(2, interpreterMock.executionContextList.size());
{
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
}
{
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
final ExecutionContext executionContext = interpreterMock.executionContextList.get(1);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
}
}
}
public void testMultipleItemsAndNodes() throws Exception{
{
//test jobref item
final NodeSet nodeset = new NodeSet();
nodeset.createInclude().setName(".*");
final ArrayList<StepExecutionItem> commands = new ArrayList<StepExecutionItem>();
final testWorkflowCmdItem item = new testWorkflowCmdItem();
item.flag=0;
item.type = "my-type";
commands.add(item);
final testWorkflowCmdItem item2 = new testWorkflowCmdItem();
item2.flag = 1;
item2.type = "my-type";
commands.add(item2);
final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false,
WorkflowExecutor.STEP_FIRST);
final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow);
final StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
final StepExecutionContext context =
new ExecutionContextImpl.Builder()
.frameworkProject(TEST_PROJECT)
.user("user1")
.nodeSelector(nodeset)
.executionListener(new testListener())
.framework(testFramework)
.nodes(NodeFilter.filterNodes(
nodeset,
testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT).getNodeSet()
))
.build();
//setup testInterpreter for all command types
final NodeStepExecutionService interpreterService = NodeStepExecutionService.getInstanceForFramework(
testFramework);
testInterpreter interpreterMock = new testInterpreter();
testInterpreter failMock = new testInterpreter();
failMock.shouldThrowException = true;
interpreterService.registerInstance("my-type", interpreterMock);
interpreterService.registerInstance("exec", failMock);
interpreterService.registerInstance("script", failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock);
interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock);
            //set return result node 1 step 1
interpreterMock.resultList.add(new NodeStepResultImpl(null));
            //set return result node 2 step 1
interpreterMock.resultList.add(new NodeStepResultImpl(null));
            //set return result node 1 step 2
interpreterMock.resultList.add(new NodeStepResultImpl(null));
            //set return result node 2 step 2
interpreterMock.resultList.add(new NodeStepResultImpl(null));
final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem);
assertNotNull(result);
if (!result.isSuccess() && null != result.getException()) {
result.getException().printStackTrace(System.err);
}
assertNull("threw exception: " + result.getException(), result.getException());
assertTrue(result.isSuccess());
assertEquals(4, interpreterMock.executionItemList.size());
assertEquals(4, interpreterMock.executionContextList.size());
{//node 1 step 1
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(0);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(0, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(0);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
}
{//node 2 step 1
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(1);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(0, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(1);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
}
{//node 1 step 2
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(2);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(2);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("test1"), executionContext.getNodeSelector());
}
{//node 2 step 2
final StepExecutionItem executionItem1 = interpreterMock.executionItemList.get(3);
assertTrue("wrong class: " + executionItem1.getClass().getName(),
executionItem1 instanceof testWorkflowCmdItem);
testWorkflowCmdItem execItem = (testWorkflowCmdItem) executionItem1;
assertNotNull(execItem.getNodeStepType());
assertEquals("my-type", execItem.getNodeStepType());
assertEquals(1, execItem.flag);
final ExecutionContext executionContext = interpreterMock.executionContextList.get(3);
assertEquals(TEST_PROJECT, executionContext.getFrameworkProject());
assertNotNull(executionContext.getDataContext());
assertNotNull(executionContext.getDataContext().get("node"));
assertEquals(0, executionContext.getLoglevel());
assertEquals("user1", executionContext.getUser());
assertEquals(SelectorUtils.singleNode("testnode2"), executionContext.getNodeSelector());
}
}
}
public void testCreatePrintableDataContext() {
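        // Secure option values must be masked in the printable context wherever they appear
        // (both in the secure-options map and when echoed in the regular options map), while
        // ordinary option values stay untouched.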
Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>();
String otherKey = "other";
Map<String, String> otherData = new HashMap<String, String>();
dataContext.put(otherKey, otherData);
Map<String, String> secureData = new HashMap<String, String>();
String secureKey = "secureKey";
secureData.put(secureKey, "secureValue");
dataContext.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureData);
Map<String, String> regularData = new HashMap<String, String>();
String insecureKey = "insecureKey";
regularData.put(insecureKey, "insecureValue");
regularData.put(secureKey, "secureValue");
dataContext.put(StepFirstWorkflowExecutor.OPTION_KEY, regularData);
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext);
Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey));
Map<String, String> resultSecureData = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY);
Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultSecureData.get(secureKey));
Map<String, String> resultRegularData = result.get(StepFirstWorkflowExecutor.OPTION_KEY);
Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultRegularData.get(secureKey));
Assert.assertEquals("Expected insecure value to be untouched", regularData.get(insecureKey), resultRegularData.get(insecureKey));
}
public void testCreatePrintableDataContextNoDataContext() {
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
Map<String, Map<String, String>> result = strategy.createPrintableDataContext(null);
Assert.assertTrue("Expected empty data context", result.isEmpty());
}
public void testCreatePrintableDataContextEmptyDataContext() {
Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>();
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext);
Assert.assertTrue("Expected empty data context", result.isEmpty());
}
public void testCreatePrintableDataContextNoSecureData() {
Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>();
String otherKey = "other";
Map<String, String> otherData = new HashMap<String, String>();
dataContext.put(otherKey, otherData);
Map<String, String> regularData = new HashMap<String, String>();
String insecureKey = "insecureKey";
regularData.put(insecureKey, "insecureValue");
dataContext.put(StepFirstWorkflowExecutor.OPTION_KEY, regularData);
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext);
Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey));
Map<String, String> resultRegularData = result.get(StepFirstWorkflowExecutor.OPTION_KEY);
Assert.assertEquals("Expected insecure value to be untouched", regularData.get(insecureKey), resultRegularData.get(insecureKey));
}
public void testCreatePrintableDataContextNoRegularData() {
Map<String, Map<String, String>> dataContext = new HashMap<String, Map<String, String>>();
String otherKey = "other";
Map<String, String> otherData = new HashMap<String, String>();
dataContext.put(otherKey, otherData);
Map<String, String> secureData = new HashMap<String, String>();
String secureKey = "secureKey";
secureData.put(secureKey, "secureValue");
dataContext.put(StepFirstWorkflowExecutor.SECURE_OPTION_KEY, secureData);
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
Map<String, Map<String, String>> result = strategy.createPrintableDataContext(dataContext);
Assert.assertSame("Expected other data to be present", otherData, result.get(otherKey));
Map<String, String> resultSecureData = result.get(StepFirstWorkflowExecutor.SECURE_OPTION_KEY);
Assert.assertEquals("Expected secure value to be replaced", StepFirstWorkflowExecutor.SECURE_OPTION_VALUE, resultSecureData.get(secureKey));
}
@SuppressWarnings("unchecked")
public void testExecuteWorkflowUsesPrintableDataContext() {
ExecutionListener listener = Mockito.mock(ExecutionListener.class);
StepExecutionContext context = Mockito.mock(StepExecutionContext.class);
Mockito.when(context.getExecutionListener()).thenReturn(listener);
String printableContextToString = "this is hopefully some string that won't appear elsewhere";
Map<String, Map<String, String>> printableContext = Mockito.mock(Map.class);
Mockito.when(printableContext.toString()).thenReturn(printableContextToString);
String dataContextToString = "this is another magic string that hopefully won't appear elsewhere";
Map<String, Map<String, String>> dataContext = Mockito.mock(Map.class);
Mockito.when(dataContext.toString()).thenReturn(dataContextToString);
Mockito.when(context.getDataContext()).thenReturn(dataContext);
StepFirstWorkflowExecutor strategy = new StepFirstWorkflowExecutor(testFramework);
strategy = Mockito.spy(strategy);
Mockito.doReturn(printableContext).when(strategy).createPrintableDataContext(Mockito.same(dataContext));
WorkflowExecutionItem item = Mockito.mock(WorkflowExecutionItem.class);
IWorkflow workflow = Mockito.mock(IWorkflow.class);
Mockito.doReturn(workflow).when(item).getWorkflow();
strategy.executeWorkflowImpl(context, item);
ArgumentCaptor<String> logLineCaptor = ArgumentCaptor.forClass(String.class);
Mockito.verify(listener, Mockito.atLeastOnce()).log(Mockito.anyInt(), logLineCaptor.capture());
for (String line : logLineCaptor.getAllValues()) {
if (line.startsWith(StepFirstWorkflowExecutor.DATA_CONTEXT_PREFIX)) {
Assert.assertTrue("Expected printable data context string.", line.contains(printableContextToString));
Assert.assertFalse("Not expecting raw data context string.", line.contains(dataContextToString));
}
}
}
}
| jgpacker/rundeck | core/src/test/java/com/dtolabs/rundeck/core/execution/workflow/TestStepFirstWorkflowStrategy.java | Java | apache-2.0 | 100,809 |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.hooks.rtc.network;
import java.net.URI;
import org.apache.http.client.methods.HttpPost;
public class HttpPatch extends HttpPost {
public HttpPatch() {
super();
}
public HttpPatch(String uri) {
super(uri);
}
public HttpPatch(URI uri) {
super(uri);
}
@Override
public String getMethod() {
return "PATCH";
}
}
| GerritCodeReview/plugins_hooks-rtc | src/main/java/com/googlesource/gerrit/plugins/hooks/rtc/network/HttpPatch.java | Java | apache-2.0 | 999 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.runtime;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.io.IOReadableWritable;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.io.network.api.reader.RecordReader;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.util.TestLogger;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
/**
* Manually test the throughput of the network stack.
*/
@Ignore
public class NetworkStackThroughputITCase extends TestLogger {
private static final Logger LOG = LoggerFactory.getLogger(NetworkStackThroughputITCase.class);
private static final String DATA_VOLUME_GB_CONFIG_KEY = "data.volume.gb";
private static final String USE_FORWARDER_CONFIG_KEY = "use.forwarder";
private static final String PARALLELISM_CONFIG_KEY = "num.subtasks";
private static final String NUM_SLOTS_PER_TM_CONFIG_KEY = "num.slots.per.tm";
private static final String IS_SLOW_SENDER_CONFIG_KEY = "is.slow.sender";
private static final String IS_SLOW_RECEIVER_CONFIG_KEY = "is.slow.receiver";
private static final int IS_SLOW_SLEEP_MS = 10;
private static final int IS_SLOW_EVERY_NUM_RECORDS = (2 * 32 * 1024) / SpeedTestRecord.RECORD_SIZE;
// ------------------------------------------------------------------------
// wrapper to reuse JavaProgramTestBase code in runs via main()
private static class TestBaseWrapper extends JavaProgramTestBase {
private int dataVolumeGb;
private boolean useForwarder;
private boolean isSlowSender;
private boolean isSlowReceiver;
private int parallelism;
public TestBaseWrapper(Configuration config) {
super(config);
dataVolumeGb = config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);
useForwarder = config.getBoolean(USE_FORWARDER_CONFIG_KEY, true);
isSlowSender = config.getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);
isSlowReceiver = config.getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);
parallelism = config.getInteger(PARALLELISM_CONFIG_KEY, 1);
int numSlots = config.getInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, 1);
if (parallelism % numSlots != 0) {
throw new RuntimeException("The test case defines a parallelism that is not a multiple of the slots per task manager.");
}
setNumTaskManagers(parallelism / numSlots);
setTaskManagerNumSlots(numSlots);
}
protected JobGraph getJobGraph() throws Exception {
return createJobGraph(dataVolumeGb, useForwarder, isSlowSender, isSlowReceiver, parallelism);
}
private JobGraph createJobGraph(int dataVolumeGb, boolean useForwarder, boolean isSlowSender,
boolean isSlowReceiver, int numSubtasks) {
JobGraph jobGraph = new JobGraph("Speed Test");
SlotSharingGroup sharingGroup = new SlotSharingGroup();
JobVertex producer = new JobVertex("Speed Test Producer");
jobGraph.addVertex(producer);
producer.setSlotSharingGroup(sharingGroup);
producer.setInvokableClass(SpeedTestProducer.class);
producer.setParallelism(numSubtasks);
producer.getConfiguration().setInteger(DATA_VOLUME_GB_CONFIG_KEY, dataVolumeGb);
producer.getConfiguration().setBoolean(IS_SLOW_SENDER_CONFIG_KEY, isSlowSender);
JobVertex forwarder = null;
if (useForwarder) {
forwarder = new JobVertex("Speed Test Forwarder");
jobGraph.addVertex(forwarder);
forwarder.setSlotSharingGroup(sharingGroup);
forwarder.setInvokableClass(SpeedTestForwarder.class);
forwarder.setParallelism(numSubtasks);
}
JobVertex consumer = new JobVertex("Speed Test Consumer");
jobGraph.addVertex(consumer);
consumer.setSlotSharingGroup(sharingGroup);
consumer.setInvokableClass(SpeedTestConsumer.class);
consumer.setParallelism(numSubtasks);
consumer.getConfiguration().setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, isSlowReceiver);
if (useForwarder) {
forwarder.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
consumer.connectNewDataSetAsInput(forwarder, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
}
else {
consumer.connectNewDataSetAsInput(producer, DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
}
return jobGraph;
}
@Override
protected void testProgram() throws Exception {
JobExecutionResult jer = executor.submitJobAndWait(getJobGraph(), false);
int dataVolumeGb = this.config.getInteger(DATA_VOLUME_GB_CONFIG_KEY, 1);
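			// 1 GB = 1024 MB = 8192 Mbit, hence the factor of 8192 below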
long dataVolumeMbit = dataVolumeGb * 8192;
long runtimeSecs = jer.getNetRuntime(TimeUnit.SECONDS);
int mbitPerSecond = (int) (((double) dataVolumeMbit) / runtimeSecs);
LOG.info(String.format("Test finished with throughput of %d MBit/s (runtime [secs]: %d, " +
"data volume [gb/mbits]: %d/%d)", mbitPerSecond, runtimeSecs, dataVolumeGb, dataVolumeMbit));
}
}
// ------------------------------------------------------------------------
private static class SpeedTestProducer extends AbstractInvokable {
@Override
public void invoke() throws Exception {
RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));
try {
// Determine the amount of data to send per subtask
int dataVolumeGb = getTaskConfiguration().getInteger(NetworkStackThroughputITCase.DATA_VOLUME_GB_CONFIG_KEY, 1);
long dataMbPerSubtask = (dataVolumeGb * 1024) / getCurrentNumberOfSubtasks();
long numRecordsToEmit = (dataMbPerSubtask * 1024 * 1024) / SpeedTestRecord.RECORD_SIZE;
LOG.info(String.format("%d/%d: Producing %d records (each record: %d bytes, total: %.2f GB)",
getIndexInSubtaskGroup() + 1, getCurrentNumberOfSubtasks(), numRecordsToEmit,
SpeedTestRecord.RECORD_SIZE, dataMbPerSubtask / 1024.0));
boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_SENDER_CONFIG_KEY, false);
int numRecords = 0;
SpeedTestRecord record = new SpeedTestRecord();
for (long i = 0; i < numRecordsToEmit; i++) {
if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
Thread.sleep(IS_SLOW_SLEEP_MS);
}
writer.emit(record);
}
}
finally {
writer.flush();
}
}
}
private static class SpeedTestForwarder extends AbstractInvokable {
@Override
public void invoke() throws Exception {
RecordReader<SpeedTestRecord> reader = new RecordReader<>(
getEnvironment().getInputGate(0),
SpeedTestRecord.class,
getEnvironment().getTaskManagerInfo().getTmpDirectories());
RecordWriter<SpeedTestRecord> writer = new RecordWriter<>(getEnvironment().getWriter(0));
try {
SpeedTestRecord record;
while ((record = reader.next()) != null) {
writer.emit(record);
}
}
finally {
reader.clearBuffers();
writer.flush();
}
}
}
private static class SpeedTestConsumer extends AbstractInvokable {
@Override
public void invoke() throws Exception {
RecordReader<SpeedTestRecord> reader = new RecordReader<>(
getEnvironment().getInputGate(0),
SpeedTestRecord.class,
getEnvironment().getTaskManagerInfo().getTmpDirectories());
try {
boolean isSlow = getTaskConfiguration().getBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, false);
int numRecords = 0;
while (reader.next() != null) {
if (isSlow && (numRecords++ % IS_SLOW_EVERY_NUM_RECORDS) == 0) {
Thread.sleep(IS_SLOW_SLEEP_MS);
}
}
}
finally {
reader.clearBuffers();
}
}
}
private static class SpeedTestRecord implements IOReadableWritable {
private static final int RECORD_SIZE = 128;
private final byte[] buf = new byte[RECORD_SIZE];
public SpeedTestRecord() {
for (int i = 0; i < RECORD_SIZE; ++i) {
this.buf[i] = (byte) (i % 128);
}
}
@Override
public void write(DataOutputView out) throws IOException {
out.write(this.buf);
}
@Override
public void read(DataInputView in) throws IOException {
in.readFully(this.buf);
}
}
// ------------------------------------------------------------------------
public void testThroughput() throws Exception {
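		// Each row: { dataVolumeGb, useForwarder, isSlowSender, isSlowReceiver, parallelism, slotsPerTaskManager }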
Object[][] configParams = new Object[][]{
new Object[]{1, false, false, false, 4, 2},
new Object[]{1, true, false, false, 4, 2},
new Object[]{1, true, true, false, 4, 2},
new Object[]{1, true, false, true, 4, 2},
new Object[]{2, true, false, false, 4, 2},
new Object[]{4, true, false, false, 4, 2},
new Object[]{4, true, false, false, 8, 4},
};
for (Object[] p : configParams) {
Configuration config = new Configuration();
config.setInteger(DATA_VOLUME_GB_CONFIG_KEY, (Integer) p[0]);
config.setBoolean(USE_FORWARDER_CONFIG_KEY, (Boolean) p[1]);
config.setBoolean(IS_SLOW_SENDER_CONFIG_KEY, (Boolean) p[2]);
config.setBoolean(IS_SLOW_RECEIVER_CONFIG_KEY, (Boolean) p[3]);
config.setInteger(PARALLELISM_CONFIG_KEY, (Integer) p[4]);
config.setInteger(NUM_SLOTS_PER_TM_CONFIG_KEY, (Integer) p[5]);
TestBaseWrapper test = new TestBaseWrapper(config);
test.startCluster();
System.out.println(Arrays.toString(p));
test.testProgram();
test.stopCluster();
}
}
private void runAllTests() throws Exception {
testThroughput();
System.out.println("Done.");
}
public static void main(String[] args) throws Exception {
new NetworkStackThroughputITCase().runAllTests();
}
}
| mtunique/flink | flink-tests/src/test/java/org/apache/flink/test/runtime/NetworkStackThroughputITCase.java | Java | apache-2.0 | 10,771 |
/*
* *************************************************************************
* Copyright (C) FRS Belgium NV ("FRSGlobal"). All rights reserved.
*
* This computer program is protected by copyright law and international
* treaties. Unauthorized reproduction or distribution of this program,
* or any portion of it, may result in severe civil and criminal penalties,
* and will be prosecuted to the maximum extent possible under the law.
* *************************************************************************
*/
package org.cluj.bus.servlet;
import com.google.gson.Gson;
import org.cluj.bus.model.BusSchedule;
import org.cluj.bus.model.BusScheduleDTO;
import org.cluj.bus.model.CategorySchedule;
import org.cluj.bus.services.JPARepository;
import org.cluj.bus.util.ScheduleUtilities;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
public class BusScheduleServlet extends HttpServlet
{
private static final Logger LOGGER = Logger.getLogger(BusScheduleServlet.class.getName());
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException
{
doPost(req, resp);
}
@Override
protected void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException
{
String busId = httpServletRequest.getParameter(ServletUtils.BUS_ID_PARAMETER_KEY);
ServletUtils.sendResponse(httpServletResponse, getResponseString(busId));
}
private String getResponseString(String busId)
{
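        // Group the schedule rows by their "days" pattern: one CategorySchedule per distinct
        // set of applicable days, each collecting the start times that belong to it.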
List<BusSchedule> busSchedules = new JPARepository<>(BusSchedule.class).findAll("busId", busId);
Map<String, CategorySchedule> categorySchedules = new HashMap<>();
for (BusSchedule busSchedule : busSchedules)
{
String days = busSchedule.getDays();
CategorySchedule categorySchedule = categorySchedules.get(days);
if (categorySchedule == null)
{
categorySchedule = new CategorySchedule();
categorySchedules.put(days, categorySchedule);
categorySchedule.setDisplayName(busSchedule.getCategory());
categorySchedule.setApplicableDays(getApplicableDays(days));
}
Collection<Date> startTimes = categorySchedule.getStartTimes();
if (startTimes == null)
{
startTimes = new ArrayList<>();
categorySchedule.setStartTimes(startTimes);
}
try
{
startTimes.add(ScheduleUtilities.getStartTime(busSchedule.getStartTime()));
}
catch (ParseException e)
{
LOGGER.log(Level.SEVERE, "Error parsing start time", e);
}
}
BusScheduleDTO schedule = new BusScheduleDTO();
schedule.setSchedules(categorySchedules.values());
return new Gson().toJson(schedule);
}
private Collection<Integer> getApplicableDays(String days)
{
List<Integer> applicableDays = new ArrayList<>();
for (char aChar : days.toCharArray())
{
int day = Integer.parseInt(String.valueOf(aChar));
applicableDays.add(day);
}
return applicableDays;
}
}
| abotos/ClujLiveTransit | Java/appengine-code/appengine-web-ui/src/java/org/cluj/bus/servlet/BusScheduleServlet.java | Java | apache-2.0 | 3,589 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadFactory;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;
public class NettyComponent extends UriEndpointComponent {
private NettyConfiguration configuration;
private volatile EventExecutorGroup executorService;
public NettyComponent() {
super(NettyEndpoint.class);
}
public NettyComponent(Class<? extends Endpoint> endpointClass) {
super(endpointClass);
}
public NettyComponent(CamelContext context) {
super(context, NettyEndpoint.class);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
NettyConfiguration config;
if (configuration != null) {
config = configuration.copy();
} else {
config = new NettyConfiguration();
}
config = parseConfiguration(config, remaining, parameters);
// merge any custom bootstrap configuration on the config
NettyServerBootstrapConfiguration bootstrapConfiguration = resolveAndRemoveReferenceParameter(parameters, "bootstrapConfiguration", NettyServerBootstrapConfiguration.class);
if (bootstrapConfiguration != null) {
Map<String, Object> options = new HashMap<String, Object>();
if (IntrospectionSupport.getProperties(bootstrapConfiguration, options, null, false)) {
IntrospectionSupport.setProperties(getCamelContext().getTypeConverter(), config, options);
}
}
// validate config
config.validateConfiguration();
NettyEndpoint nettyEndpoint = new NettyEndpoint(remaining, this, config);
setProperties(nettyEndpoint.getConfiguration(), parameters);
return nettyEndpoint;
}
/**
* Parses the configuration
*
* @return the parsed and valid configuration to use
*/
protected NettyConfiguration parseConfiguration(NettyConfiguration configuration, String remaining, Map<String, Object> parameters) throws Exception {
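        // Illustrative example (not part of the original source): for an endpoint such as
        // "netty4:tcp://localhost:5150?sync=true" the "remaining" part passed in here would be
        // "tcp://localhost:5150"; only the "tcp" and "udp" schemes are accepted by parseURI below.
        // The host, port and option in this example are made up for illustration.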
configuration.parseURI(new URI(remaining), parameters, this, "tcp", "udp");
return configuration;
}
public NettyConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(NettyConfiguration configuration) {
this.configuration = configuration;
}
public void setExecutorService(EventExecutorGroup executorService) {
this.executorService = executorService;
}
public synchronized EventExecutorGroup getExecutorService() {
if (executorService == null) {
executorService = createExecutorService();
}
return executorService;
}
@Override
protected void doStart() throws Exception {
if (configuration == null) {
configuration = new NettyConfiguration();
}
if (configuration.isUsingExecutorService() && executorService == null) {
executorService = createExecutorService();
}
super.doStart();
}
protected EventExecutorGroup createExecutorService() {
// Provide the executor service for the application
// and use a Camel thread factory so we have consistent thread namings
// we should use a shared thread pool as recommended by Netty
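        // the resulting threads get names derived from that pattern, e.g. something like
        // "Camel (camel-1) thread #3 - NettyEventExecutorGroup" (the exact form depends on the configured pattern)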
String pattern = getCamelContext().getExecutorServiceManager().getThreadNamePattern();
ThreadFactory factory = new CamelThreadFactory(pattern, "NettyEventExecutorGroup", true);
return new DefaultEventExecutorGroup(configuration.getMaximumPoolSize(), factory);
}
@Override
protected void doStop() throws Exception {
if (executorService != null) {
getCamelContext().getExecutorServiceManager().shutdownNow(executorService);
executorService = null;
}
super.doStop();
}
}
| logzio/camel | components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyComponent.java | Java | apache-2.0 | 5,124 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.valves.rewrite;
import java.nio.charset.Charset;
/**
 * Abstract resolver used by the rewrite valve to look up variables:
 * environment/system properties, SSL attributes, HTTP attributes and resources.
 */
public abstract class Resolver {
public abstract String resolve(String key);
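    // Note: despite the name, this default implementation reads JVM system properties,
    // not OS environment variables.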
public String resolveEnv(String key) {
return System.getProperty(key);
}
public abstract String resolveSsl(String key);
public abstract String resolveHttp(String key);
public abstract boolean resolveResource(int type, String name);
/**
* @return The name of the encoding to use to %nn encode URIs
*
* @deprecated This will be removed in Tomcat 9.0.x
*/
@Deprecated
public abstract String getUriEncoding();
public abstract Charset getUriCharset();
}
| IAMTJW/Tomcat-8.5.20 | tomcat-8.5.20/java/org/apache/catalina/valves/rewrite/Resolver.java | Java | apache-2.0 | 1,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.droids.impl;
import java.util.Date;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import org.apache.droids.api.DelayTimer;
import org.apache.droids.api.Droid;
import org.apache.droids.api.Task;
import org.apache.droids.api.TaskExceptionHandler;
import org.apache.droids.api.TaskExceptionResult;
import org.apache.droids.api.TaskMaster;
import org.apache.droids.api.Worker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SequentialTaskMaster<T extends Task> implements TaskMaster<T>
{
private static final Logger LOG = LoggerFactory.getLogger(SequentialTaskMaster.class);
private final Object mutex;
private volatile boolean completed;
private volatile Date startedWorking = null;
private volatile Date finishedWorking = null;
private volatile int completedTask = 0;
private volatile T lastCompletedTask = null;
private volatile ExecutionState state = ExecutionState.INITIALIZED;
private DelayTimer delayTimer = null;
private TaskExceptionHandler exHandler = null;
public SequentialTaskMaster() {
super();
this.mutex = new Object();
}
  /**
   * Processes the given queue sequentially: tasks are polled and executed one by one
   * until the queue is drained or a fatal exception aborts the run.
   */
@Override
public synchronized void start(final Queue<T> queue, final Droid<T> droid) {
this.completed = false;
this.startedWorking = new Date();
this.finishedWorking = null;
this.completedTask = 0;
this.state = ExecutionState.RUNNING;
boolean terminated = false;
while (!terminated) {
T task = queue.poll();
if (task == null) {
break;
}
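      // honour the configured delay between tasks, if a delay timer is set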
if (delayTimer != null) {
long delay = delayTimer.getDelayMillis();
if (delay > 0) {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
}
}
}
Worker<T> worker = droid.getNewWorker();
try {
if (!task.isAborted()) {
worker.execute(task);
}
completedTask++;
lastCompletedTask = task;
} catch (Exception ex) {
TaskExceptionResult result = TaskExceptionResult.WARN;
if (exHandler != null) {
result = exHandler.handleException(ex);
}
switch (result) {
case WARN:
LOG.warn(ex.toString() + " " + task.getId());
if (LOG.isDebugEnabled()) {
LOG.debug(ex.toString(), ex);
}
break;
case FATAL:
LOG.error(ex.getMessage(), ex);
terminated = true;
break;
}
}
}
finishedWorking = new Date();
this.state = ExecutionState.STOPPED;
droid.finished();
synchronized (mutex) {
completed = true;
mutex.notifyAll();
}
}
@Override
public final void setExceptionHandler(TaskExceptionHandler exHandler) {
this.exHandler = exHandler;
}
@Override
public final void setDelayTimer(DelayTimer delayTimer) {
this.delayTimer = delayTimer;
}
public boolean isWorking() {
return startedWorking != null && finishedWorking == null;
}
@Override
public Date getStartTime() {
return startedWorking;
}
@Override
public Date getFinishedWorking() {
return finishedWorking;
}
@Override
public long getCompletedTasks() {
return completedTask;
}
@Override
public T getLastCompletedTask() {
return lastCompletedTask;
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
if (timeout < 0) {
timeout = 0;
}
synchronized (this.mutex) {
long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
long remaining = timeout;
while (!completed) {
this.mutex.wait(remaining);
if (timeout >= 0) {
remaining = deadline - System.currentTimeMillis();
if (remaining <= 0) {
            return false; // reached when the timeout expires before all tasks complete
}
}
}
}
return true;
}
@Override
public ExecutionState getExecutionState() {
return state;
}
}
| fogbeam/Heceta_droids | droids-core/src/main/java/org/apache/droids/impl/SequentialTaskMaster.java | Java | apache-2.0 | 4,864 |
package com.wjyup.coolq.util;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.util.DigestUtils;
import java.nio.charset.StandardCharsets;
/**
 * Utility class for sending messages.
* @author WJY
*/
public class SendMessageUtil {
private static Logger log = LogManager.getLogger(SendMessageUtil.class);
/**
     * Sends the JSON payload and returns the response.
     * @param message the JSON message to send
     * @return the result of sending the message, or null if sending failed or the transport is not HTTP
*/
public static String sendSocketData(String message){
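        // Only the HTTP transport is handled here; for any other MSG_SEND_TYPE the method
        // falls through and returns null.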
try {
ConfigCache configCache = SpringContext.getConfigCache();
            //determine which transport to use for sending the message
if(StaticConf.MSG_SEND_TYPE_HTTP.equalsIgnoreCase(configCache.getMSG_SEND_TYPE())){// http
String url = String.format("http://%s:%s", configCache.getHTTP_HOST(), configCache.getHTTP_PORT());
                if(configCache.isUSE_TOKEN()){// token-based auth: authToken = md5(KEY + ":" + authTime)
long authTime = System.currentTimeMillis() / 1000;
String key = configCache.getKEY()+":"+authTime;
String authToken = DigestUtils.md5DigestAsHex(key.getBytes(StandardCharsets.UTF_8));
JSONObject jsonObject = JSON.parseObject(message);
jsonObject.put("authTime", authTime);
jsonObject.put("authToken", authToken);
message = jsonObject.toJSONString();
}
log.debug("发送的json文本:"+message);
try{
String result = WebUtil.post(url, message);
log.debug("返回结果:" + result);
return result;
}catch (Exception e){
log.error(e.getMessage(),e);
}
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return null;
}
}
| ForeverWJY/CoolQ_Java_Plugin | src/main/java/com/wjyup/coolq/util/SendMessageUtil.java | Java | apache-2.0 | 1,815 |
/**
* Copyright (c) 2008-2010 Andrey Somov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml.tokens;
import java.util.List;
import org.yaml.snakeyaml.error.Mark;
import org.yaml.snakeyaml.error.YAMLException;
/**
* @see <a href="http://pyyaml.org/wiki/PyYAML">PyYAML</a> for more information
*/
public final class DirectiveToken<T> extends Token {
private final String name;
private final List<T> value;
public DirectiveToken(String name, List<T> value, Mark startMark, Mark endMark) {
super(startMark, endMark);
this.name = name;
if (value != null && value.size() != 2) {
throw new YAMLException("Two strings must be provided instead of "
+ String.valueOf(value.size()));
}
this.value = value;
}
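    // Illustrative example (not from the original source): the directive "%YAML 1.1" is
    // represented with name "YAML" and a two-element value list [1, 1], while
    // "%TAG !yaml! tag:yaml.org,2002:" uses name "TAG" with the handle and prefix as the two values.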
public String getName() {
return this.name;
}
public List<T> getValue() {
return this.value;
}
@Override
protected String getArguments() {
if (value != null) {
return "name=" + name + ", value=[" + value.get(0) + ", " + value.get(1) + "]";
} else {
return "name=" + name;
}
}
@Override
public Token.ID getTokenId() {
return ID.Directive;
}
}
| spariev/snakeyaml | src/main/java/org/yaml/snakeyaml/tokens/DirectiveToken.java | Java | apache-2.0 | 1,789 |
/*
Copyright 2018 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.http.cxf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Iterator;
import java.util.Properties;
import javax.activation.DataHandler;
import javax.xml.soap.AttachmentPart;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.MimeHeader;
import javax.xml.soap.SOAPConstants;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPMessage;
import javax.xml.transform.stream.StreamSource;
import javax.xml.ws.WebServiceContext;
import org.apache.soap.util.mime.ByteArrayDataSource;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.w3c.dom.Element;
import nl.nn.adapterframework.core.PipeLineSession;
import nl.nn.adapterframework.stream.Message;
import nl.nn.adapterframework.util.DomBuilderException;
import nl.nn.adapterframework.util.Misc;
import nl.nn.adapterframework.util.XmlUtils;
@RunWith(MockitoJUnitRunner.class)
public class SoapProviderTest {
@BeforeClass
public static void setUp() {
Properties prop = System.getProperties();
String vendor = prop.getProperty("java.vendor");
System.out.println("JVM Vendor : " + vendor);
assumeThat(vendor, not(equalTo("IBM Corporation")));
/*
* The above exclusion of IBM JDK to work around the below error, seen when executing these tests with an IBM JDK:
*
java.lang.VerifyError: JVMVRFY012 stack shape inconsistent; class=com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl, method=createDocumentFragment()Lorg/w3c/dom/DocumentFragment;, pc=5; Type Mismatch, argument 0 in signature com/sun/xml/messaging/saaj/soap/SOAPDocumentFragment.<init>:(Lcom/sun/org/apache/xerces/internal/dom/CoreDocumentImpl;)V does not match
Exception Details:
Location:
com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl.createDocumentFragment()Lorg/w3c/dom/DocumentFragment; @5: JBinvokespecial
Reason:
Type 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' (current frame, stack[2]) is not assignable to 'com/sun/org/apache/xerces/internal/dom/CoreDocumentImpl'
Current Frame:
bci: @5
flags: { }
locals: { 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' }
stack: { 'uninitialized', 'uninitialized', 'com/sun/xml/messaging/saaj/soap/SOAPDocumentImpl' }
at com.sun.xml.messaging.saaj.soap.SOAPPartImpl.<init>(SOAPPartImpl.java:106)
at com.sun.xml.messaging.saaj.soap.ver1_1.SOAPPart1_1Impl.<init>(SOAPPart1_1Impl.java:70)
at com.sun.xml.messaging.saaj.soap.ver1_1.Message1_1Impl.getSOAPPart(Message1_1Impl.java:90)
at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:109)
at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:98)
at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.createMessage(SoapProviderTest.java:94)
at nl.nn.adapterframework.extensions.cxf.SoapProviderTest.sendMessageWithInputStreamAttachmentsTest(SoapProviderTest.java:228)
*/
}
@Spy
WebServiceContext webServiceContext = new WebServiceContextStub();
@InjectMocks
private SoapProviderStub SOAPProvider = new SoapProviderStub();
private final String ATTACHMENT_CONTENT = "<dummy/>";
private final String ATTACHMENT_MIMETYPE = "plain/text";
private final String ATTACHMENT2_CONTENT = "<I'm a pdf file/>";
private final String ATTACHMENT2_NAME = "document.pdf";
private final String ATTACHMENT2_MIMETYPE = "application/pdf";
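	// Describes one outgoing attachment; the tests place this XML in the session key configured
	// via setAttachmentXmlSessionKey so the provider adds the attachment to its reply.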
private final String MULTIPART_XML = "<parts><part type=\"file\" name=\""+ATTACHMENT2_NAME+"\" "
+ "sessionKey=\"part_file\" size=\"72833\" "
+ "mimeType=\""+ATTACHMENT2_MIMETYPE+"\"/></parts>";
private final String BASEDIR = "/Soap/";
protected InputStream getFile(String file) throws IOException {
URL url = this.getClass().getResource(BASEDIR+file);
if (url == null) {
throw new IOException("file not found");
}
return url.openStream();
}
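	// Builds a SOAP 1.1 or 1.2 envelope from a test resource file, optionally adding a single
	// plain-text attachment.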
private SOAPMessage createMessage(String filename) throws IOException, SOAPException {
return createMessage(filename, false, false);
}
private SOAPMessage createMessage(String filename, boolean addAttachment, boolean isSoap1_1) throws IOException, SOAPException {
MessageFactory factory = MessageFactory.newInstance(isSoap1_1 ? SOAPConstants.SOAP_1_1_PROTOCOL : SOAPConstants.SOAP_1_2_PROTOCOL);
SOAPMessage soapMessage = factory.createMessage();
StreamSource streamSource = new StreamSource(getFile(filename));
soapMessage.getSOAPPart().setContent(streamSource);
if(addAttachment) {
InputStream fis = new ByteArrayInputStream(ATTACHMENT_CONTENT.getBytes());
DataHandler dataHander = new DataHandler(new ByteArrayDataSource(fis, ATTACHMENT_MIMETYPE));
AttachmentPart part = soapMessage.createAttachmentPart(dataHander);
soapMessage.addAttachmentPart(part);
}
return soapMessage;
}
private void assertAttachmentInSession(PipeLineSession session) throws DomBuilderException, IOException {
assertNotNull(session.get("mimeHeaders"));
assertNotNull(session.get("attachments"));
Element xml = XmlUtils.buildElement((String) session.get("attachments"));
Element attachment = XmlUtils.getFirstChildTag(xml, "attachment");
assertNotNull(attachment);
//Retrieve sessionkey the attachment was stored in
String sessionKey = XmlUtils.getChildTagAsString(attachment, "sessionKey");
assertNotNull(sessionKey);
Message attachmentMessage = session.getMessage(sessionKey);
//Verify that the attachment sent, was received properly
assertEquals(ATTACHMENT_CONTENT, attachmentMessage.asString());
//Verify the content type
Element mimeTypes = XmlUtils.getFirstChildTag(attachment, "mimeHeaders");
mimeTypes.getElementsByTagName("mimeHeader");
//TODO check what happens when multiple attachments are returned...
String mimeType = XmlUtils.getChildTagAsString(mimeTypes, "mimeHeader");
assertEquals(ATTACHMENT_MIMETYPE, mimeType);
}
private void assertAttachmentInReceivedMessage(SOAPMessage message) throws SOAPException, IOException {
assertEquals(1, message.countAttachments());
Iterator<?> attachmentParts = message.getAttachments();
while (attachmentParts.hasNext()) {
AttachmentPart soapAttachmentPart = (AttachmentPart)attachmentParts.next();
String attachment = Misc.streamToString(soapAttachmentPart.getRawContent());
//ContentID should be equal to the filename
assertEquals(ATTACHMENT2_NAME, soapAttachmentPart.getContentId());
//Validate the attachment's content
assertEquals(ATTACHMENT2_CONTENT, attachment);
//Make sure at least the content-type header has been set
Iterator<?> headers = soapAttachmentPart.getAllMimeHeaders();
String contentType = null;
while (headers.hasNext()) {
MimeHeader header = (MimeHeader) headers.next();
if("Content-Type".equalsIgnoreCase(header.getName()))
contentType = header.getValue();
}
assertEquals(ATTACHMENT2_MIMETYPE, contentType);
}
}
@Test
/**
* Receive SOAP message without attachment
* Reply SOAP message without attachment
* @throws Throwable
*/
public void simpleMessageTest() throws Throwable {
SOAPMessage request = createMessage("correct-soapmsg.xml");
SOAPMessage message = SOAPProvider.invoke(request);
String result = XmlUtils.nodeToString(message.getSOAPPart());
String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
PipeLineSession session = SOAPProvider.getSession();
assertNotNull(session.get("mimeHeaders"));
assertNotNull(session.get("attachments"));
assertEquals("<attachments/>", session.get("attachments").toString().trim());
}
@Test
/**
* Receive faulty message without attachment
* @throws Throwable
*/
public void errorMessageTest() throws Throwable {
SOAPMessage message = SOAPProvider.invoke(null);
String result = XmlUtils.nodeToString(message.getSOAPPart());
assertTrue(result.indexOf("SOAPMessage is null") > 0);
}
@Test
/**
* Receive SOAP message with MTOM attachment
* Reply SOAP message without attachment
* @throws Throwable
*/
public void receiveMessageWithAttachmentsTest() throws Throwable {
SOAPMessage request = createMessage("correct-soapmsg.xml", true, false);
SOAPMessage message = SOAPProvider.invoke(request);
String result = XmlUtils.nodeToString(message.getSOAPPart());
String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
PipeLineSession session = SOAPProvider.getSession();
assertAttachmentInSession(session);
}
@Test
/**
* Receive SOAP message without attachment
* Reply SOAP message with (InputStream) attachment
* @throws Throwable
*/
public void sendMessageWithInputStreamAttachmentsTest() throws Throwable {
SOAPMessage request = createMessage("correct-soapmsg.xml");
PipeLineSession session = new PipeLineSession();
session.put("attachmentXmlSessionKey", MULTIPART_XML);
session.put("part_file", new ByteArrayInputStream(ATTACHMENT2_CONTENT.getBytes()));
SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
SOAPProvider.setSession(session);
SOAPMessage message = SOAPProvider.invoke(request);
String result = XmlUtils.nodeToString(message.getSOAPPart());
String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
assertAttachmentInReceivedMessage(message);
}
@Test
/**
* Receive SOAP message without attachment
* Reply SOAP message with (String) attachment
* @throws Throwable
*/
public void sendMessageWithStringAttachmentsTest() throws Throwable {
SOAPMessage request = createMessage("correct-soapmsg.xml");
PipeLineSession session = new PipeLineSession();
session.put("attachmentXmlSessionKey", MULTIPART_XML);
session.put("part_file", ATTACHMENT2_CONTENT);
SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
SOAPProvider.setSession(session);
SOAPMessage message = SOAPProvider.invoke(request);
String result = XmlUtils.nodeToString(message.getSOAPPart());
String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
assertAttachmentInReceivedMessage(message);
}
@Test
/**
* Receive SOAP message with attachment
* Reply SOAP message with attachment
* @throws Throwable
*/
public void receiveAndSendMessageWithAttachmentsTest() throws Throwable {
SOAPMessage request = createMessage("correct-soapmsg.xml", true, false);
PipeLineSession session = new PipeLineSession();
session.put("attachmentXmlSessionKey", MULTIPART_XML);
session.put("part_file", ATTACHMENT2_CONTENT);
SOAPProvider.setAttachmentXmlSessionKey("attachmentXmlSessionKey");
SOAPProvider.setSession(session);
SOAPMessage message = SOAPProvider.invoke(request);
String result = XmlUtils.nodeToString(message.getSOAPPart());
String expected = Misc.streamToString(getFile("correct-soapmsg.xml"));
assertEquals(expected.replaceAll("\r", ""), result.replaceAll("\r", ""));
//Validate an attachment was sent to the listener
assertAttachmentInSession(SOAPProvider.getSession());
//Validate the listener returned an attachment back
assertAttachmentInReceivedMessage(message);
}
@Test
public void soapActionInSessionKeySOAP1_1() throws Throwable {
// Soap protocol 1.1
SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
String value = "1.1-SoapAction";
webServiceContext.getMessageContext().put("SOAPAction", value);
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void noSoapActionInSessionKeySOAP1_1() throws Throwable {
// Soap protocol 1.1
SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
SOAPProvider.invoke(request);
assertNull(SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void soap1_1MessageWithActionInContentTypeHeader() throws Throwable {
// Soap protocol 1.1
SOAPMessage request = createMessage("soapmsg1_1.xml", false, true);
String value = "ActionInContentTypeHeader";
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value);
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertNull(SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void soapActionInSessionKeySOAP1_2ActionIsTheLastItem() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
String value = "SOAP1_2ActionIsTheLastItem";
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value);
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void soapActionInSessionKeySOAP1_2ActionIsInMiddle() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
String value = "SOAP1_2ActionIsInMiddle";
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action="+value+";somethingelse");
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void soapActionInSessionKeySOAP1_2ActionIsAtTheBeginning() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
String value = "SOAP1_2ActionIsAtTheBeginning";
webServiceContext.getMessageContext().put("Content-Type", "action="+value+";application/soap+xml; somethingelse");
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertEquals(value, SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void noSoapActionInSessionKey1_2() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; somethingelse");
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertNull(SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void emptySoapActionInSessionKey1_2() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
webServiceContext.getMessageContext().put("Content-Type", "application/soap+xml; action=; somethingelse");
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertNull(SOAPProvider.getSession().get("SOAPAction"));
}
@Test
public void soap1_2MessageWithSOAPActionHeader() throws Throwable {
SOAPMessage request = createMessage("soapmsg1_2.xml");
webServiceContext.getMessageContext().put("SOAPAction", "action");
SOAPProvider.invoke(request);
webServiceContext.getMessageContext().clear();
assertNull(SOAPProvider.getSession().get("SOAPAction"));
}
}
| ibissource/iaf | core/src/test/java/nl/nn/adapterframework/http/cxf/SoapProviderTest.java | Java | apache-2.0 | 16,059 |
package mx.emite.sdk.scot.request;
import java.util.List;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import lombok.Builder;
import lombok.Data;
import lombok.Singular;
import mx.emite.sdk.cfdi32.anotaciones.Rfc;
import mx.emite.sdk.scot.request.extra.SucursalInfo;
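/**
 * Request for registering branch offices (sucursales) in Scot©.
 *
 * <p>Illustrative use of the Lombok-generated builder; the helper variables below are
 * placeholders, not part of this SDK:</p>
 *
 * <pre>{@code
 * SucursalesAltaRequest request = SucursalesAltaRequest.builder()
 *         .token(integratorToken)
 *         .rfc(rfcEmisor)
 *         .sucursal(sucursalInfo)   // repeatable adder generated by @Singular("sucursal")
 *         .modificar(false)
 *         .build();
 * }</pre>
 */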
@Data
@Builder
public class SucursalesAltaRequest {
	/**
	 * Token of the <b>Integrator</b> obtained from the Token service
	 * -- SETTER --
	 *
	 * @param token
	 * Token of the <b>Integrator</b> obtained from Scot©
	 *
	 */
@NotNull
private String token;
/**
	 * @param rfc RFC of the issuer; if left blank, all issuers are queried
*/
@Rfc
private String rfc;
/**
	 * @param sucursales list of branch offices (sucursales) to register
*/
@Valid @NotEmpty @Singular("sucursal")
private List<SucursalInfo> sucursales;
/**
	 * whether to update the branch office if it is already registered
*/
@NotNull
public Boolean modificar;
}
| emite-mx/ef-sdk-java | ef-sdk-java/src/main/java/mx/emite/sdk/scot/request/SucursalesAltaRequest.java | Java | apache-2.0 | 1,030 |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rust;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPlatforms;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.Flavored;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.AbstractDescriptionArg;
import com.facebook.buck.rules.BinaryWrapperRule;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.ImplicitDepsInferringDescription;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.ToolProvider;
import com.facebook.buck.versions.VersionRoot;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;
public class RustTestDescription implements
Description<RustTestDescription.Arg>,
ImplicitDepsInferringDescription<RustTestDescription.Arg>,
Flavored,
VersionRoot<RustTestDescription.Arg> {
private final RustBuckConfig rustBuckConfig;
private final FlavorDomain<CxxPlatform> cxxPlatforms;
private final CxxPlatform defaultCxxPlatform;
public RustTestDescription(
RustBuckConfig rustBuckConfig,
FlavorDomain<CxxPlatform> cxxPlatforms, CxxPlatform defaultCxxPlatform) {
this.rustBuckConfig = rustBuckConfig;
this.cxxPlatforms = cxxPlatforms;
this.defaultCxxPlatform = defaultCxxPlatform;
}
@Override
public Arg createUnpopulatedConstructorArg() {
return new Arg();
}
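  /**
   * Builds the test binary under an appended "unittest" flavor (passing --test to rustc
   * unless the framework option is disabled) and wraps it in a {@link RustTest} rule that
   * runs the resulting executable.
   */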
@Override
public <A extends Arg> BuildRule createBuildRule(
TargetGraph targetGraph,
BuildRuleParams params,
BuildRuleResolver resolver,
CellPathResolver cellRoots,
A args) throws NoSuchBuildTargetException {
final BuildTarget buildTarget = params.getBuildTarget();
BuildTarget exeTarget = params.getBuildTarget()
.withAppendedFlavors(InternalFlavor.of("unittest"));
Optional<Map.Entry<Flavor, RustBinaryDescription.Type>> type =
RustBinaryDescription.BINARY_TYPE.getFlavorAndValue(buildTarget);
boolean isCheck = type.map(t -> t.getValue().isCheck()).orElse(false);
BinaryWrapperRule testExeBuild = resolver.addToIndex(
RustCompileUtils.createBinaryBuildRule(
params.withBuildTarget(exeTarget),
resolver,
rustBuckConfig,
cxxPlatforms,
defaultCxxPlatform,
args.crate,
args.features,
Stream.of(
args.framework ? Stream.of("--test") : Stream.<String>empty(),
rustBuckConfig.getRustTestFlags().stream(),
args.rustcFlags.stream())
.flatMap(x -> x).iterator(),
args.linkerFlags.iterator(),
RustCompileUtils.getLinkStyle(params.getBuildTarget(), args.linkStyle),
args.rpath, args.srcs,
args.crateRoot,
ImmutableSet.of("lib.rs", "main.rs"),
isCheck
));
SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
Tool testExe = testExeBuild.getExecutableCommand();
BuildRuleParams testParams = params.copyAppendingExtraDeps(
testExe.getDeps(ruleFinder));
return new RustTest(
testParams,
ruleFinder,
testExeBuild,
args.labels,
args.contacts);
}
@Override
public void findDepsForTargetFromConstructorArgs(
BuildTarget buildTarget,
CellPathResolver cellRoots,
Arg constructorArg,
ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
ToolProvider compiler = rustBuckConfig.getRustCompiler();
extraDepsBuilder.addAll(compiler.getParseTimeDeps());
extraDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatforms.getValues()));
}
@Override
public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
if (cxxPlatforms.containsAnyOf(flavors)) {
return true;
}
for (RustBinaryDescription.Type type : RustBinaryDescription.Type.values()) {
if (flavors.contains(type.getFlavor())) {
return true;
}
}
return false;
}
@Override
public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains() {
return Optional.of(ImmutableSet.of(cxxPlatforms, RustBinaryDescription.BINARY_TYPE));
}
@Override
public boolean isVersionRoot(ImmutableSet<Flavor> flavors) {
return true;
}
@SuppressFieldNotInitialized
public static class Arg extends AbstractDescriptionArg {
public ImmutableSortedSet<SourcePath> srcs = ImmutableSortedSet.of();
public ImmutableSet<String> contacts = ImmutableSet.of();
public ImmutableSortedSet<String> features = ImmutableSortedSet.of();
public ImmutableList<String> rustcFlags = ImmutableList.of();
public ImmutableList<String> linkerFlags = ImmutableList.of();
public ImmutableSortedSet<BuildTarget> deps = ImmutableSortedSet.of();
public Optional<Linker.LinkableDepType> linkStyle;
public boolean rpath = true;
public boolean framework = true;
public Optional<String> crate;
public Optional<SourcePath> crateRoot;
}
}
| vschs007/buck | src/com/facebook/buck/rust/RustTestDescription.java | Java | apache-2.0 | 6,463 |
package org.efix.util.buffer;
import org.efix.util.ByteSequenceWrapper;
import org.efix.util.StringUtil;
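/**
 * Helpers for converting between ASCII strings and buffers.
 */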
public class BufferUtil {
public static UnsafeBuffer fromString(String string) {
return new UnsafeBuffer(StringUtil.asciiBytes(string));
}
public static String toString(Buffer buffer) {
return toString(buffer, 0, buffer.capacity());
}
public static String toString(Buffer buffer, int offset, int length) {
return new ByteSequenceWrapper(buffer, offset, length).toString();
}
}
| artyomkorzun/efix | src/main/java/org/efix/util/buffer/BufferUtil.java | Java | apache-2.0 | 536 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.repository;
import org.apache.shardingsphere.example.db.discovery.spring.namespace.jdbc.entity.Address;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;
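/**
 * Plain JDBC repository for the {@code t_address} table.
 *
 * <p>Illustrative usage, assuming a configured {@link DataSource}:</p>
 *
 * <pre>{@code
 * AddressRepository repository = new AddressRepository(dataSource);
 * repository.createTableIfNotExists();
 * Address address = new Address();
 * address.setAddressId(1L);
 * address.setAddressName("address_1");
 * repository.insert(address);
 * List<Address> all = repository.selectAll();
 * }</pre>
 */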
public final class AddressRepository {
private final DataSource dataSource;
public AddressRepository(final DataSource dataSource) {
this.dataSource = dataSource;
}
public void createTableIfNotExists() throws SQLException {
String sql = "CREATE TABLE IF NOT EXISTS t_address "
+ "(address_id BIGINT NOT NULL, address_name VARCHAR(100) NOT NULL, PRIMARY KEY (address_id))";
try (Connection connection = dataSource.getConnection();
Statement statement = connection.createStatement()) {
statement.executeUpdate(sql);
}
}
public void dropTable() throws SQLException {
String sql = "DROP TABLE t_address";
try (Connection connection = dataSource.getConnection();
Statement statement = connection.createStatement()) {
statement.executeUpdate(sql);
}
}
public void truncateTable() throws SQLException {
String sql = "TRUNCATE TABLE t_address";
try (Connection connection = dataSource.getConnection();
Statement statement = connection.createStatement()) {
statement.executeUpdate(sql);
}
}
public Long insert(final Address entity) throws SQLException {
String sql = "INSERT INTO t_address (address_id, address_name) VALUES (?, ?)";
try (Connection connection = dataSource.getConnection();
PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
preparedStatement.setLong(1, entity.getAddressId());
preparedStatement.setString(2, entity.getAddressName());
preparedStatement.executeUpdate();
}
return entity.getAddressId();
}
public void delete(final Long primaryKey) throws SQLException {
String sql = "DELETE FROM t_address WHERE address_id=?";
try (Connection connection = dataSource.getConnection();
PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
preparedStatement.setLong(1, primaryKey);
preparedStatement.executeUpdate();
}
}
public List<Address> selectAll() throws SQLException {
String sql = "SELECT * FROM t_address";
return getAddress(sql);
}
private List<Address> getAddress(final String sql) throws SQLException {
List<Address> result = new LinkedList<>();
try (Connection connection = dataSource.getConnection();
PreparedStatement preparedStatement = connection.prepareStatement(sql);
ResultSet resultSet = preparedStatement.executeQuery()) {
while (resultSet.next()) {
Address address = new Address();
address.setAddressId(resultSet.getLong(1));
address.setAddressName(resultSet.getString(2));
result.add(address);
}
}
return result;
}
}
| apache/incubator-shardingsphere | examples/shardingsphere-sample/shardingsphere-example-generated/shardingsphere-jdbc-sample/shardingsphere-jdbc-memory-local-db-discovery-spring-namespace-jdbc-example/src/main/java/org/apache/shardingsphere/example/db/discovery/spring/namespace/jdbc/repository/AddressRepository.java | Java | apache-2.0 | 4,185 |
/*
* Copyright (c) 2017 Antony Esik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ae.camunda.dispatcher.mapper.xml;
import com.ae.camunda.dispatcher.api.mapper.TaskMapper;
import com.ae.camunda.dispatcher.exception.CamundaMappingException;
import org.eclipse.persistence.jaxb.JAXBContextFactory;
import org.springframework.stereotype.Component;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Collections;
/**
* @author AEsik
* Date 09.10.2017
*/
@Component
public class XmlTaskMapper implements TaskMapper {
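    /**
     * Marshals the given task to an XML string; note that a fresh JAXBContext is created on every call.
     */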
@Override
public String map(Object task) {
try {
JAXBContext context = JAXBContextFactory.createContext(new Class[]{task.getClass()}, Collections.emptyMap());
StringWriter sw = new StringWriter();
context.createMarshaller().marshal(task, sw);
return sw.toString();
} catch (JAXBException e) {
throw new CamundaMappingException(e);
}
}
@Override
public Object map(String body, Class<?> clazz) {
try {
JAXBContext context = JAXBContextFactory.createContext(new Class[]{clazz}, Collections.emptyMap());
StringReader sr = new StringReader(body);
return context.createUnmarshaller().unmarshal(sr);
} catch (JAXBException e) {
throw new CamundaMappingException(e);
}
}
}
| EsikAntony/camunda-task-dispatcher | camunda-task-dispatcher-mapper-xml/src/main/java/com/ae/camunda/dispatcher/mapper/xml/XmlTaskMapper.java | Java | apache-2.0 | 1,991 |
package org.whale.ext.domain;
import java.util.ArrayList;
import java.util.List;
import org.whale.system.annotation.jdbc.Column;
import org.whale.system.annotation.jdbc.Id;
import org.whale.system.annotation.jdbc.Table;
import org.whale.system.annotation.jdbc.Validate;
import org.whale.system.base.BaseEntry;
import org.whale.system.common.util.PropertiesUtil;
/**
 * Entity object
 *
 * @author wjs
 * 2014-09-10 10:12:48 AM
*/
@Table(value="sys_domian", cnName="实体对象")
public class Domain extends BaseEntry{
private static final long serialVersionUID = -23042834921L;
@Id
@Column(cnName="id")
private Long id;
@Validate(required=true)
@Column(cnName="实体名")
private String domainName;
@Validate(required=true)
@Column(cnName="中文名")
private String domainCnName;
@Validate(required=true)
@Column(cnName="数据库", unique=true)
private String domainSqlName;
@Column(cnName="基础包路径")
private String pkgName = "org.whale.system";
	//tree model
private Integer treeModel;
private String treeId;
private String treePid;
private String treeName;
	//template type
private Integer ftlType;
	//code path
private String codePath;
private String author = PropertiesUtil.getValue("author", "wjs");
	//primary key
private Attr idAttr;
private List<Attr> attrs;
private List<Attr> listAttrs = new ArrayList<Attr>();
private List<Attr> formAttrs = new ArrayList<Attr>();
private List<Attr> queryAttrs = new ArrayList<Attr>();
public Long getId() {
return id;
}
public String getDomainName() {
return domainName;
}
public void setDomainName(String domainName) {
this.domainName = domainName;
}
public String getDomainCnName() {
return domainCnName;
}
public void setDomainCnName(String domainCnName) {
this.domainCnName = domainCnName;
}
public String getDomainSqlName() {
return domainSqlName;
}
public void setDomainSqlName(String domainSqlName) {
this.domainSqlName = domainSqlName;
}
public String getPkgName() {
return pkgName;
}
public void setPkgName(String pkgName) {
this.pkgName = pkgName;
}
public Attr getIdAttr() {
return idAttr;
}
public void setIdAttr(Attr idAttr) {
this.idAttr = idAttr;
}
public List<Attr> getAttrs() {
return attrs;
}
public void setAttrs(List<Attr> attrs) {
this.attrs = attrs;
}
public List<Attr> getListAttrs() {
return listAttrs;
}
public void setListAttrs(List<Attr> listAttrs) {
this.listAttrs = listAttrs;
}
public List<Attr> getFormAttrs() {
return formAttrs;
}
public void setFormAttrs(List<Attr> formAttrs) {
this.formAttrs = formAttrs;
}
public List<Attr> getQueryAttrs() {
return queryAttrs;
}
public void setQueryAttrs(List<Attr> queryAttrs) {
this.queryAttrs = queryAttrs;
}
public void setId(Long id) {
this.id = id;
}
public Integer getFtlType() {
return ftlType;
}
public void setFtlType(Integer ftlType) {
this.ftlType = ftlType;
}
public String getCodePath() {
return codePath;
}
public void setCodePath(String codePath) {
this.codePath = codePath;
}
public Integer getTreeModel() {
return treeModel;
}
public void setTreeModel(Integer treeModel) {
this.treeModel = treeModel;
}
public String getTreeId() {
return treeId;
}
public void setTreeId(String treeId) {
this.treeId = treeId;
}
public String getTreePid() {
return treePid;
}
public void setTreePid(String treePid) {
this.treePid = treePid;
}
public String getTreeName() {
return treeName;
}
public void setTreeName(String treeName) {
this.treeName = treeName;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
}
| fywxin/base | system-parent/ext-code/src/main/java/org/whale/ext/domain/Domain.java | Java | apache-2.0 | 3,741 |
/* ========================================================================= *
* Boarder *
* http://boarder.mikuz.org/ *
* ========================================================================= *
* Copyright (C) 2013 Boarder *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* ========================================================================= */
package fi.mikuz.boarder.util;
import org.acra.ACRA;
import android.content.Context;
import android.os.Looper;
import android.util.Log;
import android.widget.Toast;
public abstract class ContextUtils {
private static final String TAG = ContextUtils.class.getSimpleName();
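	/**
	 * Shows a short toast. Must be called on the main looper; otherwise the error is
	 * logged and reported through ACRA instead of being displayed.
	 */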
public static void toast(Context context, String toast) {
toast(context, toast, Toast.LENGTH_SHORT);
}
public static void toast(Context context, String toast, int duration) {
String errLogMsg = "Unable to toast message: \"" + toast + "\"";
if (Looper.myLooper() == null) {
Exception e = new IllegalStateException("Not running in a looper");
Log.e(TAG, errLogMsg, e);
ACRA.getErrorReporter().handleException(e);
} else if (Looper.myLooper() != Looper.getMainLooper()) {
Exception e = new IllegalStateException("Not running in the main looper");
Log.e(TAG, errLogMsg, e);
ACRA.getErrorReporter().handleException(e);
} else {
try {
Toast.makeText(context, toast, duration).show();
} catch (NullPointerException e) {
Log.e(TAG, errLogMsg, e);
}
}
}
}
| Mikuz/Boarder | src/fi/mikuz/boarder/util/ContextUtils.java | Java | apache-2.0 | 2,556 |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.kafka.impl;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.kafka.api.PartitionStrategy;
import com.streamsets.pipeline.kafka.api.SdcKafkaProducer;
import com.streamsets.pipeline.lib.kafka.KafkaErrors;
import com.streamsets.pipeline.lib.kafka.exception.KafkaConnectionException;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
public class KafkaProducer08 implements SdcKafkaProducer {
private static final Logger LOG = LoggerFactory.getLogger(KafkaProducer08.class);
private static final String METADATA_BROKER_LIST_KEY = "metadata.broker.list";
private static final String KEY_SERIALIZER_CLASS_KEY = "key.serializer.class";
private static final String PRODUCER_TYPE_KEY = "producer.type";
private static final String PRODUCER_TYPE_DEFAULT = "sync";
private static final String SERIALIZER_CLASS_KEY = "serializer.class";
private static final String REQUEST_REQUIRED_ACKS_KEY = "request.required.acks";
private static final String REQUEST_REQUIRED_ACKS_DEFAULT = "1";
private static final String DEFAULT_ENCODER_CLASS = "kafka.serializer.DefaultEncoder";
private static final String STRING_ENCODER_CLASS = "kafka.serializer.StringEncoder";
private static final String PARTITIONER_CLASS_KEY = "partitioner.class";
private static final String RANDOM_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RandomPartitioner";
private static final String ROUND_ROBIN_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.RoundRobinPartitioner";
private static final String EXPRESSION_PARTITIONER_CLASS = "com.streamsets.pipeline.kafka.impl.ExpressionPartitioner";
  /* Comma-separated list of seed brokers (host:port) used for fetching metadata */
private final String metadataBrokerList;
private final Map<String, Object> kafkaProducerConfigs;
private final DataFormat producerPayloadType;
private final PartitionStrategy partitionStrategy;
private List<KeyedMessage> messageList;
private Producer producer;
public KafkaProducer08(
String metadataBrokerList,
DataFormat producerPayloadType,
PartitionStrategy partitionStrategy,
Map<String, Object> kafkaProducerConfigs
) {
this.metadataBrokerList = metadataBrokerList;
this.producerPayloadType = producerPayloadType;
this.partitionStrategy = partitionStrategy;
this.messageList = new ArrayList<>();
this.kafkaProducerConfigs = kafkaProducerConfigs;
}
@Override
public void init() throws StageException {
Properties props = new Properties();
//metadata.broker.list
props.put(METADATA_BROKER_LIST_KEY, metadataBrokerList);
//producer.type
props.put(PRODUCER_TYPE_KEY, PRODUCER_TYPE_DEFAULT);
//key.serializer.class
props.put(KEY_SERIALIZER_CLASS_KEY, STRING_ENCODER_CLASS);
//partitioner.class
configurePartitionStrategy(props, partitionStrategy);
//serializer.class
configureSerializer(props, producerPayloadType);
//request.required.acks
props.put(REQUEST_REQUIRED_ACKS_KEY, REQUEST_REQUIRED_ACKS_DEFAULT);
addUserConfiguredProperties(props);
ProducerConfig config = new ProducerConfig(props);
producer = new Producer<>(config);
}
@Override
public void destroy() {
if(producer != null) {
producer.close();
}
}
@Override
public String getVersion() {
return Kafka08Constants.KAFKA_VERSION;
}
@Override
public void enqueueMessage(String topic, Object message, Object messageKey) {
    //The topic could be a record EL string, and this is not a good place to evaluate expressions,
    //so the topic is passed in as a parameter.
messageList.add(new KeyedMessage<>(topic, messageKey, message));
}
@Override
public void clearMessages() {
messageList.clear();
}
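  /**
   * Sends all enqueued messages synchronously and clears the queue on success; a failure
   * remaining after the producer's internal retries is rethrown as a {@link KafkaConnectionException}.
   */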
@Override
public List<Record> write(Stage.Context context) throws StageException {
try {
producer.send(messageList);
messageList.clear();
} catch (Exception e) {
//Producer internally refreshes metadata and retries if there is any recoverable exception.
//If retry fails, a FailedToSendMessageException is thrown.
//In this case we want to fail pipeline.
LOG.error(KafkaErrors.KAFKA_50.getMessage(), e.toString(), e);
throw new KafkaConnectionException(KafkaErrors.KAFKA_50, e.toString(), e);
}
return Collections.emptyList();
}
private void configureSerializer(Properties props, DataFormat producerPayloadType) {
if(producerPayloadType == DataFormat.TEXT) {
props.put(SERIALIZER_CLASS_KEY, DEFAULT_ENCODER_CLASS);
}
}
private void configurePartitionStrategy(Properties props, PartitionStrategy partitionStrategy) {
if (partitionStrategy == PartitionStrategy.RANDOM) {
props.put(PARTITIONER_CLASS_KEY, RANDOM_PARTITIONER_CLASS);
} else if (partitionStrategy == PartitionStrategy.ROUND_ROBIN) {
props.put(PARTITIONER_CLASS_KEY, ROUND_ROBIN_PARTITIONER_CLASS);
} else if (partitionStrategy == PartitionStrategy.EXPRESSION) {
props.put(PARTITIONER_CLASS_KEY, EXPRESSION_PARTITIONER_CLASS);
} else if (partitionStrategy == PartitionStrategy.DEFAULT) {
//default partitioner class
}
}
private void addUserConfiguredProperties(Properties props) {
//The following options, if specified, are ignored : "metadata.broker.list", "producer.type",
// "key.serializer.class", "partitioner.class", "serializer.class".
if (kafkaProducerConfigs != null && !kafkaProducerConfigs.isEmpty()) {
kafkaProducerConfigs.remove(METADATA_BROKER_LIST_KEY);
kafkaProducerConfigs.remove(KEY_SERIALIZER_CLASS_KEY);
kafkaProducerConfigs.remove(SERIALIZER_CLASS_KEY);
for (Map.Entry<String, Object> producerConfig : kafkaProducerConfigs.entrySet()) {
props.put(producerConfig.getKey(), producerConfig.getValue());
}
}
}
}
| kunickiaj/datacollector | sdc-kafka_0_8/src/main/java/com/streamsets/pipeline/kafka/impl/KafkaProducer08.java | Java | apache-2.0 | 6,836 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.docdb.model.transform;
import java.util.ArrayList;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.docdb.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* Event StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EventStaxUnmarshaller implements Unmarshaller<Event, StaxUnmarshallerContext> {
public Event unmarshall(StaxUnmarshallerContext context) throws Exception {
Event event = new Event();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
if (context.isStartOfDocument())
targetDepth += 1;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument())
return event;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("SourceIdentifier", targetDepth)) {
event.setSourceIdentifier(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("SourceType", targetDepth)) {
event.setSourceType(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Message", targetDepth)) {
event.setMessage(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("EventCategories", targetDepth)) {
event.withEventCategories(new ArrayList<String>());
continue;
}
if (context.testExpression("EventCategories/EventCategory", targetDepth)) {
event.withEventCategories(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Date", targetDepth)) {
event.setDate(DateStaxUnmarshallerFactory.getInstance("iso8601").unmarshall(context));
continue;
}
if (context.testExpression("SourceArn", targetDepth)) {
event.setSourceArn(StringStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
if (context.getCurrentDepth() < originalDepth) {
return event;
}
}
}
}
private static EventStaxUnmarshaller instance;
public static EventStaxUnmarshaller getInstance() {
if (instance == null)
instance = new EventStaxUnmarshaller();
return instance;
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-docdb/src/main/java/com/amazonaws/services/docdb/model/transform/EventStaxUnmarshaller.java | Java | apache-2.0 | 3,623 |
package com.gentics.mesh.changelog.changes;
import static com.gentics.mesh.core.data.relationship.GraphRelationships.SCHEMA_CONTAINER_VERSION_KEY_PROPERTY;
import com.gentics.mesh.changelog.AbstractChange;
import com.tinkerpop.blueprints.Direction;
/**
 * Changelog entry which replaces the schema container version edges with properties
*/
public class ReplaceSchemaVersionEdges extends AbstractChange {
@Override
public String getUuid() {
return "E737684330534623B768433053C623F2";
}
@Override
public String getName() {
return "ReplaceSchemaVersionEdges";
}
@Override
public String getDescription() {
return "Replaces edges from node content to schema versions with properties.";
}
@Override
public void applyInTx() {
replaceSingleEdge("NodeGraphFieldContainerImpl", Direction.OUT, "HAS_SCHEMA_CONTAINER_VERSION", SCHEMA_CONTAINER_VERSION_KEY_PROPERTY);
}
}
| gentics/mesh | changelog-system/src/main/java/com/gentics/mesh/changelog/changes/ReplaceSchemaVersionEdges.java | Java | apache-2.0 | 877 |
package sl.hr_client;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
} | 862573026/SchoolHRProj | Android/HRAM_SNU_App/hr-client/src/androidTest/java/sl/hr_client/ApplicationTest.java | Java | apache-2.0 | 343 |
package cc.mallet.util;
/**
* Static utility methods for Strings
*/
final public class Strings {
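  /**
   * Returns the length of the longest prefix shared by all strings in the array.
   */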
public static int commonPrefixIndex (String[] strings)
{
int prefixLen = strings[0].length();
for (int i = 1; i < strings.length; i++) {
if (strings[i].length() < prefixLen)
prefixLen = strings[i].length();
int j = 0;
if (prefixLen == 0)
return 0;
while (j < prefixLen) {
if (strings[i-1].charAt(j) != strings[i].charAt(j)) {
prefixLen = j;
break;
}
j++;
}
}
return prefixLen;
}
public static String commonPrefix (String[] strings)
{
return strings[0].substring (0, commonPrefixIndex(strings));
}
public static int count (String string, char ch)
{
int idx = -1;
int count = 0;
while ((idx = string.indexOf (ch, idx+1)) >= 0) { count++; };
return count;
}
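  /**
   * Normalized Levenshtein distance: the edit distance between s and t divided by the
   * length of the longer string, so the result lies in [0, 1]. Note that, as written,
   * an empty input yields 1.0 even when both strings are empty.
   */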
public static double levenshteinDistance (String s, String t) {
int n = s.length();
int m = t.length();
int d[][]; // matrix
int i; // iterates through s
int j; // iterates through t
char s_i; // ith character of s
char t_j; // jth character of t
int cost; // cost
if (n == 0)
return 1.0;
if (m == 0)
return 1.0;
d = new int[n+1][m+1];
for (i = 0; i <= n; i++)
d[i][0] = i;
for (j = 0; j <= m; j++)
d[0][j] = j;
for (i = 1; i <= n; i++) {
s_i = s.charAt (i - 1);
for (j = 1; j <= m; j++) {
t_j = t.charAt (j - 1);
cost = (s_i == t_j) ? 0 : 1;
d[i][j] = minimum (d[i-1][j]+1, d[i][j-1]+1, d[i-1][j-1] + cost);
}
}
int longer = (n > m) ? n : m;
return (double)d[n][m] / longer; // Normalize to 0-1.
}
private static int minimum (int a, int b, int c) {
int mi = a;
if (b < mi) {
mi = b;
}
if (c < mi) {
mi = c;
}
return mi;
}
}
| UnsupervisedOntologyLearning/hrLDA | hrLDA/src/cc/mallet/util/Strings.java | Java | apache-2.0 | 1,930 |
package org.museautomation.core.step;
import org.jetbrains.annotations.*;
import org.museautomation.core.*;
import org.museautomation.core.context.*;
import org.museautomation.core.step.descriptor.*;
import org.museautomation.core.steptask.*;
import org.museautomation.core.values.*;
import org.museautomation.core.values.descriptor.*;
import java.util.*;
/**
* Executes the steps contained within a Macro.
*
* Note that this does NOT execute those steps within a separate variable scope, despite this class extending
* ScopedGroup. It overrides #isCreateNewVariableScope to disable that behavior. That seems a bit strange, but
* CallFunction builds on the basic function of CallMacroStep and it needs to be scoped. We need multiple-inheritance
* to do this cleanly (yuck), but this will have to suffice.
*
* @see Macro
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
@MuseTypeId("callmacro")
@MuseStepName("Macro")
@MuseInlineEditString("call macro {id}")
@MuseStepIcon("glyph:FontAwesome:EXTERNAL_LINK")
@MuseStepTypeGroup("Structure")
@MuseStepLongDescription("The 'id' source is resolved to a string and used to find the macro in the project. The steps within the macro are then executed as children of the call-macro step, within the same variable scope as the parent. This means that steps within the macro have access to the same variables as the caller.")
@MuseSubsourceDescriptor(displayName = "Macro name", description = "The name (resource id) of the macro to call", type = SubsourceDescriptor.Type.Named, name = CallMacroStep.ID_PARAM)
public class CallMacroStep extends ScopedGroup
{
@SuppressWarnings("unused") // called via reflection
public CallMacroStep(StepConfiguration config, MuseProject project)
{
super(config, project);
_config = config;
_project = project;
}
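    /**
     * Resolves the 'id' source to a project resource containing steps, loads those steps
     * into the step locator and returns an execution context that runs them as children
     * of this step, in the caller's variable scope.
     */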
@Override
protected StepExecutionContext createStepExecutionContextForChildren(StepExecutionContext context) throws MuseExecutionError
{
String id = getStepsId(context);
ContainsStep resource = _project.getResourceStorage().getResource(id, ContainsStep.class);
if (resource == null)
throw new StepExecutionError("unable to locate project resource, id=" + id);
StepConfiguration step = resource.getStep();
List<StepConfiguration> steps;
if (step.getChildren() != null && step.getChildren().size() > 0)
steps = step.getChildren();
else
{
steps = new ArrayList<>();
steps.add(step);
}
context.getStepLocator().loadSteps(steps);
context.raiseEvent(DynamicStepLoadingEventType.create(_config, steps));
return new ListOfStepsExecutionContext(context.getParent(), steps, isCreateNewVariableScope(), this);
}
/**
* Get the id of the project resource that contains the steps that should be run.
*/
@NotNull
@SuppressWarnings("WeakerAccess")
protected String getStepsId(StepExecutionContext context) throws MuseExecutionError
{
MuseValueSource id_source = getValueSource(_config, ID_PARAM, true, context.getProject());
return BaseValueSource.getValue(id_source, context, false, String.class);
}
@Override
protected boolean isCreateNewVariableScope()
{
return false;
}
protected MuseProject _project;
private StepConfiguration _config;
public final static String ID_PARAM = "id";
public final static String TYPE_ID = CallMacroStep.class.getAnnotation(MuseTypeId.class).value();
} | ChrisLMerrill/muse | core/src/main/java/org/museautomation/core/step/CallMacroStep.java | Java | apache-2.0 | 3,625 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.patch.formove;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diff.impl.patch.FilePatch;
import com.intellij.openapi.diff.impl.patch.TextFilePatch;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchBase;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchFactory;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.ex.FileTypeChooser;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.patch.RelativePathCalculator;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class PathsVerifier {
// in
private final Project myProject;
private final VirtualFile myBaseDirectory;
private final List<FilePatch> myPatches;
// temp
private final Map<VirtualFile, MovedFileData> myMovedFiles;
private final List<FilePath> myBeforePaths;
private final List<VirtualFile> myCreatedDirectories;
// out
private final List<PatchAndFile> myTextPatches;
private final List<PatchAndFile> myBinaryPatches;
@NotNull private final List<VirtualFile> myWritableFiles;
private final ProjectLevelVcsManager myVcsManager;
private final List<FilePatch> mySkipped;
private DelayedPrecheckContext myDelayedPrecheckContext;
private final List<FilePath> myAddedPaths;
private final List<FilePath> myDeletedPaths;
private boolean myIgnoreContentRootsCheck;
public PathsVerifier(@NotNull Project project,
@NotNull VirtualFile baseDirectory,
@NotNull List<FilePatch> patches) {
myProject = project;
myBaseDirectory = baseDirectory;
myPatches = patches;
myMovedFiles = new HashMap<>();
myBeforePaths = new ArrayList<>();
myCreatedDirectories = new ArrayList<>();
myTextPatches = new ArrayList<>();
myBinaryPatches = new ArrayList<>();
myWritableFiles = new ArrayList<>();
myVcsManager = ProjectLevelVcsManager.getInstance(myProject);
mySkipped = new ArrayList<>();
myAddedPaths = new ArrayList<>();
myDeletedPaths = new ArrayList<>();
}
  // paths to be moved to the changelist (CL): target files + created dirs
public List<FilePath> getDirectlyAffected() {
final List<FilePath> affected = new ArrayList<>();
addAllFilePath(myCreatedDirectories, affected);
addAllFilePath(myWritableFiles, affected);
affected.addAll(myBeforePaths);
return affected;
}
  // everything affected, including the old parents of moved files
public List<VirtualFile> getAllAffected() {
final List<VirtualFile> affected = new ArrayList<>();
affected.addAll(myCreatedDirectories);
affected.addAll(myWritableFiles);
// after files' parent
for (VirtualFile file : myMovedFiles.keySet()) {
final VirtualFile parent = file.getParent();
if (parent != null) {
affected.add(parent);
}
}
// before..
for (FilePath path : myBeforePaths) {
final FilePath parent = path.getParentPath();
if (parent != null) {
affected.add(parent.getVirtualFile());
}
}
return affected;
}
private static void addAllFilePath(final Collection<VirtualFile> files, final Collection<FilePath> paths) {
for (VirtualFile file : files) {
paths.add(VcsUtil.getFilePath(file));
}
}
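  /**
   * Runs the per-patch prechecks outside a write action. Patches that fail are reverted
   * and returned; patches the delayed precheck decides to skip are collected in
   * {@link #getSkipped()} and removed from the set to apply.
   */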
@CalledInAwt
public List<FilePatch> nonWriteActionPreCheck() {
List<FilePatch> failedToApply = ContainerUtil.newArrayList();
myDelayedPrecheckContext = new DelayedPrecheckContext(myProject);
for (FilePatch patch : myPatches) {
final CheckPath checker = getChecker(patch);
if (!checker.canBeApplied(myDelayedPrecheckContext)) {
revert(checker.getErrorMessage());
failedToApply.add(patch);
}
}
final Collection<FilePatch> skipped = myDelayedPrecheckContext.doDelayed();
mySkipped.addAll(skipped);
myPatches.removeAll(skipped);
myPatches.removeAll(failedToApply);
return failedToApply;
}
public List<FilePatch> getSkipped() {
return mySkipped;
}
public List<FilePatch> execute() {
List<FilePatch> failedPatches = ContainerUtil.newArrayList();
try {
final List<CheckPath> checkers = new ArrayList<>(myPatches.size());
for (FilePatch patch : myPatches) {
final CheckPath checker = getChecker(patch);
checkers.add(checker);
}
for (CheckPath checker : checkers) {
if (!checker.check()) {
failedPatches.add(checker.getPatch());
revert(checker.getErrorMessage());
}
}
}
catch (IOException e) {
revert(e.getMessage());
}
myPatches.removeAll(failedPatches);
return failedPatches;
}
private CheckPath getChecker(final FilePatch patch) {
final String beforeFileName = patch.getBeforeName();
final String afterFileName = patch.getAfterName();
if (beforeFileName == null || patch.isNewFile()) {
return new CheckAdded(patch);
}
else if (afterFileName == null || patch.isDeletedFile()) {
return new CheckDeleted(patch);
}
else if (!beforeFileName.equals(afterFileName)) {
return new CheckMoved(patch);
}
else {
return new CheckModified(patch);
}
}
public Collection<FilePath> getToBeAdded() {
return myAddedPaths;
}
public Collection<FilePath> getToBeDeleted() {
return myDeletedPaths;
}
@NotNull
public Collection<FilePatch> filterBadFileTypePatches() {
List<PatchAndFile> failedTextPatches =
ContainerUtil.findAll(myTextPatches, textPatch -> !isFileTypeOk(textPatch.getFile()));
myTextPatches.removeAll(failedTextPatches);
return ContainerUtil.map(failedTextPatches, patchInfo -> patchInfo.getApplyPatch().getPatch());
}
private boolean isFileTypeOk(@NotNull VirtualFile file) {
if (file.isDirectory()) {
PatchApplier
        .showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because it is a directory.");
return false;
}
FileType fileType = file.getFileType();
if (fileType == FileTypes.UNKNOWN) {
fileType = FileTypeChooser.associateFileType(file.getName());
if (fileType == null) {
PatchApplier
          .showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because its type is not defined.");
return false;
}
}
if (fileType.isBinary()) {
PatchApplier.showError(myProject, "Cannot apply file " + file.getPresentableName() + " from patch because it is binary.");
return false;
}
return true;
}
private class CheckModified extends CheckDeleted {
private CheckModified(final FilePatch path) {
super(path);
}
}
private class CheckDeleted extends CheckPath {
protected CheckDeleted(final FilePatch path) {
super(path);
}
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
if (beforeFile == null) {
context.addSkip(getMappedFilePath(myBeforeName), myPatch);
}
return true;
}
@Override
protected boolean check() {
final VirtualFile beforeFile = getMappedFile(myBeforeName);
if (! checkExistsAndValid(beforeFile, myBeforeName)) {
return false;
}
addPatch(myPatch, beforeFile);
FilePath filePath = VcsUtil.getFilePath(beforeFile.getParent(), beforeFile.getName(), beforeFile.isDirectory());
if (myPatch.isDeletedFile() || myPatch.getAfterName() == null) {
myDeletedPaths.add(filePath);
}
myBeforePaths.add(filePath);
return true;
}
}
private class CheckAdded extends CheckPath {
private CheckAdded(final FilePatch path) {
super(path);
}
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
if (afterFile != null) {
context.addOverrideExisting(myPatch, VcsUtil.getFilePath(afterFile));
}
return true;
}
@Override
public boolean check() throws IOException {
final String[] pieces = RelativePathCalculator.split(myAfterName);
final VirtualFile parent = makeSureParentPathExists(pieces);
if (parent == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
String name = pieces[pieces.length - 1];
File afterFile = new File(parent.getPath(), name);
//if user already accepted overwriting, we shouldn't have created a new one
final VirtualFile file = myDelayedPrecheckContext.getOverridenPaths().contains(VcsUtil.getFilePath(afterFile))
? parent.findChild(name)
: createFile(parent, name);
if (file == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
myAddedPaths.add(VcsUtil.getFilePath(file));
if (! checkExistsAndValid(file, myAfterName)) {
return false;
}
addPatch(myPatch, file);
return true;
}
}
private class CheckMoved extends CheckPath {
private CheckMoved(final FilePatch path) {
super(path);
}
// before exists; after does not exist
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, final DelayedPrecheckContext context) {
if (beforeFile == null) {
setErrorMessage(fileNotFoundMessage(myBeforeName));
} else if (afterFile != null) {
setErrorMessage(fileAlreadyExists(afterFile.getPath()));
}
return beforeFile != null && afterFile == null;
}
@Override
public boolean check() throws IOException {
final String[] pieces = RelativePathCalculator.split(myAfterName);
final VirtualFile afterFileParent = makeSureParentPathExists(pieces);
if (afterFileParent == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
final VirtualFile beforeFile = getMappedFile(myBeforeName);
if (! checkExistsAndValid(beforeFile, myBeforeName)) {
return false;
}
      assert beforeFile != null; // if beforeFile were null, checkExistsAndValid would have returned false
myMovedFiles.put(beforeFile, new MovedFileData(afterFileParent, beforeFile, myPatch.getAfterFileName()));
addPatch(myPatch, beforeFile);
return true;
}
}
private abstract class CheckPath {
protected final String myBeforeName;
protected final String myAfterName;
protected final FilePatch myPatch;
private String myErrorMessage;
CheckPath(final FilePatch path) {
myPatch = path;
myBeforeName = path.getBeforeName();
myAfterName = path.getAfterName();
}
public String getErrorMessage() {
return myErrorMessage;
}
public void setErrorMessage(final String errorMessage) {
myErrorMessage = errorMessage;
}
public boolean canBeApplied(DelayedPrecheckContext context) {
final VirtualFile beforeFile = getMappedFile(myBeforeName);
final VirtualFile afterFile = getMappedFile(myAfterName);
return precheck(beforeFile, afterFile, context);
}
protected abstract boolean precheck(final VirtualFile beforeFile,
final VirtualFile afterFile,
DelayedPrecheckContext context);
protected abstract boolean check() throws IOException;
protected boolean checkExistsAndValid(final VirtualFile file, final String name) {
if (file == null) {
setErrorMessage(fileNotFoundMessage(name));
return false;
}
return checkModificationValid(file, name);
}
protected boolean checkModificationValid(final VirtualFile file, final String name) {
if (ApplicationManager.getApplication().isUnitTestMode() && myIgnoreContentRootsCheck) return true;
// security check to avoid overwriting system files with a patch
if (file == null || !inContent(file) || myVcsManager.getVcsRootFor(file) == null) {
setErrorMessage("File to patch found outside content root: " + name);
return false;
}
return true;
}
@Nullable
protected VirtualFile getMappedFile(String path) {
return PathMerger.getFile(myBaseDirectory, path);
}
protected FilePath getMappedFilePath(String path) {
return PathMerger.getFile(VcsUtil.getFilePath(myBaseDirectory), path);
}
private boolean inContent(VirtualFile file) {
return myVcsManager.isFileInContent(file);
}
public FilePatch getPatch() {
return myPatch;
}
}
private void addPatch(final FilePatch patch, final VirtualFile file) {
if (patch instanceof TextFilePatch) {
myTextPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.create((TextFilePatch)patch)));
}
else {
myBinaryPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.createGeneral(patch)));
}
myWritableFiles.add(file);
}
private static String fileNotFoundMessage(final String path) {
return VcsBundle.message("cannot.find.file.to.patch", path);
}
private static String fileAlreadyExists(final String path) {
return VcsBundle.message("cannot.apply.file.already.exists", path);
}
private void revert(final String errorMessage) {
PatchApplier.showError(myProject, errorMessage);
// move back
/*for (MovedFileData movedFile : myMovedFiles) {
try {
final VirtualFile current = movedFile.getCurrent();
final VirtualFile newParent = current.getParent();
final VirtualFile file;
if (! Comparing.equal(newParent, movedFile.getOldParent())) {
file = moveFile(current, movedFile.getOldParent());
} else {
file = current;
}
if (! Comparing.equal(current.getName(), movedFile.getOldName())) {
file.rename(PatchApplier.class, movedFile.getOldName());
}
}
catch (IOException e) {
// ignore: revert as much as possible
}
}
// go back
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
for (int i = myCreatedDirectories.size() - 1; i >= 0; -- i) {
final VirtualFile file = myCreatedDirectories.get(i);
try {
file.delete(PatchApplier.class);
}
catch (IOException e) {
// ignore
}
}
}
});
myBinaryPatches.clear();
myTextPatches.clear();
myWritableFiles.clear();*/
}
private static VirtualFile createFile(final VirtualFile parent, final String name) throws IOException {
return parent.createChildData(PatchApplier.class, name);
/*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
final Ref<VirtualFile> result = new Ref<VirtualFile>();
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
try {
result.set(parent.createChildData(PatchApplier.class, name));
}
catch (IOException e) {
ioExceptionRef.set(e);
}
}
});
if (! ioExceptionRef.isNull()) {
throw ioExceptionRef.get();
}
return result.get();*/
}
private static VirtualFile moveFile(final VirtualFile file, final VirtualFile newParent) throws IOException {
file.move(FilePatch.class, newParent);
return file;
/*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
try {
file.move(FilePatch.class, newParent);
}
catch (IOException e) {
ioExceptionRef.set(e);
}
}
});
if (! ioExceptionRef.isNull()) {
throw ioExceptionRef.get();
}
return file;*/
}
@Nullable
private VirtualFile makeSureParentPathExists(final String[] pieces) throws IOException {
VirtualFile child = myBaseDirectory;
final int size = pieces.length - 1;
for (int i = 0; i < size; i++) {
final String piece = pieces[i];
if (StringUtil.isEmptyOrSpaces(piece)) {
continue;
}
if ("..".equals(piece)) {
child = child.getParent();
continue;
}
VirtualFile nextChild = child.findChild(piece);
if (nextChild == null) {
nextChild = VfsUtil.createDirectories(child.getPath() + '/' + piece);
myCreatedDirectories.add(nextChild);
}
child = nextChild;
}
return child;
}
public List<PatchAndFile> getTextPatches() {
return myTextPatches;
}
public List<PatchAndFile> getBinaryPatches() {
return myBinaryPatches;
}
@NotNull
public List<VirtualFile> getWritableFiles() {
return myWritableFiles;
}
public void doMoveIfNeeded(final VirtualFile file) throws IOException {
final MovedFileData movedFile = myMovedFiles.get(file);
if (movedFile != null) {
myBeforePaths.add(VcsUtil.getFilePath(file));
ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<VirtualFile, IOException>() {
@Override
public VirtualFile compute() throws IOException {
return movedFile.doMove();
}
});
}
}
private static class MovedFileData {
private final VirtualFile myNewParent;
private final VirtualFile myCurrent;
private final String myNewName;
private MovedFileData(@NotNull final VirtualFile newParent, @NotNull final VirtualFile current, @NotNull final String newName) {
myNewParent = newParent;
myCurrent = current;
myNewName = newName;
}
public VirtualFile getCurrent() {
return myCurrent;
}
public VirtualFile getNewParent() {
return myNewParent;
}
public String getNewName() {
return myNewName;
}
public VirtualFile doMove() throws IOException {
final VirtualFile oldParent = myCurrent.getParent();
boolean needRename = !Comparing.equal(myCurrent.getName(), myNewName);
boolean needMove = !myNewParent.equals(oldParent);
if (needRename) {
if (needMove) {
File oldParentFile = VfsUtilCore.virtualToIoFile(oldParent);
File targetAfterRenameFile = new File(oldParentFile, myNewName);
if (targetAfterRenameFile.exists() && myCurrent.exists()) {
            // if there is a conflict during the first rename we have to rename to a temporary third name, then move, then rename to the final target
performRenameWithConflicts(oldParentFile);
return myCurrent;
}
}
myCurrent.rename(PatchApplier.class, myNewName);
}
if (needMove) {
myCurrent.move(PatchApplier.class, myNewParent);
}
return myCurrent;
}
private void performRenameWithConflicts(@NotNull File oldParent) throws IOException {
File tmpFileWithUniqueName = FileUtil.createTempFile(oldParent, "tempFileToMove", null, false);
File newParentFile = VfsUtilCore.virtualToIoFile(myNewParent);
File destFile = new File(newParentFile, tmpFileWithUniqueName.getName());
while (destFile.exists()) {
destFile = new File(newParentFile,
FileUtil.createTempFile(oldParent, FileUtil.getNameWithoutExtension(destFile.getName()), null, false)
.getName());
}
myCurrent.rename(PatchApplier.class, destFile.getName());
myCurrent.move(PatchApplier.class, myNewParent);
myCurrent.rename(PatchApplier.class, myNewName);
}
}
private static class DelayedPrecheckContext {
private final Map<FilePath, FilePatch> mySkipDeleted;
private final Map<FilePath, FilePatch> myOverrideExisting;
private final List<FilePath> myOverridenPaths;
private final Project myProject;
private DelayedPrecheckContext(final Project project) {
myProject = project;
myOverrideExisting = new HashMap<>();
mySkipDeleted = new HashMap<>();
myOverridenPaths = new LinkedList<>();
}
public void addSkip(final FilePath path, final FilePatch filePatch) {
mySkipDeleted.put(path, filePatch);
}
public void addOverrideExisting(final FilePatch patch, final FilePath filePath) {
if (! myOverrideExisting.containsKey(filePath)) {
myOverrideExisting.put(filePath, patch);
}
}
// returns those to be skipped
public Collection<FilePatch> doDelayed() {
final List<FilePatch> result = new LinkedList<>();
if (! myOverrideExisting.isEmpty()) {
final String title = "Overwrite Existing Files";
List<FilePath> files = new ArrayList<>(myOverrideExisting.keySet());
Collection<FilePath> selected = AbstractVcsHelper.getInstance(myProject).selectFilePathsToProcess(
files, title,
"\nThe following files should be created by patch, but they already exist.\nDo you want to overwrite them?\n", title,
"The following file should be created by patch, but it already exists.\nDo you want to overwrite it?\n{0}",
VcsShowConfirmationOption.STATIC_SHOW_CONFIRMATION,
"Overwrite", "Cancel");
if (selected != null) {
for (FilePath path : selected) {
myOverrideExisting.remove(path);
}
}
result.addAll(myOverrideExisting.values());
if (selected != null) {
myOverridenPaths.addAll(selected);
}
}
result.addAll(mySkipDeleted.values());
return result;
}
public List<FilePath> getOverridenPaths() {
return myOverridenPaths;
}
public Collection<FilePath> getAlreadyDeletedPaths() {
return mySkipDeleted.keySet();
}
}
public void setIgnoreContentRootsCheck(boolean ignoreContentRootsCheck) {
myIgnoreContentRootsCheck = ignoreContentRootsCheck;
}
public static class PatchAndFile {
private final VirtualFile myFile;
private final ApplyFilePatchBase<?> myPatch;
public PatchAndFile(VirtualFile file, ApplyFilePatchBase<?> patch) {
myFile = file;
myPatch = patch;
}
public VirtualFile getFile() {
return myFile;
}
public ApplyFilePatchBase<?> getApplyPatch() {
return myPatch;
}
}
}
| goodwinnk/intellij-community | platform/vcs-impl/src/com/intellij/openapi/diff/impl/patch/formove/PathsVerifier.java | Java | apache-2.0 | 23,092 |
package com.therabbitmage.android.beacon.network;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.message.BasicHeader;
import android.net.Uri;
import android.util.Log;
import com.therabbitmage.android.beacon.entities.google.urlshortener.Url;
public final class URLShortenerAPI {
private static final String TAG = URLShortenerAPI.class.getSimpleName();
private static final String BASE_URL = "https://www.googleapis.com/urlshortener/v1/url";
public static NetworkResponse urlShorten(String url) throws IOException, URISyntaxException{
android.net.Uri.Builder uriBuilder = Uri.parse(BASE_URL).buildUpon();
String uri = uriBuilder.build().toString();
Header[] headers = new Header[1];
headers[0] = new BasicHeader(ApacheNetworkUtils.HEADER_CONTENT_TYPE, ApacheNetworkUtils.TYPE_JSON);
ApacheNetworkUtils.getAndroidInstance(ApacheNetworkUtils.sUserAgent, false);
HttpResponse response = ApacheNetworkUtils.post(
uri,
ApacheNetworkUtils.getDefaultApacheHeaders(),
new Url(url).toJson());
ApacheNetworkUtils.toStringResponseHeaders(response.getAllHeaders());
ApacheNetworkUtils.toStringStatusLine(response.getStatusLine());
HttpEntity entity = response.getEntity();
NetworkResponse networkResponse = new NetworkResponse();
if(response.getStatusLine().getStatusCode() == HttpStatus.SC_OK){
networkResponse.setError(0);
BufferedReader br = new BufferedReader(new InputStreamReader(entity.getContent()));
StringBuilder stringBuilder = new StringBuilder();
      String output;
while((output = br.readLine()) != null){
stringBuilder.append(output);
}
br.close();
Log.i(TAG, "Body: " + stringBuilder.toString());
networkResponse.setUrl(Url.fromJson(stringBuilder.toString()));
} else {
networkResponse.setError(1);
}
return networkResponse;
}
}
| GregSaintJean/Beacon | src/com/therabbitmage/android/beacon/network/URLShortenerAPI.java | Java | apache-2.0 | 2,158 |
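/*
 * Illustrative usage sketch, not part of the original file above. It assumes it sits in
 * the same package as URLShortenerAPI and that NetworkResponse exposes getError() and
 * getUrl() accessors mirroring the setters used in urlShorten(); adjust the names if the
 * real class differs. On Android this must run off the main thread.
 */
class URLShortenerAPIUsageSketch {
    static void shortenExample() throws Exception {
        // Shorten a long link and inspect the result wrapper.
        NetworkResponse response = URLShortenerAPI.urlShorten("http://example.com/some/very/long/path");
        if (response.getError() == 0) {
            System.out.println("Shortened url entity: " + response.getUrl());
        } else {
            System.out.println("Shortening failed");
        }
    }
}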
/**
* Copyright (c) 2015 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jmnarloch.spring.jaxrs.client.support;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import javax.ws.rs.ext.Provider;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
/**
* Tests the {@link JaxRsClientProxyFactorySupport} class.
*
* @author Jakub Narloch
*/
public class JaxRsClientProxyFactorySupportTest {
/**
* The instance of the tested class.
*/
private JaxRsClientProxyFactorySupport instance;
/**
* Sets up the test environment.
*
* @throws Exception if any error occurs
*/
@Before
public void setUp() throws Exception {
instance = new MockJaxRsClientProxyFactorySupport();
}
@Test
public void shouldRetrieveProviders() {
// given
final List<JaxRsClientConfigurer> configurers = Arrays.asList(
mock(JaxRsClientConfigurer.class),
mock(JaxRsClientConfigurer.class)
);
for(JaxRsClientConfigurer conf : configurers) {
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
((ProviderRegistry)invocation.getArguments()[0]).addProvider(SimpleProvider.class);
return null;
}
}).when(conf).registerProviders(any(ProviderRegistry.class));
}
instance.setConfigurers(configurers);
// when
Class<?>[] providers = instance.getProviders();
// then
assertNotNull(providers);
assertEquals(2, providers.length);
}
private static class MockJaxRsClientProxyFactorySupport extends JaxRsClientProxyFactorySupport {
@Override
public <T> T createClientProxy(Class<T> serviceClass, String serviceUrl) {
return null;
}
}
/**
* A simple provider class used for testing.
*
* @author Jakub Narloch
*/
@Provider
private static class SimpleProvider {
}
} | jmnarloch/spring-jax-rs-client-proxy | src/test/java/com/github/jmnarloch/spring/jaxrs/client/support/JaxRsClientProxyFactorySupportTest.java | Java | apache-2.0 | 2,921 |
/*
Copyright (c) 2012 Marco Amadei.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.ucanaccess.test;
import java.sql.Connection;
import com.healthmarketscience.jackcess.Database.FileFormat;
public class PasswordTest extends UcanaccessTestBase {
public PasswordTest() {
super();
}
public PasswordTest(FileFormat accVer) {
super(accVer);
}
public String getAccessPath() {
return "net/ucanaccess/test/resources/pwd.mdb";
}
protected void setUp() throws Exception {}
public void testPassword() throws Exception {
Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
Connection ucanaccessConnection = null;
try {
ucanaccessConnection = getUcanaccessConnection();
} catch (Exception e) {
}
assertNull(ucanaccessConnection);
super.setPassword("ucanaccess");
//url will be
try {
ucanaccessConnection = getUcanaccessConnection();
} catch (Exception e) {
e.printStackTrace();
}
assertNotNull(ucanaccessConnection);
}
}
| lmu-bioinformatics/ucanaccess | src/test/java/net/ucanaccess/test/PasswordTest.java | Java | apache-2.0 | 1,516 |
/*
* Copyright 2017-2019 University of Hildesheim, Software Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ssehub.kernel_haven.code_model;
import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.config.DefaultSettings;
import net.ssehub.kernel_haven.provider.AbstractCache;
import net.ssehub.kernel_haven.provider.AbstractProvider;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
/**
* The provider for the code model. This class serves as an intermediate between the analysis and the code model
* extractor.
*
* @author Adam
*/
public class CodeModelProvider extends AbstractProvider<SourceFile<?>> {
@Override
protected long getTimeout() {
return config.getValue(DefaultSettings.CODE_PROVIDER_TIMEOUT);
}
@Override
protected @NonNull List<@NonNull File> getTargets() throws SetUpException {
List<@NonNull File> result = new LinkedList<>();
Pattern pattern = config.getValue(DefaultSettings.CODE_EXTRACTOR_FILE_REGEX);
for (String relativeStr : config.getValue(DefaultSettings.CODE_EXTRACTOR_FILES)) {
File relativeFile = new File(relativeStr);
File absoluteFile = new File(config.getValue(DefaultSettings.SOURCE_TREE), relativeFile.getPath());
if (absoluteFile.isFile()) {
result.add(relativeFile);
} else if (absoluteFile.isDirectory()) {
readFilesFromDirectory(absoluteFile, pattern, result);
} else {
throw new SetUpException("Non-existing file specified in code.extractor.files: "
+ relativeFile.getPath());
}
}
return result;
}
/**
* Finds all files in the given directory (recursively) that match the given
* pattern. The files that match are added to filesToParse.
*
* @param directory
* The directory to search in.
* @param pattern
* The pattern to check against.
* @param result
* The list to add the found files to.
*/
private void readFilesFromDirectory(File directory, Pattern pattern, List<File> result) {
for (File file : directory.listFiles()) {
if (file.isDirectory()) {
readFilesFromDirectory(file, pattern, result);
} else {
Matcher m = pattern.matcher(file.getName());
if (m.matches()) {
result.add(config.getValue(DefaultSettings.SOURCE_TREE).toPath()
.relativize(file.toPath()).toFile());
}
}
}
}
@Override
protected @NonNull AbstractCache<SourceFile<?>> createCache() {
return new JsonCodeModelCache(config.getValue(DefaultSettings.CACHE_DIR),
config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_COMPRESS));
}
@Override
public boolean readCache() {
return config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_READ);
}
@Override
public boolean writeCache() {
return config.getValue(DefaultSettings.CODE_PROVIDER_CACHE_WRITE);
}
@Override
public int getNumberOfThreads() {
return config.getValue(DefaultSettings.CODE_EXTRACTOR_THREADS);
}
}
| KernelHaven/KernelHaven | src/net/ssehub/kernel_haven/code_model/CodeModelProvider.java | Java | apache-2.0 | 3,998 |
/*
* Copyright 2005-2008 The Kuali Foundation
*
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.edl.impl;
public class TestConfigProcessor extends TestEDLModelCompent {
}
| sbower/kuali-rice-1 | it/kew/src/test/java/org/kuali/rice/edl/impl/TestConfigProcessor.java | Java | apache-2.0 | 734 |
package org.apache.hadoop.hive.kafka.camus;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map;
/**
 * The key for the MapReduce job that pulls from Kafka. Contains offsets and the
* checksum.
*/
public class KafkaKey implements WritableComparable<KafkaKey>, IKafkaKey {
public static final Text SERVER = new Text("server");
public static final Text SERVICE = new Text("service");
public static KafkaKey DUMMY_KEY = new KafkaKey();
private String leaderId = "";
private int partition = 0;
private long beginOffset = 0;
private long offset = 0;
private long checksum = 0;
private String topic = "";
private long time = 0;
private String server = "";
private String service = "";
private MapWritable partitionMap = new MapWritable();
/**
* dummy empty constructor
*/
public KafkaKey() {
this("dummy", "0", 0, 0, 0, 0);
}
public KafkaKey(KafkaKey other) {
this.partition = other.partition;
this.beginOffset = other.beginOffset;
this.offset = other.offset;
this.checksum = other.checksum;
this.topic = other.topic;
this.time = other.time;
this.server = other.server;
this.service = other.service;
this.partitionMap = new MapWritable(other.partitionMap);
}
public KafkaKey(String topic, String leaderId, int partition) {
this.set(topic, leaderId, partition, 0, 0, 0);
}
public KafkaKey(String topic, String leaderId, int partition, long beginOffset, long offset) {
this.set(topic, leaderId, partition, beginOffset, offset, 0);
}
public KafkaKey(String topic, String leaderId, int partition, long beginOffset, long offset, long checksum) {
this.set(topic, leaderId, partition, beginOffset, offset, checksum);
}
public void set(String topic, String leaderId, int partition, long beginOffset, long offset, long checksum) {
this.leaderId = leaderId;
this.partition = partition;
this.beginOffset = beginOffset;
this.offset = offset;
this.checksum = checksum;
this.topic = topic;
this.time = System.currentTimeMillis(); // if event can't be decoded,
// this time will be used for
// debugging.
}
public void clear() {
leaderId = "";
partition = 0;
beginOffset = 0;
offset = 0;
checksum = 0;
topic = "";
time = 0;
server = "";
service = "";
partitionMap = new MapWritable();
}
public String getServer() {
return partitionMap.get(SERVER).toString();
}
public void setServer(String newServer) {
partitionMap.put(SERVER, new Text(newServer));
}
public String getService() {
return partitionMap.get(SERVICE).toString();
}
public void setService(String newService) {
partitionMap.put(SERVICE, new Text(newService));
}
public long getTime() {
return time;
}
public void setTime(long time) {
this.time = time;
}
public String getTopic() {
return topic;
}
public String getLeaderId() {
return leaderId;
}
public int getPartition() {
return this.partition;
}
public long getBeginOffset() {
return this.beginOffset;
}
public void setOffset(long offset) {
this.offset = offset;
}
public long getOffset() {
return this.offset;
}
public long getChecksum() {
return this.checksum;
}
@Override
public long getMessageSize() {
Text key = new Text("message.size");
if (this.partitionMap.containsKey(key))
return ((LongWritable) this.partitionMap.get(key)).get();
else
return 1024; //default estimated size
}
public void setMessageSize(long messageSize) {
Text key = new Text("message.size");
put(key, new LongWritable(messageSize));
}
public void put(Writable key, Writable value) {
this.partitionMap.put(key, value);
}
public void addAllPartitionMap(MapWritable partitionMap) {
this.partitionMap.putAll(partitionMap);
}
public MapWritable getPartitionMap() {
return partitionMap;
}
@Override
public void readFields(DataInput in) throws IOException {
this.leaderId = UTF8.readString(in);
this.partition = in.readInt();
this.beginOffset = in.readLong();
this.offset = in.readLong();
this.checksum = in.readLong();
this.topic = in.readUTF();
this.time = in.readLong();
this.server = in.readUTF(); // left for legacy
this.service = in.readUTF(); // left for legacy
this.partitionMap = new MapWritable();
try {
this.partitionMap.readFields(in);
} catch (IOException e) {
this.setServer(this.server);
this.setService(this.service);
}
}
@Override
public void write(DataOutput out) throws IOException {
UTF8.writeString(out, this.leaderId);
out.writeInt(this.partition);
out.writeLong(this.beginOffset);
out.writeLong(this.offset);
out.writeLong(this.checksum);
out.writeUTF(this.topic);
out.writeLong(this.time);
out.writeUTF(this.server); // left for legacy
out.writeUTF(this.service); // left for legacy
this.partitionMap.write(out);
}
@Override
public int compareTo(KafkaKey o) {
if (partition != o.partition) {
      return partition - o.partition;
} else {
if (offset > o.offset) {
return 1;
} else if (offset < o.offset) {
return -1;
} else {
if (checksum > o.checksum) {
return 1;
} else if (checksum < o.checksum) {
return -1;
} else {
return 0;
}
}
}
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("topic=");
builder.append(topic);
builder.append(" partition=");
builder.append(partition);
builder.append("leaderId=");
builder.append(leaderId);
builder.append(" server=");
builder.append(server);
builder.append(" service=");
builder.append(service);
builder.append(" beginOffset=");
builder.append(beginOffset);
builder.append(" offset=");
builder.append(offset);
builder.append(" msgSize=");
builder.append(getMessageSize());
builder.append(" server=");
builder.append(server);
builder.append(" checksum=");
builder.append(checksum);
builder.append(" time=");
builder.append(time);
for (Map.Entry<Writable, Writable> e : partitionMap.entrySet()) {
builder.append(" " + e.getKey() + "=");
builder.append(e.getValue().toString());
}
return builder.toString();
}
}
| HiveKa/HiveKa | src/main/java/org/apache/hadoop/hive/kafka/camus/KafkaKey.java | Java | apache-2.0 | 6,733 |
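/*
 * Illustrative sketch, not part of the original file above. It shows how an input format
 * might fill in a KafkaKey and advance it as messages are consumed, using only the public
 * constructors and accessors defined in the class; the topic, leader and offsets are made up.
 */
class KafkaKeyUsageSketch {
    static KafkaKey exampleKey() {
        // topic "clicks", leader "broker-1", partition 3, begin offset and current offset 100
        KafkaKey key = new KafkaKey("clicks", "broker-1", 3, 100L, 100L);
        key.setMessageSize(512L);           // remember the size of the message just read
        key.setOffset(key.getOffset() + 1); // advance past that message
        return key;
    }
}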
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.jstestdriver.output;
/**
* Escapes and formats a filename.
*
* @author Cory Smith (corbinrsmith@gmail.com)
*/
public class FileNameFormatter {
public String format(String path, String format) {
String escaped = path
.replace('/', 'a')
.replace('\\', 'a')
.replace(">", "a")
.replace(":", "a")
.replace(":", "a")
.replace(";", "a")
.replace("+", "a")
.replace(",", "a")
.replace("<", "a")
.replace("?", "a")
.replace("*", "a")
.replace(" ", "a");
return String.format(format, escaped.length() > 200 ? escaped.substring(0, 200) : escaped);
}
}
| BladeRunnerJS/brjs-JsTestDriver | JsTestDriver/src/com/google/jstestdriver/output/FileNameFormatter.java | Java | apache-2.0 | 1,271 |
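/*
 * Illustrative sketch, not part of the original file above: format() replaces path
 * separators and other characters that are unsafe in file names with 'a', truncates
 * anything longer than 200 characters, and then applies the given format string.
 */
class FileNameFormatterUsageSketch {
    public static void main(String[] args) {
        FileNameFormatter formatter = new FileNameFormatter();
        // Prints "TEST-srcatestafoo.js.xml" - both '/' characters become 'a'.
        System.out.println(formatter.format("src/test/foo.js", "TEST-%s.xml"));
    }
}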
/**
* Copyright 2014 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.wallet;
import com.google.bitcoin.crypto.*;
import com.google.bitcoin.store.UnreadableWalletException;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import org.bitcoinj.wallet.Protos;
import org.spongycastle.crypto.params.KeyParameter;
import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.List;
import static com.google.bitcoin.core.Utils.HEX;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* Holds the seed bytes for the BIP32 deterministic wallet algorithm, inside a
* {@link com.google.bitcoin.wallet.DeterministicKeyChain}. The purpose of this wrapper is to simplify the encryption
* code.
*/
public class DeterministicSeed implements EncryptableItem {
// It would take more than 10^12 years to brute-force a 128 bit seed using $1B worth of computing equipment.
public static final int DEFAULT_SEED_ENTROPY_BITS = 128;
public static final int MAX_SEED_ENTROPY_BITS = 512;
@Nullable private final byte[] seed;
@Nullable private List<String> mnemonicCode;
@Nullable private EncryptedData encryptedMnemonicCode;
private final long creationTimeSeconds;
public DeterministicSeed(String mnemonicCode, String passphrase, long creationTimeSeconds) throws UnreadableWalletException {
this(decodeMnemonicCode(mnemonicCode), passphrase, creationTimeSeconds);
}
public DeterministicSeed(byte[] seed, List<String> mnemonic, long creationTimeSeconds) {
this.seed = checkNotNull(seed);
this.mnemonicCode = checkNotNull(mnemonic);
this.encryptedMnemonicCode = null;
this.creationTimeSeconds = creationTimeSeconds;
}
public DeterministicSeed(EncryptedData encryptedMnemonic, long creationTimeSeconds) {
this.seed = null;
this.mnemonicCode = null;
this.encryptedMnemonicCode = checkNotNull(encryptedMnemonic);
this.creationTimeSeconds = creationTimeSeconds;
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param mnemonicCode A list of words.
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(List<String> mnemonicCode, String passphrase, long creationTimeSeconds) {
this(MnemonicCode.toSeed(mnemonicCode, passphrase), mnemonicCode, creationTimeSeconds);
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param random Entropy source
* @param bits number of bits, must be divisible by 32
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(SecureRandom random, int bits, String passphrase, long creationTimeSeconds) {
this(getEntropy(random, bits), passphrase, creationTimeSeconds);
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param entropy entropy bits, length must be divisible by 32
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(byte[] entropy, String passphrase, long creationTimeSeconds) {
Preconditions.checkArgument(entropy.length % 4 == 0, "entropy size in bits not divisible by 32");
Preconditions.checkArgument(entropy.length * 8 >= DEFAULT_SEED_ENTROPY_BITS, "entropy size too small");
try {
this.mnemonicCode = MnemonicCode.INSTANCE.toMnemonic(entropy);
} catch (MnemonicException.MnemonicLengthException e) {
// cannot happen
throw new RuntimeException(e);
}
this.seed = MnemonicCode.toSeed(mnemonicCode, passphrase);
this.encryptedMnemonicCode = null;
this.creationTimeSeconds = creationTimeSeconds;
}
private static byte[] getEntropy(SecureRandom random, int bits) {
Preconditions.checkArgument(bits <= MAX_SEED_ENTROPY_BITS, "requested entropy size too large");
byte[] seed = new byte[bits / 8];
random.nextBytes(seed);
return seed;
}
@Override
public boolean isEncrypted() {
checkState(mnemonicCode != null || encryptedMnemonicCode != null);
return encryptedMnemonicCode != null;
}
@Override
public String toString() {
if (isEncrypted())
return "DeterministicSeed [encrypted]";
else
return "DeterministicSeed " + toHexString() +
((mnemonicCode != null) ? " " + Joiner.on(" ").join(mnemonicCode) : "");
}
/** Returns the seed as hex or null if encrypted. */
@Nullable
public String toHexString() {
if (seed != null)
return HEX.encode(seed);
else
return null;
}
@Nullable
@Override
public byte[] getSecretBytes() {
return getMnemonicAsBytes();
}
@Nullable
public byte[] getSeedBytes() {
return seed;
}
@Nullable
@Override
public EncryptedData getEncryptedData() {
return encryptedMnemonicCode;
}
@Override
public Protos.Wallet.EncryptionType getEncryptionType() {
return Protos.Wallet.EncryptionType.ENCRYPTED_SCRYPT_AES;
}
@Override
public long getCreationTimeSeconds() {
return creationTimeSeconds;
}
public DeterministicSeed encrypt(KeyCrypter keyCrypter, KeyParameter aesKey) {
checkState(encryptedMnemonicCode == null, "Trying to encrypt seed twice");
checkState(mnemonicCode != null, "Mnemonic missing so cannot encrypt");
EncryptedData mnemonic = keyCrypter.encrypt(getMnemonicAsBytes(), aesKey);
return new DeterministicSeed(mnemonic, creationTimeSeconds);
}
private byte[] getMnemonicAsBytes() {
return Joiner.on(" ").join(mnemonicCode).getBytes(Charsets.UTF_8);
}
public DeterministicSeed decrypt(KeyCrypter crypter, String passphrase, KeyParameter aesKey) {
checkState(isEncrypted());
checkNotNull(encryptedMnemonicCode);
List<String> mnemonic = null;
try {
mnemonic = decodeMnemonicCode(crypter.decrypt(encryptedMnemonicCode, aesKey));
} catch (UnreadableWalletException e) {
// TODO what is the best way to handle this exception?
throw new RuntimeException(e);
}
return new DeterministicSeed(mnemonic, passphrase, creationTimeSeconds);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeterministicSeed seed = (DeterministicSeed) o;
if (creationTimeSeconds != seed.creationTimeSeconds) return false;
if (encryptedMnemonicCode != null) {
if (seed.encryptedMnemonicCode == null) return false;
if (!encryptedMnemonicCode.equals(seed.encryptedMnemonicCode)) return false;
} else {
if (!mnemonicCode.equals(seed.mnemonicCode)) return false;
}
return true;
}
@Override
public int hashCode() {
int result = encryptedMnemonicCode != null ? encryptedMnemonicCode.hashCode() : mnemonicCode.hashCode();
result = 31 * result + (int) (creationTimeSeconds ^ (creationTimeSeconds >>> 32));
return result;
}
/**
* Check if our mnemonic is a valid mnemonic phrase for our word list.
* Does nothing if we are encrypted.
*
* @throws com.google.bitcoin.crypto.MnemonicException if check fails
*/
public void check() throws MnemonicException {
if (mnemonicCode != null)
MnemonicCode.INSTANCE.check(mnemonicCode);
}
byte[] getEntropyBytes() throws MnemonicException {
return MnemonicCode.INSTANCE.toEntropy(mnemonicCode);
}
/** Get the mnemonic code, or null if unknown. */
@Nullable
public List<String> getMnemonicCode() {
return mnemonicCode;
}
private static List<String> decodeMnemonicCode(byte[] mnemonicCode) throws UnreadableWalletException {
try {
return Splitter.on(" ").splitToList(new String(mnemonicCode, "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new UnreadableWalletException(e.toString());
}
}
private static List<String> decodeMnemonicCode(String mnemonicCode) {
return Splitter.on(" ").splitToList(mnemonicCode);
}
}
| troggy/bitcoinj | core/src/main/java/com/google/bitcoin/wallet/DeterministicSeed.java | Java | apache-2.0 | 9,791 |
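/*
 * Illustrative sketch, not part of the original file above. It creates a fresh random seed
 * with the public constructor defined earlier and prints its BIP 39 mnemonic and seed bytes.
 * It assumes the same package (or an import of DeterministicSeed) and that the BIP 39 English
 * word list is available to MnemonicCode.INSTANCE, as bitcoinj normally arranges.
 */
class DeterministicSeedUsageSketch {
    public static void main(String[] args) {
        DeterministicSeed seed = new DeterministicSeed(
                new java.security.SecureRandom(),
                DeterministicSeed.DEFAULT_SEED_ENTROPY_BITS,
                "",                                 // empty BIP 39 passphrase
                System.currentTimeMillis() / 1000); // creation time, UNIX seconds
        System.out.println("mnemonic: "
                + com.google.common.base.Joiner.on(' ').join(seed.getMnemonicCode()));
        System.out.println("seed hex: " + seed.toHexString());
    }
}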
/*
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.rxjava3.internal.operators.flowable;
import static org.junit.Assert.*;
import java.util.*;
import java.util.concurrent.ExecutionException;
import org.junit.Test;
import org.reactivestreams.*;
import io.reactivex.rxjava3.core.*;
import io.reactivex.rxjava3.functions.*;
import io.reactivex.rxjava3.internal.subscriptions.BooleanSubscription;
import io.reactivex.rxjava3.schedulers.Schedulers;
import io.reactivex.rxjava3.subscribers.*;
import io.reactivex.rxjava3.testsupport.*;
public class FlowableMaterializeTest extends RxJavaTest {
@Test
public void materialize1() {
// null will cause onError to be triggered before "three" can be
// returned
final TestAsyncErrorObservable o1 = new TestAsyncErrorObservable("one", "two", null,
"three");
TestNotificationSubscriber observer = new TestNotificationSubscriber();
Flowable<Notification<String>> m = Flowable.unsafeCreate(o1).materialize();
m.subscribe(observer);
try {
o1.t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertFalse(observer.onError);
assertTrue(observer.onComplete);
assertEquals(3, observer.notifications.size());
assertTrue(observer.notifications.get(0).isOnNext());
assertEquals("one", observer.notifications.get(0).getValue());
assertTrue(observer.notifications.get(1).isOnNext());
assertEquals("two", observer.notifications.get(1).getValue());
assertTrue(observer.notifications.get(2).isOnError());
assertEquals(NullPointerException.class, observer.notifications.get(2).getError().getClass());
}
@Test
public void materialize2() {
final TestAsyncErrorObservable o1 = new TestAsyncErrorObservable("one", "two", "three");
TestNotificationSubscriber subscriber = new TestNotificationSubscriber();
Flowable<Notification<String>> m = Flowable.unsafeCreate(o1).materialize();
m.subscribe(subscriber);
try {
o1.t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertFalse(subscriber.onError);
assertTrue(subscriber.onComplete);
assertEquals(4, subscriber.notifications.size());
assertTrue(subscriber.notifications.get(0).isOnNext());
assertEquals("one", subscriber.notifications.get(0).getValue());
assertTrue(subscriber.notifications.get(1).isOnNext());
assertEquals("two", subscriber.notifications.get(1).getValue());
assertTrue(subscriber.notifications.get(2).isOnNext());
assertEquals("three", subscriber.notifications.get(2).getValue());
assertTrue(subscriber.notifications.get(3).isOnComplete());
}
@Test
public void multipleSubscribes() throws InterruptedException, ExecutionException {
final TestAsyncErrorObservable o = new TestAsyncErrorObservable("one", "two", null, "three");
Flowable<Notification<String>> m = Flowable.unsafeCreate(o).materialize();
assertEquals(3, m.toList().toFuture().get().size());
assertEquals(3, m.toList().toFuture().get().size());
}
@Test
public void backpressureOnEmptyStream() {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
Flowable.<Integer> empty().materialize().subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertValueCount(1);
assertTrue(ts.values().get(0).isOnComplete());
ts.assertComplete();
}
@Test
public void backpressureNoError() {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
Flowable.just(1, 2, 3).materialize().subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertValueCount(1);
ts.request(2);
ts.assertValueCount(3);
ts.request(1);
ts.assertValueCount(4);
ts.assertComplete();
}
@Test
public void backpressureNoErrorAsync() throws InterruptedException {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
Flowable.just(1, 2, 3)
.materialize()
.subscribeOn(Schedulers.computation())
.subscribe(ts);
Thread.sleep(100);
ts.assertNoValues();
ts.request(1);
Thread.sleep(100);
ts.assertValueCount(1);
ts.request(2);
Thread.sleep(100);
ts.assertValueCount(3);
ts.request(1);
Thread.sleep(100);
ts.assertValueCount(4);
ts.assertComplete();
}
@Test
public void backpressureWithError() {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
Flowable.<Integer> error(new IllegalArgumentException()).materialize().subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertValueCount(1);
ts.assertComplete();
}
@Test
public void backpressureWithEmissionThenError() {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
IllegalArgumentException ex = new IllegalArgumentException();
Flowable.fromIterable(Arrays.asList(1)).concatWith(Flowable.<Integer> error(ex)).materialize()
.subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertValueCount(1);
assertTrue(ts.values().get(0).isOnNext());
ts.request(1);
ts.assertValueCount(2);
assertTrue(ts.values().get(1).isOnError());
assertEquals(ex, ts.values().get(1).getError());
ts.assertComplete();
}
@Test
public void withCompletionCausingError() {
TestSubscriberEx<Notification<Integer>> ts = new TestSubscriberEx<>();
final RuntimeException ex = new RuntimeException("boo");
Flowable.<Integer>empty().materialize().doOnNext(new Consumer<Object>() {
@Override
public void accept(Object t) {
throw ex;
}
}).subscribe(ts);
ts.assertError(ex);
ts.assertNoValues();
ts.assertTerminated();
}
@Test
public void unsubscribeJustBeforeCompletionNotificationShouldPreventThatNotificationArriving() {
TestSubscriber<Notification<Integer>> ts = new TestSubscriber<>(0L);
Flowable.<Integer>empty().materialize()
.subscribe(ts);
ts.assertNoValues();
ts.cancel();
ts.request(1);
ts.assertNoValues();
}
private static class TestNotificationSubscriber extends DefaultSubscriber<Notification<String>> {
boolean onComplete;
boolean onError;
List<Notification<String>> notifications = new Vector<>();
@Override
public void onComplete() {
this.onComplete = true;
}
@Override
public void onError(Throwable e) {
this.onError = true;
}
@Override
public void onNext(Notification<String> value) {
this.notifications.add(value);
}
}
private static class TestAsyncErrorObservable implements Publisher<String> {
String[] valuesToReturn;
TestAsyncErrorObservable(String... values) {
valuesToReturn = values;
}
volatile Thread t;
@Override
public void subscribe(final Subscriber<? super String> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
t = new Thread(new Runnable() {
@Override
public void run() {
for (String s : valuesToReturn) {
if (s == null) {
System.out.println("throwing exception");
try {
Thread.sleep(100);
} catch (Throwable e) {
}
subscriber.onError(new NullPointerException());
return;
} else {
subscriber.onNext(s);
}
}
System.out.println("subscription complete");
subscriber.onComplete();
}
});
t.start();
}
}
@Test
public void backpressure() {
TestSubscriber<Notification<Integer>> ts = Flowable.range(1, 5).materialize().test(0);
ts.assertEmpty();
ts.request(5);
ts.assertValueCount(5)
.assertNoErrors()
.assertNotComplete();
ts.request(1);
ts.assertValueCount(6)
.assertNoErrors()
.assertComplete();
}
@Test
public void dispose() {
TestHelper.checkDisposed(Flowable.just(1).materialize());
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Notification<Object>>>() {
@Override
public Flowable<Notification<Object>> apply(Flowable<Object> f) throws Exception {
return f.materialize();
}
});
}
@Test
public void badSource() {
TestHelper.checkBadSourceFlowable(new Function<Flowable<Object>, Object>() {
@Override
public Object apply(Flowable<Object> f) throws Exception {
return f.materialize();
}
}, false, null, null, Notification.createOnComplete());
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.just(1).materialize());
}
}
| ReactiveX/RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableMaterializeTest.java | Java | apache-2.0 | 10,356 |
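/*
 * Illustrative sketch, not part of the original test class above: the smallest possible
 * materialize() example. Every onNext, onError and onComplete signal of the source is
 * re-emitted as an onNext of a Notification object, followed by onComplete.
 */
class MaterializeUsageSketch {
    public static void main(String[] args) {
        io.reactivex.rxjava3.core.Flowable.just("a", "b")
                .materialize()
                // prints one Notification per signal: two onNext values, then the completion
                .subscribe(notification -> System.out.println(notification));
    }
}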
package net.unicon.cas.addons.serviceregistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
/**
 * <code>BeanFactoryPostProcessor</code> that removes the 2 Quartz beans responsible for reloading the default services registry's registered services.
 * <p/>
 * Useful in cases where other facilities are responsible for reloading the in-memory services cache, for example on-demand reloading
 * of the JSON services registry, etc.
 * <p/>
 * This bean just needs to be declared in CAS' application context; upon bootstrap Spring will call back into it and
 * the 2 Quartz scheduling beans dedicated to the services registry reloading thread will be removed from the final application context,
 * effectively disabling the default reloading behavior.
*
* @author Dmitriy Kopylenko
* @author Unicon, inc.
* @since 1.8
*/
public class RegisteredServicesReloadDisablingBeanFactoryPostProcessor implements BeanFactoryPostProcessor {
private static final String JOB_DETAIL_BEAN_NAME = "serviceRegistryReloaderJobDetail";
private static final String JOB_TRIGGER_BEAN_NAME = "periodicServiceRegistryReloaderTrigger";
private static final Logger logger = LoggerFactory.getLogger(RegisteredServicesReloadDisablingBeanFactoryPostProcessor.class);
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
logger.debug("Removing [{}] bean definition from the application context...", JOB_DETAIL_BEAN_NAME);
BeanDefinitionRegistry.class.cast(beanFactory).removeBeanDefinition(JOB_DETAIL_BEAN_NAME);
logger.debug("Removing [{}] bean definition from the application context...", JOB_TRIGGER_BEAN_NAME);
BeanDefinitionRegistry.class.cast(beanFactory).removeBeanDefinition(JOB_TRIGGER_BEAN_NAME);
}
}
| Unicon/cas-addons | src/main/java/net/unicon/cas/addons/serviceregistry/RegisteredServicesReloadDisablingBeanFactoryPostProcessor.java | Java | apache-2.0 | 2,060 |
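/*
 * Illustrative sketch, not part of the original file above. The Javadoc refers to declaring
 * the bean in CAS' (typically XML) application context; this is one hedged way to do the same
 * thing from Spring Java config. The static modifier follows the usual convention for
 * BeanFactoryPostProcessor beans so that they are instantiated before regular beans.
 */
@org.springframework.context.annotation.Configuration
class ReloadDisablingConfigSketch {
    @org.springframework.context.annotation.Bean
    public static RegisteredServicesReloadDisablingBeanFactoryPostProcessor registeredServicesReloadDisabler() {
        return new RegisteredServicesReloadDisablingBeanFactoryPostProcessor();
    }
}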
/* Copyright 2008, 2009, 2010 by the Oxford University Computing Laboratory
This file is part of HermiT.
HermiT is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HermiT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with HermiT. If not, see <http://www.gnu.org/licenses/>.
*/
package org.semanticweb.HermiT.datatypes.owlreal;
import java.math.BigDecimal;
import java.math.BigInteger;
public enum NumberRange {
NOTHING, INTEGER, DECIMAL, RATIONAL, REAL;
public boolean isDense() {
return ordinal() >= DECIMAL.ordinal();
}
public static NumberRange intersection(NumberRange it1, NumberRange it2) {
int minOrdinal = Math.min(it1.ordinal(), it2.ordinal());
return values()[minOrdinal];
}
public static NumberRange union(NumberRange it1, NumberRange it2) {
int maxOrdinal = Math.max(it1.ordinal(), it2.ordinal());
return values()[maxOrdinal];
}
public static boolean isSubsetOf(NumberRange subset, NumberRange superset) {
return subset.ordinal() <= superset.ordinal();
}
public static NumberRange getMostSpecificRange(Number n) {
if (n instanceof Integer || n instanceof Long || n instanceof BigInteger)
return INTEGER;
else if (n instanceof BigDecimal)
return DECIMAL;
else if (n instanceof BigRational)
return RATIONAL;
else
throw new IllegalArgumentException();
}
} | CPoirot3/OWL-Reasoner | project/src/org/semanticweb/HermiT/datatypes/owlreal/NumberRange.java | Java | apache-2.0 | 1,986 |
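/*
 * Illustrative sketch, not part of the original file above: the enum ordering
 * NOTHING < INTEGER < DECIMAL < RATIONAL < REAL acts as a lattice, so union widens
 * to the larger range and intersection narrows to the smaller one.
 */
class NumberRangeUsageSketch {
    public static void main(String[] args) {
        System.out.println(NumberRange.union(NumberRange.INTEGER, NumberRange.RATIONAL));        // RATIONAL
        System.out.println(NumberRange.intersection(NumberRange.DECIMAL, NumberRange.RATIONAL)); // DECIMAL
        System.out.println(NumberRange.getMostSpecificRange(java.math.BigInteger.TEN));          // INTEGER
        System.out.println(NumberRange.isSubsetOf(NumberRange.INTEGER, NumberRange.REAL));       // true
        System.out.println(NumberRange.INTEGER.isDense());                                       // false
    }
}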
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lookoutequipment.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.lookoutequipment.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* CreateDatasetResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateDatasetResultJsonUnmarshaller implements Unmarshaller<CreateDatasetResult, JsonUnmarshallerContext> {
public CreateDatasetResult unmarshall(JsonUnmarshallerContext context) throws Exception {
CreateDatasetResult createDatasetResult = new CreateDatasetResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return createDatasetResult;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("DatasetName", targetDepth)) {
context.nextToken();
createDatasetResult.setDatasetName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DatasetArn", targetDepth)) {
context.nextToken();
createDatasetResult.setDatasetArn(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Status", targetDepth)) {
context.nextToken();
createDatasetResult.setStatus(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return createDatasetResult;
}
private static CreateDatasetResultJsonUnmarshaller instance;
public static CreateDatasetResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new CreateDatasetResultJsonUnmarshaller();
return instance;
}
}
| aws/aws-sdk-java | aws-java-sdk-lookoutequipment/src/main/java/com/amazonaws/services/lookoutequipment/model/transform/CreateDatasetResultJsonUnmarshaller.java | Java | apache-2.0 | 3,307 |
package com.designpattern.structural.facade;
public class Facade {
SystemOne system1 = new SystemOne();
SystemTwo system2 = new SystemTwo();
SystemThree system3 = new SystemThree();
SystemFour system4 = new SystemFour();
public void facadeFunction1() {
System.out.println("---- facade function 1");
system1.methodOne();
system3.methodThree();
system4.methodFour();
System.out.println("---- facade function 1 end");
}
public void facadeFunction2() {
System.out.println("---- facade function 2");
system2.methodTwo();
system3.methodThree();
System.out.println("---- facade function 2 end");
}
}
| tyybjcc/Design-Patterns-in-java | DesignPatterns/src/com/designpattern/structural/facade/Facade.java | Java | apache-2.0 | 649 |
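/*
 * Illustrative sketch, not part of the original file above: a client of the facade only
 * calls the two coarse-grained operations and never touches SystemOne..SystemFour directly.
 */
class FacadeClientSketch {
    public static void main(String[] args) {
        Facade facade = new Facade();
        facade.facadeFunction1(); // internally drives SystemOne, SystemThree and SystemFour
        facade.facadeFunction2(); // internally drives SystemTwo and SystemThree
    }
}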
/*
* Copyright 2019 Stephane Nicolas
* Copyright 2019 Daniel Molinero Reguera
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toothpick.getInstance;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import toothpick.Scope;
import toothpick.ScopeImpl;
import toothpick.Toothpick;
import toothpick.config.Module;
import toothpick.configuration.Configuration;
import toothpick.configuration.CyclicDependencyException;
import toothpick.data.CyclicFoo;
import toothpick.data.CyclicNamedFoo;
import toothpick.data.IFoo;
import toothpick.locators.NoFactoryFoundException;
/*
* Creates a instance in the simplest possible way
* without any module.
*/
public class CycleCheckTest {
@BeforeClass
public static void setUp() {
Toothpick.setConfiguration(Configuration.forDevelopment());
}
@AfterClass
public static void staticTearDown() {
Toothpick.setConfiguration(Configuration.forProduction());
}
@After
public void tearDown() {
Toothpick.reset();
}
@Test(expected = CyclicDependencyException.class)
public void testSimpleCycleDetection() {
// GIVEN
Scope scope = new ScopeImpl("");
// WHEN
scope.getInstance(CyclicFoo.class);
// THEN
fail("Should throw an exception as a cycle is detected");
}
@Test
public void testCycleDetection_whenSameClass_and_differentName_shouldNotCrash() {
// GIVEN
final CyclicNamedFoo instance1 = new CyclicNamedFoo();
Scope scope = new ScopeImpl("");
scope.installModules(
new Module() {
{
bind(CyclicNamedFoo.class).withName("foo").toInstance(instance1);
}
});
// WHEN
CyclicNamedFoo instance2 = scope.getInstance(CyclicNamedFoo.class);
// THEN
// Should not crashed
assertThat(instance2, notNullValue());
assertThat(instance2.cyclicFoo, sameInstance(instance1));
}
@Test(expected = NoFactoryFoundException.class)
public void testCycleDetection_whenGetInstanceFails_shouldCloseCycle() {
// GIVEN
Scope scope = new ScopeImpl("");
// WHEN
try {
scope.getInstance(IFoo.class);
} catch (NoFactoryFoundException nfe) {
nfe.printStackTrace();
}
scope.getInstance(IFoo.class);
// THEN
fail(
"Should throw NoFactoryFoundException as IFoo does not have any implementation bound."
+ "But It should not throw CyclicDependencyException as it was removed from the stack.");
}
}
| stephanenicolas/toothpick | toothpick-runtime/src/test/java/toothpick/getInstance/CycleCheckTest.java | Java | apache-2.0 | 3,214 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.ssg.dcst.panthera.parse.sql.transformer.fb;
import java.util.ArrayList;
import java.util.List;
import org.antlr.runtime.tree.CommonTree;
import com.intel.ssg.dcst.panthera.parse.sql.PantheraExpParser;
import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateException;
import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateUtil;
import com.intel.ssg.dcst.panthera.parse.sql.TranslateContext;
import br.com.porcelli.parser.plsql.PantheraParser_PLSQLParser;
/**
* transform AND to JOIN(by rebuilding left select).<br>
* AndFilterBlock.
*
*/
public class AndFilterBlock extends LogicFilterBlock {
/**
* this must have two children.
*
* @throws SqlXlateException
*/
@Override
public void process(FilterBlockContext fbContext, TranslateContext context)
throws SqlXlateException {
FilterBlock leftFB = this.getChildren().get(0);
leftFB.process(fbContext, context);
fbContext.getQueryStack().peek().setQueryForTransfer(leftFB.getTransformedNode());
fbContext.getQueryStack().peek().setRebuildQueryForTransfer();
FilterBlock rightFB = this.getChildren().get(1);
CommonTree condition = rightFB.getASTNode();
TypeFilterBlock type = fbContext.getTypeStack().peek();
if (rightFB instanceof UnCorrelatedFilterBlock) {
// simple condition
if (type instanceof WhereFilterBlock) {
rebuildWhereCondition(leftFB, condition);
}
if (type instanceof HavingFilterBlock) {
rebuildHavingCondition(leftFB, condition);
}
this.setTransformedNode(leftFB.getTransformedNode());
} else {
rightFB.process(fbContext, context);
this.setTransformedNode(rightFB.getTransformedNode());
}
}
private void rebuildWhereCondition(FilterBlock leftFB, CommonTree condition) {
CommonTree transformedSelect = leftFB.getTransformedNode();
rebuildWhereCond(transformedSelect, condition);
}
private void rebuildWhereCond(CommonTree transformedSelect, CommonTree condition) {
if (transformedSelect.getType() == PantheraParser_PLSQLParser.SUBQUERY) {
for (int i = 0; i < transformedSelect.getChildCount(); i++) {
rebuildWhereCond((CommonTree) transformedSelect.getChild(i), condition);
}
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT) {
rebuildWhereCondition(transformedSelect, condition);
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_UNION) { // UNION node
rebuildWhereCond((CommonTree) transformedSelect.getChild(0), condition);
}
}
private void rebuildWhereCondition(CommonTree transformedSelect, CommonTree condition) {
CommonTree tableRefElement = (CommonTree) transformedSelect.getChild(0).getChild(0).getChild(0);
CommonTree subQuery = (CommonTree) tableRefElement.getChild(tableRefElement.getChildCount() - 1).getChild(0)
.getChild(0).getChild(0);
List<List<CommonTree>> selects = new ArrayList<List<CommonTree>>();
for (int i = 0; i < subQuery.getChildCount(); i++) {
List<CommonTree> selectLists = new ArrayList<CommonTree>();
FilterBlockUtil.findNode((CommonTree) subQuery.getChild(i),
PantheraExpParser.SELECT_LIST, selectLists);
assert(selectLists != null);
List<CommonTree> oneSelects = new ArrayList<CommonTree>();
for (CommonTree sl:selectLists) {
oneSelects.add((CommonTree) sl.getParent());
}
selects.add(oneSelects);
}
for (List<CommonTree> sels:selects) {
CommonTree sel = sels.get(0);
for (int j = 0; j < sels.size(); j++) {
sel = sels.get(j);
if(sel.getCharPositionInLine() == condition.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT).getCharPositionInLine()) {
break;
}
}
CommonTree where = (CommonTree) sel
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_WHERE);
if (where == null) {
where = FilterBlockUtil.createSqlASTNode(condition, PantheraExpParser.SQL92_RESERVED_WHERE,
"where");
CommonTree group = (CommonTree) sel
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_GROUP);
if (group != null) {
int groupIndex = group.getChildIndex();
SqlXlateUtil.addCommonTreeChild(sel, groupIndex, where);
} else {
sel.addChild(where);
}
CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(condition,
PantheraExpParser.LOGIC_EXPR, "LOGIC_EXPR");
where.addChild(logicExpr);
logicExpr.addChild(condition);
} else {
CommonTree logicExpr = (CommonTree) where.getChild(0);
FilterBlockUtil.addConditionToLogicExpr(logicExpr, condition);
}
}
}
private void rebuildHavingCondition(FilterBlock leftFB, CommonTree condition) {
CommonTree transformedSelect = leftFB.getTransformedNode();
rebuildHavingCond(transformedSelect, condition);
}
private void rebuildHavingCond(CommonTree transformedSelect, CommonTree condition) {
if (transformedSelect.getType() == PantheraParser_PLSQLParser.SUBQUERY) {
for (int i = 0; i < transformedSelect.getChildCount(); i++) {
rebuildHavingCond((CommonTree) transformedSelect.getChild(i), condition);
}
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT) {
rebuildHavingCondition(transformedSelect, condition);
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_UNION) { // UNION node
rebuildHavingCond((CommonTree) transformedSelect.getChild(0), condition);
}
}
private void rebuildHavingCondition(CommonTree transformedSelect, CommonTree condition) {
CommonTree tableRefElement = (CommonTree) transformedSelect.getChild(0).getChild(0).getChild(0);
CommonTree subQuery = (CommonTree) tableRefElement.getChild(tableRefElement.getChildCount() - 1).getChild(0)
.getChild(0).getChild(0);
List<List<CommonTree>> groups = new ArrayList<List<CommonTree>>();
    for (int i = 0; i < subQuery.getChildCount(); i++) {
List<CommonTree> oneGroups = new ArrayList<CommonTree>();
FilterBlockUtil.findNode((CommonTree) subQuery.getChild(i),
PantheraExpParser.SQL92_RESERVED_GROUP, oneGroups);
assert(oneGroups != null);
groups.add(oneGroups);
}
    for (List<CommonTree> grps:groups) {
CommonTree group = grps.get(0);
for (int j = 0; j < grps.size(); j++) {
group = grps.get(j);
        if (group.getCharPositionInLine() == condition.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_GROUP).getCharPositionInLine()) {
break;
}
}
CommonTree having = (CommonTree) group
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_HAVING);
if (having == null) {
having = FilterBlockUtil.createSqlASTNode(condition, PantheraExpParser.SQL92_RESERVED_HAVING,
"having");
group.addChild(having);
CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(condition,
PantheraExpParser.LOGIC_EXPR, "LOGIC_EXPR");
having.addChild(logicExpr);
logicExpr.addChild(condition);
} else {
CommonTree logicExpr = (CommonTree) having.getChild(0);
FilterBlockUtil.addConditionToLogicExpr(logicExpr, condition);
}
}
}
}
| adrian-wang/project-panthera-skin | src/main/java/com/intel/ssg/dcst/panthera/parse/sql/transformer/fb/AndFilterBlock.java | Java | apache-2.0 | 8,205 |
package io.swagger.client.api;
import com.sun.jersey.api.client.GenericType;
import io.swagger.client.ApiException;
import io.swagger.client.ApiClient;
import io.swagger.client.Configuration;
import io.swagger.client.Pair;
import io.swagger.client.model.CodeSnippet;
import io.swagger.client.model.CodeSnippetList;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
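
/**
 * Generated Swagger client for the code snippet API: create, retrieve, update and
 * delete a single snippet (paths under /snip) and list all snippets (/snips).
 */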
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2016-03-11T02:09:38.462Z")
public class DefaultApi {
private ApiClient apiClient;
public DefaultApi() {
this(Configuration.getDefaultApiClient());
}
public DefaultApi(ApiClient apiClient) {
this.apiClient = apiClient;
}
public ApiClient getApiClient() {
return apiClient;
}
public void setApiClient(ApiClient apiClient) {
this.apiClient = apiClient;
}
/**
* Creates a code snippet.
* Creates a code snippet in the specified language.
* @param codeSnippetBody Code snippet object.
* @throws ApiException if fails to make API call
*/
public void snipPost(CodeSnippet codeSnippetBody) throws ApiException {
Object localVarPostBody = codeSnippetBody;
// create path and map variables
String localVarPath = "/snip".replaceAll("\\{format\\}","json");
// query params
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
String[] localVarAuthNames = new String[] { };
apiClient.invokeAPI(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, null);
}
/**
* Retrieves the specified code snippet.
* Retrieves the specified code snippet.
* @param codeSnippetUuid Code snippet unique identifier.
* @return CodeSnippet
* @throws ApiException if fails to make API call
*/
public CodeSnippet snipCodeSnippetUuidGet(String codeSnippetUuid) throws ApiException {
Object localVarPostBody = null;
// verify the required parameter 'codeSnippetUuid' is set
if (codeSnippetUuid == null) {
throw new ApiException(400, "Missing the required parameter 'codeSnippetUuid' when calling snipCodeSnippetUuidGet");
}
// create path and map variables
String localVarPath = "/snip/{code_snippet_uuid}".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "code_snippet_uuid" + "\\}", apiClient.escapeString(codeSnippetUuid.toString()));
// query params
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
String[] localVarAuthNames = new String[] { };
GenericType<CodeSnippet> localVarReturnType = new GenericType<CodeSnippet>() {};
return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localVarReturnType);
}
/**
* Updates a code snippet.
* Updates a code snippet changes.
* @param codeSnippetUuid Code snippet unique identifier.
* @param codeSnippetBody Code snippet object.
* @throws ApiException if fails to make API call
*/
public void snipCodeSnippetUuidPut(String codeSnippetUuid, CodeSnippet codeSnippetBody) throws ApiException {
Object localVarPostBody = codeSnippetBody;
// verify the required parameter 'codeSnippetUuid' is set
if (codeSnippetUuid == null) {
throw new ApiException(400, "Missing the required parameter 'codeSnippetUuid' when calling snipCodeSnippetUuidPut");
}
// create path and map variables
String localVarPath = "/snip/{code_snippet_uuid}".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "code_snippet_uuid" + "\\}", apiClient.escapeString(codeSnippetUuid.toString()));
// query params
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
String[] localVarAuthNames = new String[] { };
apiClient.invokeAPI(localVarPath, "PUT", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, null);
}
/**
* Deletes the specified code snippet.
* Deletes the specified code snippet.
* @param codeSnippetUuid Code snippet unique identifier.
* @throws ApiException if fails to make API call
*/
public void snipCodeSnippetUuidDelete(String codeSnippetUuid) throws ApiException {
Object localVarPostBody = null;
// verify the required parameter 'codeSnippetUuid' is set
if (codeSnippetUuid == null) {
throw new ApiException(400, "Missing the required parameter 'codeSnippetUuid' when calling snipCodeSnippetUuidDelete");
}
// create path and map variables
String localVarPath = "/snip/{code_snippet_uuid}".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "code_snippet_uuid" + "\\}", apiClient.escapeString(codeSnippetUuid.toString()));
// query params
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
String[] localVarAuthNames = new String[] { };
apiClient.invokeAPI(localVarPath, "DELETE", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, null);
}
/**
* Retrieves all code snippets.
*
* @return CodeSnippetList
* @throws ApiException if fails to make API call
*/
public CodeSnippetList snipsGet() throws ApiException {
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/snips".replaceAll("\\{format\\}","json");
// query params
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
String[] localVarAuthNames = new String[] { };
GenericType<CodeSnippetList> localVarReturnType = new GenericType<CodeSnippetList>() {};
return apiClient.invokeAPI(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localVarReturnType);
}
}
| jsodini/CodeSnip | sdk/java/src/main/java/io/swagger/client/api/DefaultApi.java | Java | apache-2.0 | 8,599 |
/**
* CommonFramework
*
* Copyright (C) 2017 Black Duck Software, Inc.
* http://www.blackducksoftware.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.blackducksoftware.tools.commonframework.standard.protex.report.template;
// TODO: Auto-generated Javadoc
/**
 * Pojo representing the second sheet of our test template.
*
* @author akamen
*/
public class TestPojoPageTwo extends TestPojo {
/** The value1page2. */
public String value1page2;
/** The value2page2. */
public String value2page2;
/**
* Gets the value2 page2.
*
* @return the value2 page2
*/
public String getValue2Page2() {
return value2page2;
}
/**
* Sets the value2page2.
*
* @param value2page2
* the new value2page2
*/
public void setValue2page2(String value2page2) {
this.value2page2 = value2page2;
}
/**
* Gets the value1 page2.
*
* @return the value1 page2
*/
public String getValue1Page2() {
return value1page2;
}
/**
* Sets the value1page2.
*
* @param value1page2
* the new value1page2
*/
public void setValue1page2(String value1page2) {
this.value1page2 = value1page2;
}
}
| blackducksoftware/common-framework | src/test/java/com/blackducksoftware/tools/commonframework/standard/protex/report/template/TestPojoPageTwo.java | Java | apache-2.0 | 2,010 |
package org.test;
import org.test.act.MainGame;
import loon.LSetting;
import loon.LSystem;
import loon.LazyLoading;
import loon.Screen;
import loon.javase.Loon;
public class JavaSEMain {
public static void main(String[]args){
LSetting setting = new LSetting();
setting.isFPS = true;
setting.isLogo = false;
setting.logoPath = "loon_logo.png";
		// original (design) size
setting.width = 800;
setting.height = 480;
setting.fps = 60;
setting.fontName = "黑体";
setting.appName = "动作游戏";
LSystem.NOT_MOVE = true;
Loon.register(setting, new LazyLoading.Data() {
@Override
public Screen onScreen() {
				// This Screen lives in the sample folder; import its resources and it can be loaded
return new MainGame();
}
});
}
}
| cping/LGame | Java/Examples/cannonblast(0.5)/src/org/test/JavaSEMain.java | Java | apache-2.0 | 744 |
package apple.uikit;
import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.foundation.*;
import apple.coreanimation.*;
import apple.coredata.*;
import apple.coreimage.*;
import apple.coretext.*;
import apple.corelocation.*;
@Library("UIKit/UIKit.h")
@Mapping("UIMenuControllerArrowDirection")
public final class UIMenuControllerArrowDirection extends ObjCEnum {
@GlobalConstant("UIMenuControllerArrowDefault")
public static final long Default = 0L;
@GlobalConstant("UIMenuControllerArrowUp")
public static final long Up = 1L;
@GlobalConstant("UIMenuControllerArrowDown")
public static final long Down = 2L;
@GlobalConstant("UIMenuControllerArrowLeft")
public static final long Left = 3L;
@GlobalConstant("UIMenuControllerArrowRight")
public static final long Right = 4L;
}
| Sellegit/j2objc | runtime/src/main/java/apple/uikit/UIMenuControllerArrowDirection.java | Java | apache-2.0 | 1,076 |
package com.swifts.frame.modules.wx.fastweixin.company.message.req;
/**
 * WeChat Enterprise Account asynchronous batch job types
* ====================================================================
*
* --------------------------------------------------------------------
* @author Nottyjay
* @version 1.0.beta
* @since 1.3.6
* ====================================================================
*/
public final class QYBatchJobType {
    public static final String SYNCUSER = "sync_user"; // incrementally update members
    public static final String REPLACEUSER = "replace_user"; // fully replace members
    public static final String INVITEUSER = "invite_user"; // invite members to follow
    public static final String REPLACEPARTY = "replace_party"; // fully replace departments
private QYBatchJobType() {
}
}
| hanyahui88/swifts | src/main/java/com/swifts/frame/modules/wx/fastweixin/company/message/req/QYBatchJobType.java | Java | apache-2.0 | 754 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.TestCoderBase;
import java.lang.reflect.Constructor;
/**
* Erasure coder test base with utilities.
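 *
 * <p>Typical usage (illustrative): a concrete subclass assigns {@code encoderClass}
 * and {@code decoderClass}, sets the coding parameters, and calls
 * {@code testCoding(true)} / {@code testCoding(false)} from its test methods.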
*/
public abstract class TestErasureCoderBase extends TestCoderBase {
protected Class<? extends ErasureCoder> encoderClass;
protected Class<? extends ErasureCoder> decoderClass;
private ErasureCoder encoder;
private ErasureCoder decoder;
protected int numChunksInBlock = 16;
/**
   * It's just a block for this test purpose. We don't use an HDFS block here
   * at all, for simplicity.
*/
protected static class TestBlock extends ECBlock {
private ECChunk[] chunks;
    // For simplicity, just assume the block already has its chunks ready.
// In practice we need to read/write chunks from/to the block via file IO.
public TestBlock(ECChunk[] chunks) {
this.chunks = chunks;
}
}
/**
   * Generates source data, encodes, recovers and then verifies.
   * RawErasureCoder mainly uses ECChunk to pass input and output data buffers;
   * it supports two kinds of ByteBuffers: array-backed and direct. The
   * usingDirectBuffer flag indicates which case to test.
* @param usingDirectBuffer
*/
protected void testCoding(boolean usingDirectBuffer) {
this.usingDirectBuffer = usingDirectBuffer;
prepareCoders();
/**
* The following runs will use 3 different chunkSize for inputs and outputs,
* to verify the same encoder/decoder can process variable width of data.
*/
performTestCoding(baseChunkSize, true);
performTestCoding(baseChunkSize - 17, false);
performTestCoding(baseChunkSize + 16, true);
}
private void performTestCoding(int chunkSize, boolean usingSlicedBuffer) {
setChunkSize(chunkSize);
prepareBufferAllocator(usingSlicedBuffer);
// Generate data and encode
ECBlockGroup blockGroup = prepareBlockGroupForEncoding();
// Backup all the source chunks for later recovering because some coders
// may affect the source data.
TestBlock[] clonedDataBlocks =
cloneBlocksWithData((TestBlock[]) blockGroup.getDataBlocks());
TestBlock[] parityBlocks = (TestBlock[]) blockGroup.getParityBlocks();
ErasureCodingStep codingStep;
codingStep = encoder.calculateCoding(blockGroup);
performCodingStep(codingStep);
// Erase specified sources but return copies of them for later comparing
TestBlock[] backupBlocks = backupAndEraseBlocks(clonedDataBlocks, parityBlocks);
// Decode
blockGroup = new ECBlockGroup(clonedDataBlocks, blockGroup.getParityBlocks());
codingStep = decoder.calculateCoding(blockGroup);
performCodingStep(codingStep);
// Compare
compareAndVerify(backupBlocks, codingStep.getOutputBlocks());
}
/**
* This is typically how a coding step should be performed.
* @param codingStep
*/
private void performCodingStep(ErasureCodingStep codingStep) {
// Pretend that we're opening these input blocks and output blocks.
ECBlock[] inputBlocks = codingStep.getInputBlocks();
ECBlock[] outputBlocks = codingStep.getOutputBlocks();
// We allocate input and output chunks accordingly.
ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
ECChunk[] outputChunks = new ECChunk[outputBlocks.length];
for (int i = 0; i < numChunksInBlock; ++i) {
// Pretend that we're reading input chunks from input blocks.
for (int j = 0; j < inputBlocks.length; ++j) {
inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
}
// Pretend that we allocate and will write output results to the blocks.
for (int j = 0; j < outputBlocks.length; ++j) {
outputChunks[j] = allocateOutputChunk();
((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
}
// Given the input chunks and output chunk buffers, just call it !
codingStep.performCoding(inputChunks, outputChunks);
}
codingStep.finish();
}
/**
   * Compare and verify that the recovered blocks' data matches the erased
   * blocks' data.
* @param erasedBlocks
* @param recoveredBlocks
*/
protected void compareAndVerify(ECBlock[] erasedBlocks,
ECBlock[] recoveredBlocks) {
for (int i = 0; i < erasedBlocks.length; ++i) {
compareAndVerify(((TestBlock) erasedBlocks[i]).chunks, ((TestBlock) recoveredBlocks[i]).chunks);
}
}
private void prepareCoders() {
if (encoder == null) {
encoder = createEncoder();
}
if (decoder == null) {
decoder = createDecoder();
}
}
/**
* Create the raw erasure encoder to test
* @return
*/
protected ErasureCoder createEncoder() {
ErasureCoder encoder;
try {
Constructor<? extends ErasureCoder> constructor =
(Constructor<? extends ErasureCoder>)
encoderClass.getConstructor(int.class, int.class);
encoder = constructor.newInstance(numDataUnits, numParityUnits);
} catch (Exception e) {
throw new RuntimeException("Failed to create encoder", e);
}
encoder.setConf(getConf());
return encoder;
}
/**
* create the raw erasure decoder to test
* @return
*/
protected ErasureCoder createDecoder() {
ErasureCoder decoder;
try {
Constructor<? extends ErasureCoder> constructor =
(Constructor<? extends ErasureCoder>)
decoderClass.getConstructor(int.class, int.class);
decoder = constructor.newInstance(numDataUnits, numParityUnits);
} catch (Exception e) {
throw new RuntimeException("Failed to create decoder", e);
}
decoder.setConf(getConf());
return decoder;
}
/**
* Prepare a block group for encoding.
* @return
*/
protected ECBlockGroup prepareBlockGroupForEncoding() {
ECBlock[] dataBlocks = new TestBlock[numDataUnits];
ECBlock[] parityBlocks = new TestBlock[numParityUnits];
for (int i = 0; i < numDataUnits; i++) {
dataBlocks[i] = generateDataBlock();
}
for (int i = 0; i < numParityUnits; i++) {
parityBlocks[i] = allocateOutputBlock();
}
return new ECBlockGroup(dataBlocks, parityBlocks);
}
/**
* Generate random data and return a data block.
* @return
*/
protected ECBlock generateDataBlock() {
ECChunk[] chunks = new ECChunk[numChunksInBlock];
for (int i = 0; i < numChunksInBlock; ++i) {
chunks[i] = generateDataChunk();
}
return new TestBlock(chunks);
}
/**
   * Erase blocks to test recovering them. Clone them before erasure so that
   * copies of the originals can be returned.
* @param dataBlocks
* @return clone of erased dataBlocks
*/
protected TestBlock[] backupAndEraseBlocks(TestBlock[] dataBlocks,
TestBlock[] parityBlocks) {
TestBlock[] toEraseBlocks = new TestBlock[erasedDataIndexes.length +
erasedParityIndexes.length];
int idx = 0;
TestBlock block;
for (int i = 0; i < erasedParityIndexes.length; i++) {
block = parityBlocks[erasedParityIndexes[i]];
toEraseBlocks[idx ++] = cloneBlockWithData(block);
eraseDataFromBlock(block);
}
for (int i = 0; i < erasedDataIndexes.length; i++) {
block = dataBlocks[erasedDataIndexes[i]];
toEraseBlocks[idx ++] = cloneBlockWithData(block);
eraseDataFromBlock(block);
}
return toEraseBlocks;
}
/**
* Allocate an output block. Note the chunk buffer will be allocated by the
   * caller when performing the coding step.
* @return
*/
protected TestBlock allocateOutputBlock() {
ECChunk[] chunks = new ECChunk[numChunksInBlock];
return new TestBlock(chunks);
}
/**
   * Clone blocks with their data copied along, avoiding affecting the original
* blocks.
* @param blocks
* @return
*/
protected TestBlock[] cloneBlocksWithData(TestBlock[] blocks) {
TestBlock[] results = new TestBlock[blocks.length];
for (int i = 0; i < blocks.length; ++i) {
results[i] = cloneBlockWithData(blocks[i]);
}
return results;
}
/**
* Clone exactly a block, avoiding affecting the original block.
* @param block
* @return a new block
*/
protected TestBlock cloneBlockWithData(TestBlock block) {
ECChunk[] newChunks = cloneChunksWithData(block.chunks);
return new TestBlock(newChunks);
}
/**
* Erase data from a block.
*/
protected void eraseDataFromBlock(TestBlock theBlock) {
eraseDataFromChunks(theBlock.chunks);
theBlock.setErased(true);
}
}
| anjuncc/hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java | Java | apache-2.0 | 9,576 |
/*
* Copyright (c) 2010-2017 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.intest.rbac;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import java.io.File;
/**
* @author semancik
*
*/
@ContextConfiguration(locations = {"classpath:ctx-model-intest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestRbacDeprecated extends TestRbac {
protected static final File ROLE_GOVERNOR_DEPRECATED_FILE = new File(TEST_DIR, "role-governor-deprecated.xml");
protected static final File ROLE_CANNIBAL_DEPRECATED_FILE = new File(TEST_DIR, "role-cannibal-deprecated.xml");
@Override
protected File getRoleGovernorFile() {
return ROLE_GOVERNOR_DEPRECATED_FILE;
}
@Override
protected File getRoleCannibalFile() {
return ROLE_CANNIBAL_DEPRECATED_FILE;
}
@Override
protected boolean testMultiplicityConstraintsForNonDefaultRelations() {
return false;
}
}
| Pardus-Engerek/engerek | model/model-intest/src/test/java/com/evolveum/midpoint/model/intest/rbac/TestRbacDeprecated.java | Java | apache-2.0 | 1,603 |
package com.koch.ambeth.util;
import org.junit.Assert;
import org.junit.Test;
import com.koch.ambeth.ioc.util.ImmutableTypeSet;
public class ImmutableTypeSetTest {
public interface MyType {
// intended blank
}
public class MyClass implements MyType {
// intended blank
}
@Test
public void test() {
ImmutableTypeSet immutableTypeSet = new ImmutableTypeSet();
Assert.assertFalse(immutableTypeSet.isImmutableType(MyClass.class));
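		// registering the interface should make implementing classes count as immutable too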
immutableTypeSet.registerImmutableType(MyType.class);
Assert.assertTrue(immutableTypeSet.isImmutableType(MyType.class));
Assert.assertTrue(immutableTypeSet.isImmutableType(MyClass.class));
}
}
| Dennis-Koch/ambeth | jambeth/jambeth-ioc-test/src/test/java/com/koch/ambeth/util/ImmutableTypeSetTest.java | Java | apache-2.0 | 677 |
/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.bpmn.model;
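
/**
 * BPMN event-based gateway element; {@link #clone()} returns a copy whose values
 * are populated via {@link #setValues(EventGateway)}.
 */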
public class EventGateway extends Gateway {
public EventGateway clone() {
EventGateway clone = new EventGateway();
clone.setValues(this);
return clone;
}
public void setValues(EventGateway otherElement) {
super.setValues(otherElement);
}
}
| Activiti/Activiti | activiti-core/activiti-bpmn-model/src/main/java/org/activiti/bpmn/model/EventGateway.java | Java | apache-2.0 | 912 |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.skylark.io.impl;
import com.facebook.buck.skylark.io.Globber;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.UnixGlob;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
/**
* A simple implementation of globbing functionality that allows resolving file paths based on
* include patterns (file patterns that should be returned) minus exclude patterns (file patterns
* that should be excluded from the resulting set).
*
* <p>Since this is a simple implementation it does not support caching and other smarts.
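 *
 * <p>Illustrative usage: {@code create(basePath).run(includePatterns, excludePatterns, true)}
 * returns the set of matching paths, relative to {@code basePath}.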
*/
public class SimpleGlobber implements Globber {
/** Path used as a root when resolving patterns. */
private final Path basePath;
private SimpleGlobber(Path basePath) {
this.basePath = basePath;
}
/**
* @param include File patterns that should be included in the resulting set.
* @param exclude File patterns that should be excluded from the resulting set.
* @param excludeDirectories Whether directories should be excluded from the resulting set.
* @return The set of paths resolved using include patterns minus paths excluded by exclude
* patterns.
*/
@Override
public Set<String> run(
Collection<String> include, Collection<String> exclude, Boolean excludeDirectories)
throws IOException {
ImmutableSet<String> includePaths =
resolvePathsMatchingGlobPatterns(include, basePath, excludeDirectories);
ImmutableSet<String> excludePaths =
resolvePathsMatchingGlobPatterns(exclude, basePath, excludeDirectories);
return Sets.difference(includePaths, excludePaths);
}
/**
* Resolves provided list of glob patterns into a set of paths.
*
* @param patterns The glob patterns to resolve.
* @param basePath The base path used when resolving glob patterns.
* @param excludeDirectories Flag indicating whether directories should be excluded from result.
* @return The set of paths corresponding to requested patterns.
*/
private static ImmutableSet<String> resolvePathsMatchingGlobPatterns(
Collection<String> patterns, Path basePath, Boolean excludeDirectories) throws IOException {
UnixGlob.Builder includeGlobBuilder = UnixGlob.forPath(basePath).addPatterns(patterns);
if (excludeDirectories != null) {
includeGlobBuilder.setExcludeDirectories(excludeDirectories);
}
return includeGlobBuilder
.glob()
.stream()
.map(includePath -> includePath.relativeTo(basePath).getPathString())
.collect(MoreCollectors.toImmutableSet());
}
/**
* Factory method for creating {@link SimpleGlobber} instances.
*
* @param basePath The base path relative to which paths matching glob patterns will be resolved.
*/
public static Globber create(Path basePath) {
return new SimpleGlobber(basePath);
}
}
| shybovycha/buck | src/com/facebook/buck/skylark/io/impl/SimpleGlobber.java | Java | apache-2.0 | 3,622 |