| repo_name | path | size | content | license |
|---|---|---|---|---|
benbenw/jmeter
|
src/core/src/main/java/org/apache/jmeter/testelement/property/NullProperty.java
|
2933
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.testelement.property;
import org.apache.jmeter.testelement.TestElement;
/**
* A null property.
*
*/
public final class NullProperty extends AbstractProperty {
private static final long serialVersionUID = 240L;
private JMeterProperty tempValue; // TODO - why does null property have a value?
public NullProperty(String name) {
super(name);
}
public NullProperty() {
super();
}
/**
* @see JMeterProperty#getStringValue()
*/
@Override
public String getStringValue() {
if (tempValue != null) {
return tempValue.getStringValue();
}
return "";
}
@Override
public void setObjectValue(Object v) {
// NOOP
}
/**
* @see JMeterProperty#getObjectValue()
*/
@Override
public Object getObjectValue() {
return null;
}
/**
* @see JMeterProperty#isRunningVersion()
*/
@Override
public boolean isRunningVersion() {
return false;
}
/**
* @see JMeterProperty#mergeIn(JMeterProperty)
*/
@Override
public void mergeIn(JMeterProperty prop) {
tempValue = prop;
}
@Override
public NullProperty clone() {
return this;
}
/**
* @see JMeterProperty#getBooleanValue()
*/
@Override
public boolean getBooleanValue() {
return false;
}
/**
* @see JMeterProperty#getDoubleValue()
*/
@Override
public double getDoubleValue() {
return 0;
}
/**
* @see JMeterProperty#getFloatValue()
*/
@Override
public float getFloatValue() {
return 0;
}
/**
* @see JMeterProperty#getIntValue()
*/
@Override
public int getIntValue() {
return 0;
}
/**
* @see JMeterProperty#getLongValue()
*/
@Override
public long getLongValue() {
return 0;
}
/**
* @see JMeterProperty#recoverRunningVersion(TestElement)
*/
@Override
public void recoverRunningVersion(TestElement owner) {
tempValue = null;
}
}
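// A minimal sketch (not part of JMeter) illustrating the null-object pattern
// above: every typed getter returns a neutral default instead of throwing,
// so callers can read a missing property without null checks.
class NullPropertyDemo {
    public static void main(String[] args) {
        NullProperty missing = new NullProperty("missing");
        System.out.println("\"" + missing.getStringValue() + "\""); // prints ""
        System.out.println(missing.getIntValue());                  // prints 0
        System.out.println(missing.getBooleanValue());              // prints false
        System.out.println(missing.getObjectValue());               // prints null
    }
}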
|
apache-2.0
|
emre-aydin/hazelcast
|
hazelcast/src/test/java/com/hazelcast/internal/util/QueueUtilTest.java
|
3064
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.util;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.LinkedList;
import java.util.Queue;
import java.util.function.Predicate;
import static com.hazelcast.test.HazelcastTestSupport.assertUtilityConstructor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class QueueUtilTest {
@Test
public void testConstructor() {
assertUtilityConstructor(QueueUtil.class);
}
@Test
public void drainQueueToZero() {
Queue<Integer> queue = new LinkedList<Integer>();
for (int i = 0; i < 100; i++) {
queue.offer(i);
}
int drained = QueueUtil.drainQueue(queue);
assertEquals(100, drained);
assertTrue(queue.isEmpty());
}
@Test
public void drainQueueToNonZero() {
Queue<Integer> queue = new LinkedList<Integer>();
for (int i = 0; i < 100; i++) {
queue.offer(i);
}
int drained = QueueUtil.drainQueue(queue, 50, null);
assertEquals(50, drained);
assertEquals(50, queue.size());
}
@Test
public void drainQueueToZeroWithPredicate() {
Queue<Integer> queue = new LinkedList<Integer>();
for (int i = 0; i < 100; i++) {
queue.offer(i);
}
int drained = QueueUtil.drainQueue(queue, integer -> integer % 2 == 0);
assertEquals(50, drained);
assertTrue(queue.isEmpty());
}
@Test
public void drainQueueToNonZeroWithPredicate() {
Queue<Integer> queue = new LinkedList<Integer>();
for (int i = 0; i < 100; i++) {
queue.offer(i);
}
int drained = QueueUtil.drainQueue(queue, 50, integer -> integer % 2 == 0);
assertEquals(25, drained);
assertEquals(50, queue.size());
}
}
|
apache-2.0
|
tianyutingxy/Carpaccio
|
carpaccio/src/main/java/com/github/florent37/carpaccio/CarpaccioHelper.java
|
12008
|
package com.github.florent37.carpaccio;
import android.support.v4.util.LruCache;
import android.view.View;
import com.github.florent37.carpaccio.model.CarpaccioAction;
import com.github.florent37.carpaccio.model.ObjectAndMethod;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
/**
* Created by florentchampigny on 21/07/15.
*/
public class CarpaccioHelper {
public static String TAG = "CarpaccioHelper";
public static boolean LOG_FAILURES = false;
protected static LruCache<String, Class> classesCache = new LruCache<>(15);
public static Object construct(String name) {
try {
Class objectClass = classesCache.get(name);
if (objectClass == null) {
objectClass = Class.forName(name);
classesCache.put(name, objectClass);
}
return objectClass.newInstance();
} catch (Exception e) {
CarpaccioLogger.e(TAG, "Cannot construct " + name, e);
}
return null;
}
/**
* Return an array of classes from the args[], with headerClass on the first position
* From [object1,object2,object3] with headerClass=View.class return [View.class, object1.class, object2.class, object3.class]
*/
public static Class[] getClassesWithHeaderClass(Object[] args, Class headerClass) {
Class[] classes = new Class[args.length + 1];
classes[0] = headerClass;
for (int i = 0; i < args.length; ++i)
classes[i + 1] = args[i].getClass();
return classes;
}
/**
* Return an array of classes from the args[]
* From [object1,object2,object3] return [object1.class, object2.class, object3.class]
*/
public static Class[] getClasses(Object[] args) {
Class[] classes = new Class[args.length];
for (int i = 0; i < args.length; ++i)
classes[i] = args[i].getClass();
return classes;
}
/**
* From arg=["arg1","arg2"] and viewClass= TextView.class and view instance of TextView (but given as a view)
* return [(TextView)view,"arg1","arg2")];
*/
public static Object[] getArgumentsWithView(View view, Class[] parametersType, Object[] args) {
Object[] out = new Object[args.length + 1];
//add the view on the first parameter
try {
out[0] = parametersType[0].cast(view);
} catch (ClassCastException e) {
if (LOG_FAILURES)
CarpaccioLogger.e(TAG, view.getClass().toString() + " cannot be cast to " + parametersType[0].toString(), e);
out[0] = view;
}
for (int i = 0; i < args.length; ++i) {
Class paramClass = parametersType[i + 1];
Object param = args[i];
if (param instanceof String && isNumber(paramClass)) {
out[i + 1] = stringToNumber((String) param, paramClass);
} else {
try {
out[i + 1] = paramClass.cast(param);
} catch (ClassCastException e) {
if (LOG_FAILURES)
CarpaccioLogger.e(TAG, param.getClass().toString() + " cannot be cast to " + paramClass.toString(), e);
out[i + 1] = param;
}
}
}
return out;
}
/**
* from "myFunction(arg1,arg2)", return "myFunction"
*/
public static String getFunctionName(String tag) {
return tag.substring(0, tag.indexOf('(')).trim();
}
/**
* from "myFunction(arg1,arg2)", return ["arg1","arg2"]
*/
public static String[] getAttributes(String tag) {
String attributes = tag.substring(tag.indexOf('(') + 1, tag.lastIndexOf(')'));
if (attributes.isEmpty())
return new String[0];
return trim(attributes.split(","));
}
public static Object removeTag(View view, String actionName) {
if (view.getTag() != null && view.getTag() instanceof List && actionName != null) {
List<CarpaccioAction> actions = (List<CarpaccioAction>) view.getTag();
List<CarpaccioAction> newActions = new ArrayList<>();
for (int i = 0, count = actions.size(); i < count; ++i)
if (!actions.get(i).getCompleteCall().equals(actionName))
newActions.add(actions.get(i));
return newActions;
}
return view.getTag();
}
/**
* Trim an array of String (each element)
*/
public static String[] trim(String[] strings) {
for (int i = 0; i < strings.length; ++i)
strings[i] = strings[i].trim();
return strings;
}
public static ObjectAndMethod findObjectWithThisMethod(List<Object> objects, String function, int numberOfParams) {
if (objects != null && function != null) {
Method method;
Object object;
int numberOfObjects = objects.size();
for (int j = 0; j < numberOfObjects; ++j) {
object = objects.get(j);
int methodCount = object.getClass().getMethods().length;
for (int i = 0; i < methodCount; ++i) {
method = object.getClass().getMethods()[i];
if (function.equals(method.getName()) && method.getParameterTypes().length == numberOfParams) {
return new ObjectAndMethod(object, method);
}
}
}
CarpaccioLogger.v(TAG, "can't find controller with the method " + function + " , controllers=" + objects.toString());
}
return null;
}
/**
* Find a method named "name" with args.length + 1 parameters and invoke it
* with the view prepended as the first argument
*/
public static Method callFunction(Object object, String name, View view, Object[] args) {
if (object != null && name != null && view != null && args != null) {
Method method = null;
Class viewClass = View.class;
//if name = font(Roboto.ttf) with a TextView
//try to find the font(TextView,String)
for (Method containedMethods : object.getClass().getMethods()) {
if (name.equals(containedMethods.getName()) && containedMethods.getParameterTypes().length == args.length + 1) { //+1 for the view
method = containedMethods;
break;
}
}
//try {
// method = object.getClass().getMethod(name, getClasses(args));
//}catch (Exception e){
// Log.v(TAG,object.getClass()+" does not contains the method "+name);
//}
return callMethod(object, method, name, view, args);
}
return null;
}
public static Method callMethod(Object object, Method method, String name, View view, Object[] args) {
if (method != null && object != null) {
CarpaccioLogger.d(TAG, view.getClass().getName() + " call method " + name + " on " + object);
try {
method.invoke(object, getArgumentsWithView(view, method.getParameterTypes(), args));
return method;
} catch (Exception e) {
CarpaccioLogger.e(TAG, object.getClass() + " cannot invoke method " + name);
}
}
return null;
}
/**
* Invoke the function object.name() with no arguments
* Then return the result (with cast)
*/
public static <T> T callFunction(Object object, String name) {
Method method = null;
try {
method = object.getClass().getMethod(name);
} catch (Exception e) {
if (LOG_FAILURES)
CarpaccioLogger.v(TAG, object.getClass() + " does not contain the method " + name);
}
if (method != null) {
try {
return (T) method.invoke(object);
} catch (Exception e) {
CarpaccioLogger.e(TAG, object.getClass() + " cannot invoke method " + name);
}
}
return null;
}
/**
* Invoke the function object.name(args) with the given arguments
* Then return the result (with cast)
*/
public static <T> T callFunction(Object object, String name, Object[] args) {
Method method = null;
try {
method = object.getClass().getMethod(name, getClasses(args));
} catch (Exception e) {
if (LOG_FAILURES)
CarpaccioLogger.v(TAG, object.getClass() + " does not contain the method " + name);
}
if (method != null) {
try {
return (T) method.invoke(object, args);
} catch (Exception e) {
CarpaccioLogger.e(TAG, object.getClass() + " cannot invoke method " + name);
}
}
return null;
}
public static Integer stringToInt(String s) {
try {
return Integer.parseInt(s);
} catch (NumberFormatException e) {
CarpaccioLogger.e(TAG, s + " is not an integer", e);
return null;
}
}
public static Double stringToDouble(String s) {
try {
return Double.parseDouble(s);
} catch (NumberFormatException e) {
CarpaccioLogger.e(TAG, s + " is not an double", e);
return null;
}
}
public static Long stringToLong(String s) {
try {
return Long.parseLong(s);
} catch (NumberFormatException e) {
CarpaccioLogger.e(TAG, s + " is not a long", e);
return null;
}
}
public static Float stringToFloat(String s) {
try {
return Float.parseFloat(s);
} catch (NumberFormatException e) {
CarpaccioLogger.e(TAG, s + " is not a long", e);
return null;
}
}
public static boolean isNumber(Class destinationClass) {
return
Integer.class.equals(destinationClass) ||
int.class.equals(destinationClass) ||
Float.class.equals(destinationClass) ||
float.class.equals(destinationClass) ||
Long.class.equals(destinationClass) ||
long.class.equals(destinationClass) ||
Double.class.equals(destinationClass) ||
double.class.equals(destinationClass);
}
public static Object stringToNumber(String s, Class destinationClass) {
if (Integer.class.equals(destinationClass))
return stringToInt(s);
else if (destinationClass.getName().equals("int"))
return stringToInt(s);
else if (Float.class.equals(destinationClass))
return stringToFloat(s);
else if (destinationClass.getName().equals("float"))
return stringToFloat(s);
else if (Long.class.equals(destinationClass))
return stringToLong(s);
else if (destinationClass.getName().equals("long"))
return stringToLong(s);
else if (Double.class.equals(destinationClass))
return stringToDouble(s);
else if (destinationClass.getName().equals("double"))
return stringToDouble(s);
else
return null;
}
public static <T extends View> T findParentOfClass(View view, Class<T> theClass) {
if (theClass.isAssignableFrom(view.getClass()))
return (T) view;
else if (view.getParent() != null && view.getParent() instanceof View)
return findParentOfClass((View) view.getParent(), theClass);
else
return null;
}
public static Carpaccio findParentCarpaccio(View view) {
return findParentOfClass(view, Carpaccio.class);
}
public static Carpaccio registerToParentCarpaccio(View view) {
Carpaccio carpaccio = findParentOfClass(view, Carpaccio.class);
if (carpaccio != null) {
carpaccio.addCarpaccioView(view);
return carpaccio;
}
return null;
}
}
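// A minimal sketch (hypothetical, not shipped with Carpaccio) of the tag
// parsing helpers above: a tag like "setText(Hello, world)" splits into a
// function name and an array of trimmed attributes.
class TagParsingDemo {
    public static void main(String[] args) {
        String tag = "setText(Hello, world)";
        System.out.println(CarpaccioHelper.getFunctionName(tag)); // "setText"
        for (String attribute : CarpaccioHelper.getAttributes(tag)) {
            System.out.println(attribute); // "Hello" then "world"
        }
    }
}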
|
apache-2.0
|
baldimir/optaplanner
|
optaplanner-examples/src/main/java/org/optaplanner/examples/cloudbalancing/optional/realtime/DeleteComputerProblemFactChange.java
|
2763
|
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.cloudbalancing.optional.realtime;
import java.util.ArrayList;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.core.impl.solver.ProblemFactChange;
import org.optaplanner.examples.cloudbalancing.domain.CloudBalance;
import org.optaplanner.examples.cloudbalancing.domain.CloudComputer;
import org.optaplanner.examples.cloudbalancing.domain.CloudProcess;
public class DeleteComputerProblemFactChange implements ProblemFactChange<CloudBalance> {
private final CloudComputer computer;
public DeleteComputerProblemFactChange(CloudComputer computer) {
this.computer = computer;
}
@Override
public void doChange(ScoreDirector<CloudBalance> scoreDirector) {
CloudBalance cloudBalance = scoreDirector.getWorkingSolution();
CloudComputer workingComputer = scoreDirector.lookUpWorkingObject(computer);
if (workingComputer == null) {
// The computer has already been deleted (the UI asked to changed the same computer twice), so do nothing
return;
}
// First remove the problem fact from all planning entities that use it
for (CloudProcess process : cloudBalance.getProcessList()) {
if (process.getComputer() == workingComputer) {
scoreDirector.beforeVariableChanged(process, "computer");
process.setComputer(null);
scoreDirector.afterVariableChanged(process, "computer");
}
}
// A SolutionCloner does not clone problem fact lists (such as computerList)
// Shallow clone the computerList so only workingSolution is affected, not bestSolution or guiSolution
ArrayList<CloudComputer> computerList = new ArrayList<>(cloudBalance.getComputerList());
cloudBalance.setComputerList(computerList);
// Remove the problem fact itself
scoreDirector.beforeProblemFactRemoved(workingComputer);
computerList.remove(workingComputer);
scoreDirector.afterProblemFactRemoved(workingComputer);
scoreDirector.triggerVariableListeners();
}
}
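// A minimal usage sketch (hypothetical wiring, not part of the example class):
// in real-time planning the change is submitted to a running Solver, which
// invokes doChange() between solver steps on the solver thread.
class DeleteComputerProblemFactChangeUsage {
    static void deleteComputer(org.optaplanner.core.api.solver.Solver<CloudBalance> solver,
                               CloudComputer computer) {
        solver.addProblemFactChange(new DeleteComputerProblemFactChange(computer));
    }
}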
|
apache-2.0
|
LightGuard/quickstart
|
kitchensink-spring/springmvctest/src/main/java/org/jboss/as/quickstarts/kitchensink/spring/springmvctest/controller/MemberController.java
|
2616
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2013, Red Hat, Inc. and/or its affiliates, and individual
* contributors by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.quickstarts.kitchensink.spring.springmvctest.controller;
import org.jboss.as.quickstarts.kitchensink.spring.springmvctest.data.MemberDao;
import org.jboss.as.quickstarts.kitchensink.spring.springmvctest.model.Member;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.UnexpectedRollbackException;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.validation.Valid;
@Controller
@RequestMapping(value = "/")
public class MemberController {
@Autowired
private MemberDao memberDao;
@RequestMapping(method = RequestMethod.GET)
public String displaySortedMembers(Model model) {
model.addAttribute("newMember", new Member());
model.addAttribute("members", memberDao.findAllOrderedByName());
return "index";
}
@RequestMapping(method = RequestMethod.POST)
public String registerNewMember(@Valid @ModelAttribute("newMember") Member newMember, BindingResult result, Model model) {
if (!result.hasErrors()) {
try {
memberDao.register(newMember);
return "redirect:/";
} catch (UnexpectedRollbackException e) {
model.addAttribute("members", memberDao.findAllOrderedByName());
model.addAttribute("error", e.getCause().getCause());
return "index";
}
} else {
model.addAttribute("members", memberDao.findAllOrderedByName());
return "index";
}
}
}
|
apache-2.0
|
curso007/camel
|
components/camel-cometd/src/test/java/org/apache/camel/component/cometd/CometBindingTest.java
|
5327
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cometd;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.camel.CamelContext;
import org.apache.camel.Message;
import org.apache.camel.impl.DefaultCamelContext;
import org.cometd.bayeux.server.ServerMessage;
import org.cometd.bayeux.server.ServerSession;
import org.cometd.server.BayeuxServerImpl;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class CometBindingTest {
private static final Object FOO = new Object();
private static final Long THIRTY_FOUR = Long.valueOf(34L);
private static final Double TWO_POINT_ONE = Double.valueOf(2.1);
private static final Integer EIGHT = Integer.valueOf(8);
private static final String HELLO = "hello";
private static final String FOO_ATTR_NAME = "foo";
private static final String LONG_ATTR_NAME = "long";
private static final String DOUBLE_ATTR_NAME = "double";
private static final String INTEGER_ATTR_NAME = "integer";
private static final String STRING_ATTR_NAME = "string";
private static final String BOOLEAN_ATT_NAME = "boolean";
private CometdBinding testObj;
@Mock
private BayeuxServerImpl bayeux;
@Mock
private ServerSession remote;
@Mock
private ServerMessage cometdMessage;
private final CamelContext camelContext = new DefaultCamelContext();
@Before
public void before() {
testObj = new CometdBinding(bayeux);
Set<String> attributeNames = new HashSet<String>(Arrays.asList(STRING_ATTR_NAME, INTEGER_ATTR_NAME,
LONG_ATTR_NAME, DOUBLE_ATTR_NAME,
FOO_ATTR_NAME, BOOLEAN_ATT_NAME));
when(remote.getAttributeNames()).thenReturn(attributeNames);
when(remote.getAttribute(STRING_ATTR_NAME)).thenReturn(HELLO);
when(remote.getAttribute(INTEGER_ATTR_NAME)).thenReturn(EIGHT);
when(remote.getAttribute(LONG_ATTR_NAME)).thenReturn(THIRTY_FOUR);
when(remote.getAttribute(DOUBLE_ATTR_NAME)).thenReturn(TWO_POINT_ONE);
when(remote.getAttribute(FOO_ATTR_NAME)).thenReturn(FOO);
when(remote.getAttribute(BOOLEAN_ATT_NAME)).thenReturn(Boolean.TRUE);
}
@Test
public void testBindingTransfersSessionAttributes() {
// setup
testObj = new CometdBinding(bayeux, true);
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(6, result.getHeaders().size());
assertEquals(HELLO, result.getHeader(STRING_ATTR_NAME));
assertEquals(EIGHT, result.getHeader(INTEGER_ATTR_NAME));
assertEquals(THIRTY_FOUR, result.getHeader(LONG_ATTR_NAME));
assertEquals(TWO_POINT_ONE, result.getHeader(DOUBLE_ATTR_NAME));
assertEquals(null, result.getHeader(FOO_ATTR_NAME));
assertTrue((Boolean)result.getHeader(BOOLEAN_ATT_NAME));
}
@Test
public void testBindingHonorsFlagForSessionAttributes() {
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(1, result.getHeaders().size());
assertEquals(null, result.getHeader(STRING_ATTR_NAME));
assertEquals(null, result.getHeader(INTEGER_ATTR_NAME));
assertEquals(null, result.getHeader(LONG_ATTR_NAME));
assertEquals(null, result.getHeader(FOO_ATTR_NAME));
assertEquals(null, result.getHeader(DOUBLE_ATTR_NAME));
assertEquals(null, result.getHeader(BOOLEAN_ATT_NAME));
}
@Test
public void testSubscriptionHeadersPassed() {
// setup
String expectedSubscriptionInfo = "subscriptionInfo";
when(cometdMessage.get(CometdBinding.COMETD_SUBSCRIPTION_HEADER_NAME))
.thenReturn(expectedSubscriptionInfo);
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(2, result.getHeaders().size());
assertEquals(expectedSubscriptionInfo,
result.getHeader(CometdBinding.COMETD_SUBSCRIPTION_HEADER_NAME));
}
}
|
apache-2.0
|
gradle/gradle
|
subprojects/core/src/main/java/org/gradle/api/internal/tasks/execution/package-info.java
|
712
|
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.gradle.api.internal.tasks.execution;
import org.gradle.api.NonNullApi;
|
apache-2.0
|
ricepanda/rice-git3
|
rice-framework/krad-sampleapp/web/src/it/java/org/kuali/rice/krad/demo/travel/account/DemoTravelAccountMultivalueLookUpAft.java
|
4184
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.demo.travel.account;
import org.junit.Test;
import org.kuali.rice.krad.demo.ViewDemoAftBase;
import org.kuali.rice.testtools.selenium.WebDriverLegacyITBase;
import org.openqa.selenium.By;
/**
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class DemoTravelAccountMultivalueLookUpAft extends ViewDemoAftBase {
/**
* /kr-krad/lookup?methodToCall=start&dataObjectClassName=org.kuali.rice.krad.demo.travel.dataobject.TravelAccount&hideReturnLink=true&multipleValuesSelect=true&lookupCollectionName=travelAccounts&suppressActions=true&conversionFields=number:foo,name:foo
*/
public static final String BOOKMARK_URL = "/kr-krad/lookup?methodToCall=start&dataObjectClassName=org.kuali.rice.krad.demo.travel.dataobject.TravelAccount&hideReturnLink=true&multipleValuesSelect=true&lookupCollectionName=travelAccounts&suppressActions=true&conversionFields=number:foo,name:foo";
/**
* selectedCollectionLines['lookupResults']
*/
public static final String LOOKUP_RESULTS = "selectedCollectionLines['lookupResults']";
@Override
public String getBookmarkUrl() {
return BOOKMARK_URL;
}
@Override
protected void navigate() throws Exception {
waitAndClickDemoLink();
waitAndClickByLinkText("Account Multi-Value Lookup");
}
private void testSearchSelect() throws Exception {
waitAndClickByValue("CAT");
waitAndClickByXpath("//div[@data-label='Travel Account Type Code']/div/div/button[@class='btn btn-default uif-action icon-search']");
waitSearchAndReturnFromLightbox();
waitAndClickButtonByText(WebDriverLegacyITBase.SEARCH);
By[] bysPresent = new By[] {By.xpath("//a[contains(text(), 'a6')]"), By.xpath("//a[contains(text(), 'a9')]"), By.xpath("//a[contains(text(), 'a14')]")};
assertElementsPresentInResultPages(bysPresent);
waitAndClickByName(LOOKUP_RESULTS);
assertButtonEnabledByText(WebDriverLegacyITBase.RETURN_SELECTED_BUTTON_TEXT);
waitAndClickByName(LOOKUP_RESULTS);
assertButtonDisabledByText(WebDriverLegacyITBase.RETURN_SELECTED_BUTTON_TEXT);
assertMultiValueSelectAllThisPage();
assertMultiValueDeselectAllThisPage();
waitAndClickByName(LOOKUP_RESULTS);
waitAndClickButtonByText(WebDriverLegacyITBase.SEARCH);
checkForIncidentReport();
}
@Test
public void testTravelAccountMultivalueLookUpSearchSelectBookmark() throws Exception {
testSearchSelect();
passed();
}
@Test
public void testTravelAccountMultivalueLookUpSearchSelectNav() throws Exception {
testSearchSelect();
passed();
}
@Test
public void testTravelAccountMultivalueLookUpSelectThisPageBookmark() throws Exception {
testMultiValueSelectAllThisPage();
passed();
}
@Test
public void testTravelAccountMultivalueLookUpSelectThisPageNav() throws Exception {
testMultiValueSelectAllThisPage();
passed();
}
@Test
public void testTravelAccountMultivalueLookUpSelectAllPagesBookmark() throws Exception {
testMultiValueSelectAllPages();
passed();
}
@Test
public void testTravelAccountMultivalueLookUpSelectAllPagesNav() throws Exception {
testMultiValueSelectAllPages();
passed();
}
private void waitSearchAndReturnFromLightbox() throws Exception {
gotoLightBox();
waitAndClickButtonByText("Search");
waitAndClickByLinkText("return value");
}
}
|
apache-2.0
|
WilliamRen/bbossgroups-3.5
|
bboss-rpc/src-jgroups/bboss/org/jgroups/persistence/FilePersistenceManager.java
|
5311
|
package bboss.org.jgroups.persistence;
/**
* @author Mandar Shinde
* The class implements the PersistenceManager interface and provides users
* with a file-based implementation when required.
* The state of this class is currently NOOP. Implementation will be in place
* once a better structure for file-based properties is designed.
*/
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import bboss.org.jgroups.annotations.Unsupported;
@Unsupported
public class FilePersistenceManager implements PersistenceManager
{
private final File file;
/**
* Default constructor
*/
public FilePersistenceManager(String propertiesFilename)
throws Exception
{
Properties properties = new Properties();
properties.load(new FileInputStream(propertiesFilename));
String path = properties.getProperty(PersistenceFactory.persistProp);
file = new File(path);
file.createNewFile();
}
/**
* Save new NV pair as serializable objects or if already exist; store
* new state
*/
public void save(Serializable key, Serializable val) throws CannotPersistException
{
try
{
Map map = retrieveAll();
map.put(key, val);
saveAll(map);
}
catch (CannotRetrieveException e)
{
throw new CannotPersistException(e, "Unable to pre-load existing store.");
}
}
/**
* Remove existing NV from being persisted
*/
public Serializable remove(Serializable key) throws CannotRemoveException
{
Object o;
try
{
Map map = retrieveAll();
o = map.remove(key);
saveAll(map);
}
catch (CannotRetrieveException e)
{
throw new CannotRemoveException(e, "Unable to pre-load existing store.");
}
catch (CannotPersistException e)
{
throw new CannotRemoveException(e, "Unable to pre-load existing store.");
}
return (Serializable) o;
}
/**
* Used to store a complete map into persistent state
* @exception CannotPersistException
*/
public void saveAll(Map map) throws CannotPersistException
{
try
{
OutputStream fos = new FileOutputStream(file);
Properties prop = new Properties();
// NB: For some reason Properties.putAll(map) doesn't seem to work - dimc@users.sourceforge.net
for (Iterator iterator = map.entrySet().iterator(); iterator.hasNext();)
{
Map.Entry entry = (Map.Entry) iterator.next();
prop.setProperty(entry.getKey().toString(), entry.getValue().toString());
}
prop.store(fos, null);
fos.flush();
fos.close();
}
catch (IOException e)
{
throw new CannotPersistException(e, "Cannot save to: " + file.getAbsolutePath());
}
}
/**
* Gives back the Map in last known state
* @return Map
* @exception CannotRetrieveException
*/
public Map retrieveAll() throws CannotRetrieveException
{
try
{
Properties prop = new Properties();
FileInputStream fis = new FileInputStream(file);
prop.load(fis);
fis.close();
return filterLoadedValues(prop);
}
catch (IOException e)
{
throw new CannotRetrieveException(e, "Unable to load from file: " + file.getAbsolutePath());
}
}
/**
* Turns the values into Floats to enable
* {@link bboss.org.jgroups.demos.DistributedHashtableDemo} to work.
* Subclasses should override this method to convert the incoming map
* of string/string key/value pairs into the types they want.
* @param in
* @return Map
*/
protected Map filterLoadedValues(Map in)
{
Map out = new HashMap();
for (Iterator iterator = in.entrySet().iterator(); iterator.hasNext();)
{
Map.Entry entry = (Map.Entry) iterator.next();
out.put(entry.getKey().toString(), Float.valueOf(entry.getValue().toString()));
}
return out;
}
/**
* Clears the complete NV state from the DB
* @exception CannotRemoveException
*/
public void clear() throws CannotRemoveException
{
try
{
saveAll(Collections.EMPTY_MAP);
}
catch (CannotPersistException e)
{
throw new CannotRemoveException(e, "Unable to clear map.");
}
}
/**
* Used to handle shutdown call the PersistenceManager implementation.
* Persistent engines can leave this implementation empty.
*/
public void shutDown()
{
return;
}
}// end of class
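// A minimal usage sketch (hypothetical file name): values are persisted as
// string properties and come back as Floats via filterLoadedValues().
// The properties file passed to the constructor must define the
// PersistenceFactory.persistProp key, pointing at a writable store file.
class FilePersistenceManagerDemo
{
    public static void main(String[] args) throws Exception
    {
        FilePersistenceManager manager = new FilePersistenceManager("persist.properties");
        manager.save("answer", "42.0");                               // stored as a string property
        Float reloaded = (Float) manager.retrieveAll().get("answer"); // reloaded as a Float
        System.out.println(reloaded);                                 // prints 42.0
        manager.clear();
    }
}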
|
apache-2.0
|
emeroad/pinpoint
|
profiler/src/test/java/com/navercorp/pinpoint/profiler/monitor/metric/totalthread/DefaultTotalThreadMetricTest.java
|
1024
|
/*
* Copyright 2020 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.monitor.metric.totalthread;
import org.junit.Assert;
import org.junit.Test;
public class DefaultTotalThreadMetricTest {
@Test
public void test() {
TotalThreadMetric totalThreadMetric = new DefaultTotalThreadMetric();
TotalThreadMetricSnapshot snapshot = totalThreadMetric.getSnapshot();
Assert.assertNotEquals(snapshot.getTotalThreadCount(), 0);
}
}
|
apache-2.0
|
jk1/intellij-community
|
platform/vcs-log/impl/src/com/intellij/vcs/log/data/TopCommitsCache.java
|
3864
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.data;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.IntObjectMap;
import com.intellij.vcs.log.VcsCommitMetadata;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
import java.util.List;
public class TopCommitsCache {
@NotNull private final VcsLogStorage myStorage;
@NotNull private final IntObjectMap<VcsCommitMetadata> myCache = ContainerUtil.createConcurrentIntObjectMap();
@NotNull private List<VcsCommitMetadata> mySortedDetails = ContainerUtil.newArrayList();
public TopCommitsCache(@NotNull VcsLogStorage storage) {
myStorage = storage;
}
private int getIndex(@NotNull VcsCommitMetadata metadata) {
return myStorage.getCommitIndex(metadata.getId(), metadata.getRoot());
}
public void storeDetails(@NotNull List<? extends VcsCommitMetadata> sortedDetails) {
List<VcsCommitMetadata> newDetails = ContainerUtil.filter(sortedDetails, metadata -> !myCache.containsValue(metadata));
if (newDetails.isEmpty()) return;
Iterator<VcsCommitMetadata> it = new MergingIterator(mySortedDetails, newDetails);
List<VcsCommitMetadata> result = ContainerUtil.newArrayList();
boolean isBroken = false;
while (it.hasNext()) {
VcsCommitMetadata detail = it.next();
int index = getIndex(detail);
if (index == VcsLogStorageImpl.NO_INDEX) {
isBroken = true;
continue; // means some error happened (and reported) earlier, nothing we can do here
}
if (result.size() < VcsLogData.RECENT_COMMITS_COUNT * 2) {
result.add(detail);
myCache.put(index, detail);
}
else {
myCache.remove(index);
}
}
assert result.size() == myCache.size() || isBroken : result.size() + " details to store, yet " + myCache.size() + " indexes in cache.";
mySortedDetails = result;
}
@Nullable
public VcsCommitMetadata get(int index) {
return myCache.get(index);
}
public void clear() {
myCache.clear();
mySortedDetails.clear();
}
private static class MergingIterator implements Iterator<VcsCommitMetadata> {
private final PeekingIterator<VcsCommitMetadata> myFirst;
private final PeekingIterator<VcsCommitMetadata> mySecond;
private MergingIterator(@NotNull List<VcsCommitMetadata> first, @NotNull List<VcsCommitMetadata> second) {
myFirst = Iterators.peekingIterator(first.iterator());
mySecond = Iterators.peekingIterator(second.iterator());
}
@Override
public boolean hasNext() {
return myFirst.hasNext() || mySecond.hasNext();
}
@Override
public VcsCommitMetadata next() {
if (!myFirst.hasNext()) return mySecond.next();
if (!mySecond.hasNext()) return myFirst.next();
VcsCommitMetadata data1 = myFirst.peek();
VcsCommitMetadata data2 = mySecond.peek();
// more recent commits (with bigger timestamp) should go first
// if timestamp is the same, commit from the second list is chosen
if (data1.getTimestamp() > data2.getTimestamp()) return myFirst.next();
return mySecond.next();
}
}
}
|
apache-2.0
|
ChristosChristofidis/h2o-3
|
h2o-algos/src/test/java/hex/deeplearning/DeepLearningScoreTest.java
|
2061
|
package hex.deeplearning;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.DKV;
import water.H2O;
import water.Key;
import water.TestUtil;
import water.fvec.Frame;
import water.fvec.NFSFileVec;
import water.fvec.RebalanceDataSet;
import water.fvec.Vec;
import water.parser.ParseDataset;
/**
* This test simulates an environment
* produced by Spark: a dataset divided into
* many small chunks, some of them empty.
*/
public class DeepLearningScoreTest extends TestUtil {
@BeforeClass
public static void setup() { stall_till_cloudsize(5); }
/** Load simple dataset, rebalance to a number of chunks > number of rows, and run deep learning */
@Test public void testPubDev928() {
// Create rebalanced dataset
Key rebalancedKey = Key.make("rebalanced");
NFSFileVec nfs = NFSFileVec.make(find_test_file("smalldata/logreg/prostate.csv"));
Frame fr = ParseDataset.parse(Key.make(), nfs._key);
RebalanceDataSet rb = new RebalanceDataSet(fr, rebalancedKey, (int)(fr.numRows()+1));
H2O.submitTask(rb);
rb.join();
Frame rebalanced = DKV.get(rebalancedKey).get();
// Assert that there is at least one 0-len chunk
assertZeroLengthChunk("Rebalanced dataset should contain at least one 0-len chunk!", rebalanced.anyVec());
DeepLearningModel dlModel = null;
try {
// Launch Deep Learning
DeepLearningParameters dlParams = new DeepLearningParameters();
dlParams._train = rebalancedKey;
dlParams._epochs = 5;
dlParams._response_column = "CAPSULE";
dlModel = new DeepLearning(dlParams).trainModel().get();
} finally {
fr.delete();
rebalanced.delete();
if (dlModel != null) dlModel.delete();
}
}
private void assertZeroLengthChunk(String msg, Vec v) {
boolean hasZeroLenChunk = false;
for (int i = 0; i < v.nChunks(); i++) {
hasZeroLenChunk |= (v.chunkForChunkIdx(i).len() == 0);
System.out.println(v.chunkForChunkIdx(i).len());
}
Assert.assertTrue(msg, hasZeroLenChunk);
}
}
|
apache-2.0
|
stoksey69/googleads-java-lib
|
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201502/cm/PolicyViolationErrorReason.java
|
820
|
package com.google.api.ads.adwords.jaxws.v201502.cm;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for PolicyViolationError.Reason.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="PolicyViolationError.Reason">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="POLICY_ERROR"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "PolicyViolationError.Reason")
@XmlEnum
public enum PolicyViolationErrorReason {
POLICY_ERROR;
public String value() {
return name();
}
public static PolicyViolationErrorReason fromValue(String v) {
return valueOf(v);
}
}
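// A minimal round-trip sketch (not part of the generated code): the enum name
// doubles as the wire value, so value() and fromValue(...) are inverses.
class PolicyViolationErrorReasonDemo {
    public static void main(String[] args) {
        PolicyViolationErrorReason reason = PolicyViolationErrorReason.fromValue("POLICY_ERROR");
        System.out.println(reason.value()); // prints "POLICY_ERROR"
    }
}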
|
apache-2.0
|
noondaysun/sakai
|
site-manage/site-manage-group-section-role-helper/tool/src/java/org/sakaiproject/site/tool/helper/managegroupsectionrole/rsf/GroupDelProducer.java
|
6520
|
package org.sakaiproject.site.tool.helper.managegroupsectionrole.rsf;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.tool.helper.managegroupsectionrole.impl.SiteManageGroupSectionRoleHandler;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.user.api.User;
import uk.ac.cam.caret.sakai.rsf.producers.FrameAdjustingProducer;
import uk.ac.cam.caret.sakai.rsf.util.SakaiURLUtil;
import uk.org.ponder.messageutil.MessageLocator;
import uk.org.ponder.messageutil.TargettedMessageList;
import uk.org.ponder.rsf.components.UIBranchContainer;
import uk.org.ponder.rsf.components.UICommand;
import uk.org.ponder.rsf.components.UIContainer;
import uk.org.ponder.rsf.components.UIELBinding;
import uk.org.ponder.rsf.components.UIForm;
import uk.org.ponder.rsf.components.UIInput;
import uk.org.ponder.rsf.components.UIInternalLink;
import uk.org.ponder.rsf.components.UIMessage;
import uk.org.ponder.rsf.components.UIOutput;
import uk.org.ponder.rsf.components.UISelect;
import uk.org.ponder.rsf.components.UISelectChoice;
import uk.org.ponder.rsf.components.UIVerbatim;
import uk.org.ponder.rsf.components.UIDeletionBinding;
import uk.org.ponder.rsf.components.decorators.DecoratorList;
import uk.org.ponder.rsf.components.decorators.UILabelTargetDecorator;
import uk.org.ponder.rsf.components.decorators.UITooltipDecorator;
import uk.org.ponder.rsf.flow.ARIResult;
import uk.org.ponder.rsf.flow.ActionResultInterceptor;
import uk.org.ponder.rsf.flow.jsfnav.NavigationCase;
import uk.org.ponder.rsf.view.ComponentChecker;
import uk.org.ponder.rsf.view.DefaultView;
import uk.org.ponder.rsf.view.ViewComponentProducer;
import uk.org.ponder.rsf.viewstate.RawViewParameters;
import uk.org.ponder.rsf.viewstate.SimpleViewParameters;
import uk.org.ponder.rsf.viewstate.ViewParameters;
import uk.org.ponder.rsf.viewstate.ViewParamsReporter;
import uk.org.ponder.stringutil.StringList;
/**
*
* @author
*
*/
public class GroupDelProducer
implements ViewComponentProducer, ActionResultInterceptor{
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(GroupDelProducer.class);
public static final String VIEW_ID = "GroupDel";
public MessageLocator messageLocator;
public SiteManageGroupSectionRoleHandler handler;
public FrameAdjustingProducer frameAdjustingProducer;
public AuthzGroupService authzGroupService;
public String getViewID() {
return VIEW_ID;
}
private TargettedMessageList tml;
public void setTargettedMessageList(TargettedMessageList tml) {
this.tml = tml;
}
public void fillComponents(UIContainer tofill, ViewParameters arg1, ComponentChecker arg2) {
UIOutput.make(tofill, "page-title", messageLocator.getMessage("editgroup.removegroups"));
UIForm deleteForm = UIForm.make(tofill, "delete-confirm-form");
boolean renderDelete = false;
// Create a multiple selection control for the tasks to be deleted.
// We will fill in the options at the loop end once we have collected them.
UISelect deleteselect = UISelect.makeMultiple(deleteForm, "delete-group",
null, "#{SiteManageGroupSectionRoleHandler.deleteGroupIds}", new String[] {});
//get the headers for the table
UIMessage.make(deleteForm, "group-title-title","group.title");
UIMessage.make(deleteForm, "group-size-title", "group.number");
UIMessage.make(deleteForm, "group-remove-title", "editgroup.remove");
List<Group> groups = handler.getSelectedGroups();
StringList deletable = new StringList();
M_log.debug(this + "fillComponents: got a list of " + groups.size() + " groups");
if (groups != null && groups.size() > 0)
{
for (Iterator<Group> it=groups.iterator(); it.hasNext(); ) {
Group group = it.next();
String groupId = group.getId();
UIBranchContainer grouprow = UIBranchContainer.make(deleteForm, "group-row:", group.getId());
UIOutput.make(grouprow,"group-title",group.getTitle());
int size = 0;
try
{
size=authzGroupService.getAuthzGroup(group.getReference()).getMembers().size();
}
catch (GroupNotDefinedException e)
{
M_log.debug(this + "fillComponent: cannot find group " + group.getReference());
}
UIOutput.make(grouprow,"group-size",String.valueOf(size));
deletable.add(group.getId());
UISelectChoice delete = UISelectChoice.make(grouprow, "group-select", deleteselect.getFullID(), (deletable.size()-1));
delete.decorators = new DecoratorList(new UITooltipDecorator(UIMessage.make("delete_group_tooltip", new String[] {group.getTitle()})));
UIMessage message = UIMessage.make(grouprow,"delete-label","delete_group_tooltip", new String[] {group.getTitle()});
UILabelTargetDecorator.targetLabel(message,delete);
M_log.debug(this + ".fillComponent: this group can be deleted");
renderDelete = true;
}
}
deleteselect.optionlist.setValue(deletable.toStringArray());
UICommand.make(deleteForm, "delete-groups", UIMessage.make("editgroup.removegroups"), "#{SiteManageGroupSectionRoleHandler.processDeleteGroups}");
UICommand cancel = UICommand.make(deleteForm, "cancel", UIMessage.make("editgroup.cancel"), "#{SiteManageGroupSectionRoleHandler.processCancelDelete}");
cancel.parameters.add(new UIDeletionBinding("#{destroyScope.resultScope}"));
//process any messages
UIBranchContainer errorRow = UIBranchContainer.make(tofill,"error-row:", "0");
UIMessage.make(errorRow,"error","editgroup.groupdel.alert", new String[]{});
}
public ViewParameters getViewParameters() {
GroupEditViewParameters params = new GroupEditViewParameters();
params.id = null;
return params;
}
// new hotness
public void interceptActionResult(ARIResult result, ViewParameters incoming, Object actionReturn) {
if ("success".equals(actionReturn) || "cancel".equals(actionReturn)) {
result.resultingView = new SimpleViewParameters(GroupListProducer.VIEW_ID);
}
}
}
|
apache-2.0
|
PP888/collide
|
java/com/google/collide/json/shared/JsonIntegerMap.java
|
1220
|
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.collide.json.shared;
/**
* Integer Map interface.
*
* @param <T> the type contained as value in the map
*/
public interface JsonIntegerMap<T> {
/**
* Callback interface for int key / T value pairs.
*/
public interface IterationCallback<T> {
void onIteration(int key, T val);
}
boolean hasKey(int key);
T get(int key);
void put(int key, T val);
boolean isEmpty();
void erase(int key);
/**
* Iterates through the contents and calls back out to a callback.
*
* @param cb callback object
*/
void iterate(IterationCallback<T> cb);
}
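// A minimal sketch (hypothetical, not part of Collide) of a plain-JVM
// implementation backed by java.util.HashMap, to make the contract concrete.
class HashJsonIntegerMap<T> implements JsonIntegerMap<T> {
    private final java.util.Map<Integer, T> map = new java.util.HashMap<Integer, T>();
    @Override public boolean hasKey(int key) { return map.containsKey(key); }
    @Override public T get(int key) { return map.get(key); }
    @Override public void put(int key, T val) { map.put(key, val); }
    @Override public boolean isEmpty() { return map.isEmpty(); }
    @Override public void erase(int key) { map.remove(key); }
    @Override public void iterate(IterationCallback<T> cb) {
        for (java.util.Map.Entry<Integer, T> entry : map.entrySet()) {
            cb.onIteration(entry.getKey(), entry.getValue());
        }
    }
}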
|
apache-2.0
|
DieBauer/flink
|
flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/PipelineErrorHandler.java
|
2754
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.webmonitor;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.util.ExceptionUtils;
import org.slf4j.Logger;
/**
* This is the last handler in the pipeline. It logs all error messages and sends exception
* responses.
*/
@ChannelHandler.Sharable
public class PipelineErrorHandler extends SimpleChannelInboundHandler<Object> {
/** The logger to which the handler writes the log statements */
private final Logger logger;
public PipelineErrorHandler(Logger logger) {
this.logger = logger;
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object message) {
// we can't deal with this message. No one in the pipeline handled it. Log it.
logger.debug("Unknown message received: {}", message);
sendError(ctx, "Unknown message received.");
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
logger.debug("Unhandled exception: {}", cause);
sendError(ctx, ExceptionUtils.stringifyException(cause));
}
private void sendError(ChannelHandlerContext ctx, String error) {
if (ctx.channel().isActive()) {
DefaultFullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1,
HttpResponseStatus.INTERNAL_SERVER_ERROR,
Unpooled.wrappedBuffer(error.getBytes(ConfigConstants.DEFAULT_CHARSET)));
response.headers().set(HttpHeaders.Names.CONTENT_TYPE, "text/plain");
response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, response.content().readableBytes());
ctx.writeAndFlush(response);
}
}
}
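// A minimal wiring sketch (hypothetical, not Flink's actual bootstrap code):
// the handler is registered last so that anything not consumed by earlier
// handlers falls through to it and produces a logged error response.
class PipelineErrorHandlerWiring {
    static void installLast(io.netty.channel.ChannelPipeline pipeline, org.slf4j.Logger logger) {
        pipeline.addLast("pipeline-error-handler", new PipelineErrorHandler(logger));
    }
}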
|
apache-2.0
|
idea4bsd/idea4bsd
|
platform/vcs-impl/src/com/intellij/openapi/vcs/changes/patch/tool/ApplyPatchMergeRequest.java
|
4424
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes.patch.tool;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.merge.MergeRequest;
import com.intellij.diff.merge.MergeResult;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vcs.changes.patch.AppliedTextPatch;
import com.intellij.util.Consumer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class ApplyPatchMergeRequest extends MergeRequest implements ApplyPatchRequest {
@Nullable private final Project myProject;
@NotNull private final DocumentContent myResultContent;
@NotNull private final AppliedTextPatch myAppliedPatch;
@NotNull private final CharSequence myOriginalContent;
@NotNull private final String myLocalContent;
@Nullable private final String myWindowTitle;
@NotNull private final String myLocalTitle;
@NotNull private final String myResultTitle;
@NotNull private final String myPatchTitle;
@Nullable private final Consumer<MergeResult> myCallback;
public ApplyPatchMergeRequest(@Nullable Project project,
@NotNull DocumentContent resultContent,
@NotNull AppliedTextPatch appliedPatch,
@NotNull String localContent,
@Nullable String windowTitle,
@NotNull String localTitle,
@NotNull String resultTitle,
@NotNull String patchTitle,
@Nullable Consumer<MergeResult> callback) {
myProject = project;
myResultContent = resultContent;
myAppliedPatch = appliedPatch;
myOriginalContent = ApplicationManager.getApplication().runReadAction(new Computable<CharSequence>() {
@Override
public CharSequence compute() {
return myResultContent.getDocument().getImmutableCharSequence();
}
});
myLocalContent = localContent;
myWindowTitle = windowTitle;
myLocalTitle = localTitle;
myResultTitle = resultTitle;
myPatchTitle = patchTitle;
myCallback = callback;
}
@Nullable
public Project getProject() {
return myProject;
}
@Override
@NotNull
public DocumentContent getResultContent() {
return myResultContent;
}
@Override
@NotNull
public String getLocalContent() {
return myLocalContent;
}
@Override
@NotNull
public AppliedTextPatch getPatch() {
return myAppliedPatch;
}
@Nullable
@Override
public String getTitle() {
return myWindowTitle;
}
@Override
@NotNull
public String getLocalTitle() {
return myLocalTitle;
}
@Override
@NotNull
public String getResultTitle() {
return myResultTitle;
}
@Override
@NotNull
public String getPatchTitle() {
return myPatchTitle;
}
@Override
public void applyResult(@NotNull MergeResult result) {
final CharSequence applyContent;
switch (result) {
case CANCEL:
applyContent = myOriginalContent;
break;
case LEFT:
applyContent = myLocalContent;
break;
case RIGHT:
throw new UnsupportedOperationException();
case RESOLVED:
applyContent = null;
break;
default:
throw new IllegalArgumentException(result.name());
}
if (applyContent != null) {
new WriteCommandAction.Simple(myProject) {
@Override
protected void run() throws Throwable {
myResultContent.getDocument().setText(applyContent);
}
}.execute();
}
if (myCallback != null) myCallback.consume(result);
}
}
|
apache-2.0
|
clarkyzl/flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/StreamTableDescriptorValidator.java
|
2242
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors;
import org.apache.flink.annotation.Internal;
import java.util.ArrayList;
import java.util.List;
/** Validator for {@code StreamTableDescriptor}. */
@Internal
public class StreamTableDescriptorValidator implements DescriptorValidator {
public static final String UPDATE_MODE = "update-mode";
public static final String UPDATE_MODE_VALUE_APPEND = "append";
public static final String UPDATE_MODE_VALUE_RETRACT = "retract";
public static final String UPDATE_MODE_VALUE_UPSERT = "upsert";
private final boolean supportsAppend;
private final boolean supportsRetract;
private final boolean supportsUpsert;
public StreamTableDescriptorValidator(
boolean supportsAppend, boolean supportsRetract, boolean supportsUpsert) {
this.supportsAppend = supportsAppend;
this.supportsRetract = supportsRetract;
this.supportsUpsert = supportsUpsert;
}
@Override
public void validate(DescriptorProperties properties) {
List<String> modeList = new ArrayList<>();
if (supportsAppend) {
modeList.add(UPDATE_MODE_VALUE_APPEND);
}
if (supportsRetract) {
modeList.add(UPDATE_MODE_VALUE_RETRACT);
}
if (supportsUpsert) {
modeList.add(UPDATE_MODE_VALUE_UPSERT);
}
properties.validateEnumValues(UPDATE_MODE, false, modeList);
}
}
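// Hedged usage sketch (an illustrative addition, not part of the Flink sources
// above): a validator that only supports append mode accepting properties that
// declare "update-mode" = "append". DescriptorProperties lives in this package.
class StreamTableDescriptorValidatorExample {
    public static void main(String[] args) {
        DescriptorProperties properties = new DescriptorProperties();
        properties.putString(
                StreamTableDescriptorValidator.UPDATE_MODE,
                StreamTableDescriptorValidator.UPDATE_MODE_VALUE_APPEND);
        // Passes because "append" is in the mode list built by validate().
        new StreamTableDescriptorValidator(true, false, false).validate(properties);
    }
}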
|
apache-2.0
|
shyTNT/googleads-java-lib
|
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201411/DateTimeRangeTargetingError.java
|
1855
|
package com.google.api.ads.dfp.jaxws.v201411;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Lists all date time range errors caused by associating a line item with a targeting
* expression.
*
*
* <p>Java class for DateTimeRangeTargetingError complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="DateTimeRangeTargetingError">
* <complexContent>
* <extension base="{https://www.google.com/apis/ads/publisher/v201411}ApiError">
* <sequence>
* <element name="reason" type="{https://www.google.com/apis/ads/publisher/v201411}DateTimeRangeTargetingError.Reason" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DateTimeRangeTargetingError", propOrder = {
"reason"
})
public class DateTimeRangeTargetingError
extends ApiError
{
@XmlSchemaType(name = "string")
protected DateTimeRangeTargetingErrorReason reason;
/**
* Gets the value of the reason property.
*
* @return
* possible object is
* {@link DateTimeRangeTargetingErrorReason }
*
*/
public DateTimeRangeTargetingErrorReason getReason() {
return reason;
}
/**
* Sets the value of the reason property.
*
* @param value
* allowed object is
* {@link DateTimeRangeTargetingErrorReason }
*
*/
public void setReason(DateTimeRangeTargetingErrorReason value) {
this.reason = value;
}
}
|
apache-2.0
|
romeara/spdx-tools
|
src/org/spdx/compare/SpdxItemDifference.java
|
5944
|
/**
* Copyright (c) 2015 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.compare;
import org.spdx.rdfparser.license.AnyLicenseInfo;
import org.spdx.rdfparser.model.Annotation;
import org.spdx.rdfparser.model.Relationship;
import org.spdx.rdfparser.model.SpdxItem;
/**
* Contains the results of a comparison between two SPDX items with the same name
* @author Gary O'Neall
*
*/
public class SpdxItemDifference {
private String name;
private String commentA;
private String commentB;
private String concludedLicenseA;
private String concludedLicenseB;
private boolean concludedLicenseEquals;
private String copyrightA;
private String copyrightB;
private String licenseCommentsA;
private String licenseCommentsB;
private boolean seenLicensesEqual;
private AnyLicenseInfo[] uniqueSeenLicensesA;
private AnyLicenseInfo[] uniqueSeenLicensesB;
private boolean relationshipsEquals;
private Relationship[] uniqueRelationshipA;
private Relationship[] uniqueRelationshipB;
private boolean annotationsEquals;
private Annotation[] uniqueAnnotationsA;
private Annotation[] uniqueAnnotationsB;
public SpdxItemDifference(SpdxItem itemA, SpdxItem itemB,
boolean concludedLicensesEqual, boolean seenLicensesEqual,
AnyLicenseInfo[] uniqueSeenLicensesA,
AnyLicenseInfo[] uniqueSeenLicensesB,
boolean relationshipsEquals,
Relationship[] uniqueRelationshipA,
Relationship[] uniqueRelationshipB,
boolean annotationsEquals,
Annotation[] uniqueAnnotationsA,
Annotation[] uniqueAnnotationsB
) throws SpdxCompareException {
this.name = itemA.getName();
this.commentA = itemA.getComment();
if (this.commentA == null) {
this.commentA = "";
}
this.commentB = itemB.getComment();
if (this.commentB == null) {
this.commentB = "";
}
this.concludedLicenseA = itemA.getLicenseConcluded().toString();
this.concludedLicenseB = itemB.getLicenseConcluded().toString();
this.concludedLicenseEquals = concludedLicensesEqual;
this.copyrightA = itemA.getCopyrightText();
if (this.copyrightA == null) {
this.copyrightA = "";
}
this.copyrightB = itemB.getCopyrightText();
if (this.copyrightB == null) {
this.copyrightB = "";
}
this.licenseCommentsA = itemA.getLicenseComments();
if (this.licenseCommentsA == null) {
this.licenseCommentsA = "";
}
this.licenseCommentsB = itemB.getLicenseComments();
if (this.licenseCommentsB == null) {
this.licenseCommentsB = "";
}
this.seenLicensesEqual = seenLicensesEqual;
this.uniqueSeenLicensesA = uniqueSeenLicensesA;
this.uniqueSeenLicensesB = uniqueSeenLicensesB;
this.relationshipsEquals = relationshipsEquals;
this.uniqueRelationshipA = uniqueRelationshipA;
this.uniqueRelationshipB = uniqueRelationshipB;
this.annotationsEquals = annotationsEquals;
this.uniqueAnnotationsA = uniqueAnnotationsA;
this.uniqueAnnotationsB = uniqueAnnotationsB;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @return the commentA
*/
public String getCommentA() {
return commentA;
}
/**
* @return the commentB
*/
public String getCommentB() {
return commentB;
}
/**
* @return the concludedLicenseA
*/
public String getConcludedLicenseA() {
return concludedLicenseA;
}
/**
* @return the concludedLicenseB
*/
public String getConcludedLicenseB() {
return concludedLicenseB;
}
/**
* @return the concludedLicenseEquals
*/
public boolean isConcludedLicenseEquals() {
return concludedLicenseEquals;
}
/**
* @return the copyrightA
*/
public String getCopyrightA() {
return copyrightA;
}
/**
* @return the copyrightB
*/
public String getCopyrightB() {
return copyrightB;
}
/**
* @return the licenseCommentsA
*/
public String getLicenseCommentsA() {
return licenseCommentsA;
}
/**
* @return the licenseCommentsB
*/
public String getLicenseCommentsB() {
return licenseCommentsB;
}
/**
* @return the seenLicensesEqual
*/
public boolean isSeenLicensesEquals() {
return seenLicensesEqual;
}
/**
* @return the uniqueSeenLicensesA
*/
public AnyLicenseInfo[] getUniqueSeenLicensesA() {
return uniqueSeenLicensesA;
}
/**
* @return the uniqueSeenLicensesB
*/
public AnyLicenseInfo[] getUniqueSeenLicensesB() {
return uniqueSeenLicensesB;
}
public boolean isCommentsEquals() {
return SpdxComparer.stringsEqual(commentA, commentB);
}
public boolean isCopyrightsEqual() {
return SpdxComparer.stringsEqual(copyrightA, copyrightB);
}
public boolean isLicenseCommentsEqual() {
return SpdxComparer.stringsEqual(licenseCommentsA, licenseCommentsB);
}
/**
* @return the relationshipsEquals
*/
public boolean isRelationshipsEquals() {
return relationshipsEquals;
}
/**
* @return the uniqueRelationshipA
*/
public Relationship[] getUniqueRelationshipA() {
return uniqueRelationshipA;
}
/**
* @return the uniqueRelationshipB
*/
public Relationship[] getUniqueRelationshipB() {
return uniqueRelationshipB;
}
/**
* @return the annotationsEquals
*/
public boolean isAnnotationsEquals() {
return annotationsEquals;
}
/**
* @return the uniqueAnnotationsA
*/
public Annotation[] getUniqueAnnotationsA() {
return uniqueAnnotationsA;
}
/**
* @return the uniqueAnnotationsB
*/
public Annotation[] getUniqueAnnotationsB() {
return uniqueAnnotationsB;
}
}
|
apache-2.0
|
meiercaleb/incubator-rya
|
extras/indexing/src/test/java/org/apache/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
|
23050
|
package org.apache.rya.indexing.external.tupleSet;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import java.util.Set;
import org.apache.rya.indexing.pcj.matching.QueryVariableNormalizer;
import org.junit.Assert;
import org.junit.Test;
import org.openrdf.query.algebra.Filter;
import org.openrdf.query.algebra.QueryModelNode;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
import org.openrdf.query.algebra.helpers.StatementPatternCollector;
import org.openrdf.query.parser.ParsedQuery;
import org.openrdf.query.parser.sparql.SPARQLParser;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
public class VarConstQueryVariableNormalizerTest {
private String query1 = " " //
+ "SELECT ?person ?address ?otherValue" //
+ "{" //
+ "?person a <uri:Person>. " //
+ "?person <uri:hasName> <uri:name>."//
+ "?person <uri:hasAddress> ?address." //
+ "?person <uri:blah> ?otherValue" //
+ "}"; //
private String index1 = " " //
+ "SELECT ?X ?Y ?Z ?W" //
+ "{"//
+ "?X a <uri:Person>. " //
+ "?X <uri:hasName> ?Y."//
+ "?X <uri:hasAddress> ?Z." //
+ "?X <uri:blah> ?W" //
+ "}"; //
private String q4 = ""//
+ "SELECT ?s ?t ?u " //
+ "{" //
+ " ?s a ?t . "//
+ " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u "//
+ "}";//
private String q7 = ""//
+ "SELECT ?s ?t ?u ?x ?y ?z " //
+ "{" //
+ " ?s a ?t ."//
+ " ?x a ?y ."//
+ " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
+ " ?y <http://www.w3.org/2000/01/rdf-schema#label> ?z ."//
+ "}";//
private String q8 = ""//
+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " //
+ "{" //
+ " ?f a ?m ."//
+ " ?e a ?l ."//
+ " ?n a ?o ."//
+ " ?a a ?h ."//
+ " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
+ " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+ " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
+ " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
+ " ?f <uri:talksTo> ?m . "//
+ " ?m <uri:talksTo> ?a . "//
+ " ?o <uri:talksTo> ?r . "//
+ "}";//
private String q9 = ""//
+ "SELECT ?f ?d ?e ?c ?n ?p ?a ?r " //
+ "{" //
+ " ?f a <uri:dog> ."//
+ " ?e a <uri:chicken> ."//
+ " ?n a <uri:cow> ."//
+ " ?a a <uri:elephant> ."//
+ " <uri:dog> <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
+ " <uri:chicken> <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+ " <uri:cow> <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
+ " <uri:elephant> <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
+ " ?d <uri:talksTo> ?f . "//
+ " ?c <uri:talksTo> ?e . "//
+ " ?p <uri:talksTo> ?n . "//
+ " ?r <uri:talksTo> ?a . "//
+ "}";//
private String q10 = ""//
+ "SELECT ?f ?m ?d " //
+ "{" //
+ " ?f a ?m ."//
+ " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
+ " ?d <uri:talksTo> ?f . "//
+ "}";//
String q15 = ""//
+ "SELECT ?x ?y ?z ?w " //
+ "{" //
+ " ?x ?y ?z ."//
+ " ?y ?z ?w ."//
+ "}";//
String q16 = ""//
+ "SELECT ?a ?b ?c " //
+ "{" //
+ " ?a ?b ?c ."//
+ "}";//
String q17 = ""//
+ "SELECT ?q ?r " //
+ "{" //
+ " ?q ?r \"url:\" ."//
+ "}";//
private String q18 = ""//
+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?r " //
+ "{" //
+ " ?f a ?m ."//
+ " ?e a ?l ."//
+ " ?n a ?o ."//
+ " ?a a <uri:elephant> ."//
+ " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
+ " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+ " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
+ " <uri:elephant> <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
+ " ?d <uri:talksTo> ?f . "//
+ " ?c <uri:talksTo> ?e . "//
+ " ?p <uri:talksTo> ?n . "//
+ " ?r <uri:talksTo> ?a . "//
+ "}";//
String q32 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ "SELECT ?feature ?point " //
+ "{" //
+ " ?feature a geo:Feature . "//
+ " ?feature geo:hasGeometry ?point . "//
+ " ?point a geo:Point . "//
+ " ?point geo:asWKT \"wkt\" . "//
+ " FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " //
+ "}";//
String q33 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ "SELECT ?person ?commentmatch ?labelmatch" //
+ "{" //
+ " ?person a <http://example.org/ontology/Person> . "//
+ " ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?labelmatch . "//
+ " ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?commentmatch . "//
+ " FILTER(fts:text(?labelmatch, \"sally\")) . " //
+ " FILTER(fts:text(?commentmatch, \"bob\")) " //
+ "}";//
String q34 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ "SELECT ?a ?b ?c ?d" //
+ "{" //
+ " ?a a geo:Feature . "//
+ " ?b a geo:Point . "//
+ " ?b geo:asWKT ?c . "//
+ " FILTER(geof:sfWithin(?c, ?d)) " //
+ "}";//
String q35 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ "SELECT ?a ?b ?c" //
+ "{" //
+ " ?a <http://www.w3.org/2000/01/rdf-schema#comment> ?b . "//
+ " FILTER(fts:text(?b, ?c)) " //
+ "}";//
/**
 * @param tuple1 first query tree
 * @param tuple2 second query tree
 * @return true if the two trees contain exactly the same set of StatementPattern nodes
 * @throws Exception
 */
public boolean tupleEquals(TupleExpr tuple1, TupleExpr tuple2) throws Exception {
Set<StatementPattern> spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1));
Set<StatementPattern> spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2));
return spSet1.equals(spSet2);
}
/**
 * @param tuple1 first query tree
 * @param tuple2 second query tree
 * @return true if the StatementPattern nodes of tuple2 are a subset of those of tuple1
 * @throws Exception
 */
public boolean isTupleSubset(TupleExpr tuple1, TupleExpr tuple2) throws Exception {
Set<StatementPattern> spSet1 = Sets.newHashSet(StatementPatternCollector.process(tuple1));
Set<StatementPattern> spSet2 = Sets.newHashSet(StatementPatternCollector.process(tuple2));
return (Sets.intersection(spSet1, spSet2).equals(spSet2));
}
/**
 * Tests QueryVariableNormalizer with two queries whose
 * StatementPattern nodes contain no constant Vars.
 *
 * @throws Exception
 */
@Test
public void testNoConstants() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q15, null);
ParsedQuery pq2 = parser2.parseQuery(q16, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(2,normalize.size());
for (TupleExpr s : normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s));
}
pq1 = parser1.parseQuery(q16, null);
pq2 = parser2.parseQuery(q17, null);
normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), pq2.getTupleExpr());
Assert.assertTrue(normalize.size() == 0);
}
@Test
public void queryConstantNodeOneMatch() throws Exception {
SPARQLParser p = new SPARQLParser();
ParsedQuery pq1 = p.parseQuery(query1, null);
ParsedQuery pq2 = p.parseQuery(index1, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(1, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
}
/**
 * Tests QueryVariableNormalizer on the large query q9
 * with a smaller, potential index q10 to see if the
 * correct number of outputs is produced.
 *
 * @throws Exception
 */
@Test
public void queryConstNodeFourMatch() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q9, null);
ParsedQuery pq2 = parser2.parseQuery(q10, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
//System.out.println(normalize);
Assert.assertEquals(4, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
}
@Test
public void queryConstNodeSixMatch() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q9, null);
ParsedQuery pq2 = parser2.parseQuery(q18, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(6, normalize.size());
//System.out.println("tuple expr is " +pq1.getTupleExpr() + " and normalized tuples are " + normalize);
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
}
@Test
public void queryConstGeoFilter() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q32, null);
ParsedQuery pq2 = parser2.parseQuery(q34, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(1, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
FilterCollector fc1 = new FilterCollector();
pq1.getTupleExpr().visit(fc1);
List<QueryModelNode> fList1 = fc1.getFilters();
for(TupleExpr te: normalize) {
FilterCollector fc2 = new FilterCollector();
te.visit(fc2);
List<QueryModelNode> fList2 = fc2.getFilters();
for(QueryModelNode q: fList2) {
Assert.assertTrue(fList1.contains(q));
}
}
}
@Test
public void queryConstFreeTextFilter() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q33, null);
ParsedQuery pq2 = parser2.parseQuery(q35, null);
System.out.println(pq1.getTupleExpr());
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(2, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
FilterCollector fc1 = new FilterCollector();
pq1.getTupleExpr().visit(fc1);
List<QueryModelNode> fList1 = fc1.getFilters();
for(TupleExpr te: normalize) {
FilterCollector fc2 = new FilterCollector();
te.visit(fc2);
List<QueryModelNode> fList2 = fc2.getFilters();
for(QueryModelNode q: fList2) {
Assert.assertTrue(fList1.contains(q));
}
}
}
@Test
public void queryConstNodeTwoMatch() throws Exception {
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q7, null);
ParsedQuery pq2 = parser2.parseQuery(q4, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(2, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
}
@Test
public void queryNAryListMatch() throws Exception {
String q1 = ""//
+ "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " //
+ "{" //
+ " GRAPH ?x { " //
+ " ?a a ?b ."//
+ " ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+ " ?d <uri:talksTo> ?e . "//
+ " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
+ " FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " //
+ " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
+ " ?x <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
+ " ?b a ?q ."//
+ " }"//
+ "}";//
String q2 = ""//
+ "SELECT ?m ?n ?r ?y " //
+ "{" //
+ " GRAPH ?q { " //
+ " FILTER(?m IN (1,?y,3) && ?n NOT IN(?r,6,7)). " //
+ " ?q <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
+ " }"//
+ "}";//
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q1, null);
ParsedQuery pq2 = parser2.parseQuery(q2, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
Assert.assertEquals(1, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
FilterCollector fc1 = new FilterCollector();
pq1.getTupleExpr().visit(fc1);
List<QueryModelNode> fList1 = fc1.getFilters();
for(TupleExpr te: normalize) {
FilterCollector fc2 = new FilterCollector();
te.visit(fc2);
List<QueryModelNode> fList2 = fc2.getFilters();
for(QueryModelNode q: fList2) {
Assert.assertTrue(fList1.contains(q));
}
}
}
@Test
public void queryCompoundFilterMatch() throws Exception {
String q17 = ""//
+ "SELECT ?j ?k ?l ?m ?n ?o " //
+ "{" //
+ " GRAPH ?z { " //
+ " ?j <uri:talksTo> ?k . "//
+ " FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " //
+ " }"//
+ "}";//
// String q18 = ""//
// + "SELECT ?r ?s ?t ?u " //
// + "{" //
// + " GRAPH ?q { " //
// + " FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " //
// + " ?t a ?u ."//
// + " }"//
// + "}";//
String q19 = ""//
+ "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " //
+ "{" //
+ " GRAPH ?x { " //
+ " ?a a ?b ."//
+ " ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+ " ?d <uri:talksTo> \"5\" . "//
+ " FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). " //
+ " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
+ " FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
+ " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
+ " ?b a ?q ."//
+ " }"//
+ "}";//
// String q20 = ""//
// + "SELECT ?m ?n ?o " //
// + "{" //
// + " GRAPH ?q { " //
// + " FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " //
// + " ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
// + " }"//
// + "}";//
SPARQLParser parser1 = new SPARQLParser();
SPARQLParser parser2 = new SPARQLParser();
ParsedQuery pq1 = parser1.parseQuery(q19, null);
ParsedQuery pq2 = parser2.parseQuery(q17, null);
List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
pq2.getTupleExpr());
System.out.println(normalize);
Assert.assertEquals(1, normalize.size());
for(TupleExpr te: normalize) {
Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
}
FilterCollector fc1 = new FilterCollector();
pq1.getTupleExpr().visit(fc1);
List<QueryModelNode> fList1 = fc1.getFilters();
for(TupleExpr te: normalize) {
FilterCollector fc2 = new FilterCollector();
te.visit(fc2);
List<QueryModelNode> fList2 = fc2.getFilters();
for(QueryModelNode q: fList2) {
Assert.assertTrue(fList1.contains(q));
}
}
}
// @Test
// public void queryCompoundFilterMatch2() throws Exception {
//
//
//
//
//
//
// String q19 = ""//
// + "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " //
// + "{" //
// + " GRAPH ?x { " //
// + " ?a a ?b ."//
// + " ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
// + " ?d <uri:talksTo> \"5\" . "//
// + " FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). " //
// + " FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
// + " FILTER(?g IN (1,5,3) && ?h NOT IN(5,6,7)). " //
// + " ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
// + " ?b a ?q ."//
// + " }"//
// + "}";//
//
//
// String q20 = ""//
// + "SELECT ?m ?n ?o ?f ?a ?b ?c ?d " //
// + "{" //
// + " GRAPH ?q { " //
// + " ?d <uri:talksTo> ?o . "//
// + " FILTER ( ?o < ?f && (?a > ?b || ?c = ?d) ). " //
// + " FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " //
// + " ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
// + " }"//
// + "}";//
//
//
//
//
// SPARQLParser parser1 = new SPARQLParser();
// SPARQLParser parser2 = new SPARQLParser();
//
// ParsedQuery pq1 = parser1.parseQuery(q19, null);
// ParsedQuery pq2 = parser2.parseQuery(q20, null);
//
// List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
// pq2.getTupleExpr());
//
//
//
// System.out.println(normalize);
//
// Assert.assertEquals(1, normalize.size());
//
// for(TupleExpr te: normalize) {
// Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
// }
//
// FilterCollector fc1 = new FilterCollector();
// pq1.getTupleExpr().visit(fc1);
// List<QueryModelNode> fList1 = fc1.getFilters();
//
// for(TupleExpr te: normalize) {
// FilterCollector fc2 = new FilterCollector();
// te.visit(fc2);
// List<QueryModelNode> fList2 = fc2.getFilters();
//
// for(QueryModelNode q: fList2) {
// Assert.assertTrue(fList1.contains(q));
// }
// }
//
//
//
// }
//
//
private static class FilterCollector extends QueryModelVisitorBase<RuntimeException> {
private List<QueryModelNode> filterList = Lists.newArrayList();
public List<QueryModelNode> getFilters() {
return filterList;
}
@Override
public void meet(Filter node) {
filterList.add(node.getCondition());
super.meet(node);
}
}
}
|
apache-2.0
|
ShailShah/alluxio
|
tests/src/test/java/alluxio/proxy/FileSystemClientRestApiTest.java
|
10442
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.proxy;
import alluxio.AlluxioURI;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.client.file.options.ExistsOptions;
import alluxio.client.file.options.FreeOptions;
import alluxio.client.file.options.GetStatusOptions;
import alluxio.client.file.options.ListStatusOptions;
import alluxio.client.file.options.MountOptions;
import alluxio.client.file.options.OpenFileOptions;
import alluxio.client.file.options.RenameOptions;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.client.file.options.UnmountOptions;
import alluxio.exception.FileDoesNotExistException;
import alluxio.master.file.FileSystemMaster;
import alluxio.rest.RestApiTest;
import alluxio.rest.TestCase;
import alluxio.rest.TestCaseOptions;
import alluxio.security.authorization.Mode;
import alluxio.wire.FileInfo;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.HttpMethod;
/**
 * Test cases for {@link PathsRestServiceHandler} and {@link StreamsRestServiceHandler}.
*/
public final class FileSystemClientRestApiTest extends RestApiTest {
private static final alluxio.master.file.options.GetStatusOptions GET_STATUS_OPTIONS =
alluxio.master.file.options.GetStatusOptions.defaults();
private static final Map<String, String> NO_PARAMS = new HashMap<>();
private static final String PATHS_PREFIX = "paths/";
private static final String STREAMS_PREFIX = "streams/";
private FileSystemMaster mFileSystemMaster;
@Rule
public TemporaryFolder mFolder = new TemporaryFolder();
@Before
public void before() throws Exception {
mHostname = mResource.get().getHostname();
mPort = mResource.get().getProxyProcess().getWebLocalPort();
mFileSystemMaster = mResource.get().getLocalAlluxioMaster().getMasterProcess()
.getMaster(FileSystemMaster.class);
}
@Test
public void createDirectory() throws Exception {
AlluxioURI uri = new AlluxioURI("/dir");
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.CREATE_DIRECTORY, NO_PARAMS,
HttpMethod.POST, null,
TestCaseOptions.defaults().setBody(CreateDirectoryOptions.defaults())).run();
Assert.assertTrue(
mFileSystemMaster.listStatus(uri, alluxio.master.file.options.ListStatusOptions.defaults())
.isEmpty());
}
@Test
public void delete() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.DELETE, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(DeleteOptions.defaults())).run();
try {
mFileSystemMaster.getFileInfo(uri, GET_STATUS_OPTIONS);
Assert.fail("file should have been removed");
} catch (FileDoesNotExistException e) {
// Expected
}
}
@Test
public void download() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
String message = "Greetings traveller!";
writeFile(uri, message.getBytes());
Assert.assertEquals(message, new String(readFile(uri)));
}
@Test
public void exists() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.EXISTS, NO_PARAMS,
HttpMethod.POST, true, TestCaseOptions.defaults().setBody(ExistsOptions.defaults())).run();
}
@Test
public void free() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.FREE, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(FreeOptions.defaults())).run();
}
@Test
public void getStatus() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
String result = new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.GET_STATUS, NO_PARAMS,
HttpMethod.POST, TestCaseOptions.defaults().setBody(GetStatusOptions.defaults())).call();
FileInfo fileInfo = new ObjectMapper().readValue(result, FileInfo.class);
Assert.assertEquals(uri.getPath(), fileInfo.getPath());
Assert.assertEquals(0, fileInfo.getLength());
}
@Test
public void listStatus() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
String result = new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.LIST_STATUS, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(ListStatusOptions.defaults()))
.call();
List<FileInfo> fileInfos =
new ObjectMapper().readValue(result, new TypeReference<List<FileInfo>>() {});
FileInfo fileInfo = Iterables.getOnlyElement(fileInfos);
Assert.assertEquals(uri.getPath(), fileInfo.getPath());
Assert.assertEquals(0, fileInfo.getLength());
}
@Test
public void mount() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
Map<String, String> params = new HashMap<>();
params.put("src", mFolder.newFolder().getAbsolutePath());
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.MOUNT, params,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(MountOptions.defaults())).run();
}
@Test
public void rename() throws Exception {
AlluxioURI uri1 = new AlluxioURI("/file1");
AlluxioURI uri2 = new AlluxioURI("/file2");
writeFile(uri1, null);
Map<String, String> params = new HashMap<>();
params.put("dst", uri2.toString());
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri1.toString() + "/" + PathsRestServiceHandler.RENAME, params,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(RenameOptions.defaults())).run();
try {
mFileSystemMaster.getFileInfo(uri1, GET_STATUS_OPTIONS);
Assert.fail("file should have been removed");
} catch (FileDoesNotExistException e) {
// Expected
}
mFileSystemMaster.getFileInfo(uri2, GET_STATUS_OPTIONS);
}
@Test
public void setAttribute() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
writeFile(uri, null);
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.SET_ATTRIBUTE, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults()
.setBody(SetAttributeOptions.defaults().setMode(Mode.defaults())))
.run();
FileInfo fileInfo = mFileSystemMaster.getFileInfo(uri, GET_STATUS_OPTIONS);
Assert.assertEquals(uri.toString(), fileInfo.getPath());
}
@Test
public void unmount() throws Exception {
AlluxioURI uri = new AlluxioURI("/mount");
mFileSystemMaster.mount(uri, new AlluxioURI(mFolder.newFolder().getAbsolutePath()),
alluxio.master.file.options.MountOptions.defaults());
new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.UNMOUNT, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(UnmountOptions.defaults())).run();
}
@Test
public void upload() throws Exception {
AlluxioURI uri = new AlluxioURI("/file");
String message = "Greetings traveller!";
writeFile(uri, message.getBytes());
String result = new TestCase(mHostname, mPort,
PATHS_PREFIX + uri.toString() + "/" + PathsRestServiceHandler.GET_STATUS, NO_PARAMS,
HttpMethod.POST, null).call();
FileInfo fileInfo = new ObjectMapper().readValue(result, FileInfo.class);
Assert.assertEquals(message.length(), fileInfo.getLength());
}
private byte[] readFile(AlluxioURI path) throws Exception {
String result = new TestCase(mHostname, mPort,
PATHS_PREFIX + path.toString() + "/" + PathsRestServiceHandler.OPEN_FILE, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(OpenFileOptions.defaults()))
.call();
Integer id = new ObjectMapper().readValue(result, Integer.TYPE);
result = new TestCase(mHostname, mPort,
STREAMS_PREFIX + id.toString() + "/" + StreamsRestServiceHandler.READ, NO_PARAMS,
HttpMethod.POST, null).call();
new TestCase(mHostname, mPort,
STREAMS_PREFIX + id.toString() + "/" + StreamsRestServiceHandler.CLOSE, NO_PARAMS,
HttpMethod.POST, null).run();
return result.getBytes();
}
private void writeFile(AlluxioURI path, byte[] input) throws Exception {
String result = new TestCase(mHostname, mPort,
PATHS_PREFIX + path.toString() + "/" + PathsRestServiceHandler.CREATE_FILE, NO_PARAMS,
HttpMethod.POST, null, TestCaseOptions.defaults().setBody(CreateFileOptions.defaults()))
.call();
Integer id = new ObjectMapper().readValue(result, Integer.TYPE);
TestCaseOptions options = TestCaseOptions.defaults();
long expected = 0;
if (input != null) {
options.setInputStream(new ByteArrayInputStream(input));
expected = input.length;
}
new TestCase(mHostname, mPort,
STREAMS_PREFIX + id.toString() + "/" + StreamsRestServiceHandler.WRITE, NO_PARAMS,
HttpMethod.POST, expected, options).run();
new TestCase(mHostname, mPort,
STREAMS_PREFIX + id.toString() + "/" + StreamsRestServiceHandler.CLOSE, NO_PARAMS,
HttpMethod.POST, null).run();
}
}
|
apache-2.0
|
ascrutae/sky-walking
|
oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/MultiIntValuesHolder.java
|
998
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.metrics;
/**
 * MultiIntValuesHolder is implemented by metrics that hold multiple int values at once.
*/
public interface MultiIntValuesHolder {
int[] getValues();
}
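// Hedged illustration (not from the SkyWalking sources above): a trivial holder
// for a metric that exposes several bucketed int counters at once.
class FixedMultiIntValuesHolder implements MultiIntValuesHolder {
    private final int[] values;

    FixedMultiIntValuesHolder(int... values) {
        this.values = values;
    }

    @Override
    public int[] getValues() {
        return values;
    }
}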
|
apache-2.0
|
tizarhunter/Transitions-Everywhere
|
library/src/main/java/com/transitionseverywhere/utils/ViewGroupOverlayUtils.java
|
6724
|
package com.transitionseverywhere.utils;
import android.annotation.TargetApi;
import android.graphics.drawable.BitmapDrawable;
import android.os.Build;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewOverlay;
import com.transitionseverywhere.hidden.Crossfade;
public class ViewGroupOverlayUtils {
static class BaseViewGroupOverlayUtils {
public void addOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
ViewOverlayPreJellybean viewOverlay = ViewOverlayPreJellybean.getOverlay(sceneRoot);
if (viewOverlay != null) {
viewOverlay.addView(overlayView, screenX, screenY);
}
}
public void removeOverlay(ViewGroup sceneRoot, View overlayView) {
ViewOverlayPreJellybean viewOverlay = ViewOverlayPreJellybean.getOverlay(sceneRoot);
if (viewOverlay != null) {
viewOverlay.removeView(overlayView);
}
}
public void moveViewInOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
ViewOverlayPreJellybean viewOverlay = ViewOverlayPreJellybean.getOverlay(sceneRoot);
if (viewOverlay != null) {
viewOverlay.moveView(overlayView, screenX, screenY);
}
}
public void initializeOverlay(ViewGroup sceneRoot) {
ViewOverlayPreJellybean.getOverlay(sceneRoot);
}
public int[] getLocationOnScreenOfOverlayView(ViewGroup sceneRoot, View overlayView) {
int[] location = new int[2];
overlayView.getLocationOnScreen(location);
return location;
}
public void addCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
//TODO ViewOverlay
}
public void removeCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
//TODO ViewOverlay
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
static class JellyBeanMR2ViewGroupUtils extends BaseViewGroupOverlayUtils {
@Override
public void addOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
moveViewInOverlay(sceneRoot, overlayView, screenX, screenY);
sceneRoot.getOverlay().add(overlayView);
}
@Override
public void removeOverlay(ViewGroup sceneRoot, View overlayView) {
sceneRoot.getOverlay().remove(overlayView);
}
@Override
public void moveViewInOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
if (screenX != 0 || screenY != 0) {
int[] loc = new int[2];
sceneRoot.getLocationOnScreen(loc);
overlayView.offsetLeftAndRight((screenX - loc[0]) - overlayView.getLeft());
overlayView.offsetTopAndBottom((screenY - loc[1]) - overlayView.getTop());
}
}
@Override
public void initializeOverlay(ViewGroup sceneRoot) {
// do nothing
}
@Override
public void addCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
            ViewOverlay overlay = getViewOverlay(useParentOverlay, view);
            // Adding the crossfade puts the captured frames into the overlay;
            // for reveal behavior the end frame sits underneath the start frame.
            if (fadeBehavior == Crossfade.FADE_BEHAVIOR_REVEAL) {
                overlay.add(endDrawable);
            }
            overlay.add(startDrawable);
}
@Override
public int[] getLocationOnScreenOfOverlayView(ViewGroup sceneRoot, View overlayView) {
int[] location = new int[2];
sceneRoot.getLocationOnScreen(location);
location[0] += overlayView.getLeft();
location[1] += overlayView.getTop();
return location;
}
@Override
public void removeCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
            ViewOverlay overlay = getViewOverlay(useParentOverlay, view);
            // Removing the crossfade takes the captured frames back out of the overlay.
            overlay.remove(startDrawable);
            if (fadeBehavior == Crossfade.FADE_BEHAVIOR_REVEAL) {
                overlay.remove(endDrawable);
            }
}
private static ViewOverlay getViewOverlay(boolean useParentOverlay, View view) {
return useParentOverlay ? ((ViewGroup) view.getParent()).getOverlay() : view.getOverlay();
}
}
private static final BaseViewGroupOverlayUtils IMPL;
static {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
IMPL = new JellyBeanMR2ViewGroupUtils();
} else {
IMPL = new BaseViewGroupOverlayUtils();
}
}
public static void addOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
if (overlayView != null) {
IMPL.addOverlay(sceneRoot, overlayView, screenX, screenY);
}
}
public static void initializeOverlay(ViewGroup sceneRoot) {
IMPL.initializeOverlay(sceneRoot);
}
public static void removeOverlay(ViewGroup sceneRoot, View overlayView) {
if (overlayView != null) {
IMPL.removeOverlay(sceneRoot, overlayView);
}
}
public static void moveViewInOverlay(ViewGroup sceneRoot, View overlayView, int screenX, int screenY) {
if (overlayView != null) {
IMPL.moveViewInOverlay(sceneRoot, overlayView, screenX, screenY);
}
}
public static int[] getLocationOnScreenOfOverlayView(ViewGroup sceneRoot, View overlayView) {
if (overlayView != null) {
return IMPL.getLocationOnScreenOfOverlayView(sceneRoot, overlayView);
} else {
return new int[2];
}
}
public static void addCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
IMPL.addCrossfadeOverlay(useParentOverlay, view, fadeBehavior, startDrawable, endDrawable);
}
public static void removeCrossfadeOverlay(boolean useParentOverlay, View view, int fadeBehavior,
BitmapDrawable startDrawable, BitmapDrawable endDrawable) {
IMPL.removeCrossfadeOverlay(useParentOverlay, view, fadeBehavior, startDrawable, endDrawable);
}
}
|
apache-2.0
|
minagri-rwanda/DHIS2-Agriculture
|
dhis-web/dhis-web-api/src/main/java/org/hisp/dhis/webapi/controller/dataelement/DataElementController.java
|
2130
|
package org.hisp.dhis.webapi.controller.dataelement;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.schema.descriptors.DataElementSchemaDescriptor;
import org.hisp.dhis.webapi.controller.AbstractCrudController;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
@Controller
@RequestMapping( value = DataElementSchemaDescriptor.API_ENDPOINT )
public class DataElementController
extends AbstractCrudController<DataElement>
{
}
|
bsd-3-clause
|
evilwan/raptor-chess-interface
|
raptor/src/testcases/TestFr.java
|
2752
|
package testcases;
import junit.framework.Assert;
import org.junit.Test;
import raptor.chess.FischerRandomGame;
import raptor.chess.GameFactory;
import raptor.chess.Variant;
public class TestFr {
@Test
public void testCastlingBlackQs() {
String fen1 = "rk2bqr1/p1p1bppp/p2np1n1/8/3P4/5B2/PPP2PPP/RKN1BQR1 w KQkq - 0 8";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen1,
Variant.fischerRandom);
game.makeSanMove("Bxa8");
Assert.assertEquals("Bk2bqr1/p1p1bppp/p2np1n1/8/3P4/8/PPP2PPP/RKN1BQR1 b KQk - 0 8", game.toFen());
}
@Test
public void testCastlingWhiteKs() {
String fen1 = "rq1bknnr/pbpppppp/1p6/8/5P2/1P4P1/P1PPP2P/RQBBKNNR b KQkq f3 0 3";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen1,
Variant.fischerRandom);
game.makeSanMove("Bxh1");
Assert.assertEquals("rq1bknnr/p1pppppp/1p6/8/5P2/1P4P1/P1PPP2P/RQBBKNNb w Qkq - 0 4", game.toFen());
}
@Test
public void testCastlingBlackKs() {
String fen1 = "rq1bkn1r/p1pppp1p/1p4pn/8/5P2/1P2P1P1/PBPP3P/RQ1BKNNb w Qkq - 2 6";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen1,
Variant.fischerRandom);
game.makeSanMove("Bxh8");
Assert.assertEquals("rq1bkn1B/p1pppp1p/1p4pn/8/5P2/1P2P1P1/P1PP3P/RQ1BKNNb b Qq - 0 6", game.toFen());
}
@Test
public void testCastlingWhiteQs() {
String fen1 = "rqkrnnbb/pppppp1p/6p1/8/8/1P4P1/P1PPPP1P/RQKRNNBB b KQkq - 0 2";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen1,
Variant.fischerRandom);
game.makeSanMove("Bxa1");
Assert.assertEquals("rqkrnnb1/pppppp1p/6p1/8/8/1P4P1/P1PPPP1P/bQKRNNBB w Kkq - 0 3", game.toFen());
}
@Test
public void testCastlingRollback() {
String fen1 = "rqkrnnbb/pppppp1p/6p1/8/8/1P4P1/P1PPPP1P/RQKRNNBB b KQkq - 0 2";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen1,
Variant.fischerRandom);
game.makeSanMove("Bxa1");
game.rollback();
Assert.assertEquals(fen1, game.toFen());
}
@Test
public void testOtherCastling() {
String fen = "rkb1qrnb/ppp1p3/2np4/6pp/4Pp2/2NP3P/PPP2PPB/RK2QRNB w KQkq - 0 9";
FischerRandomGame game = (FischerRandomGame) GameFactory.createFromFen(fen,
Variant.fischerRandom);
game.makeSanMove("O-O-O");
Assert.assertEquals("rkb1qrnb/ppp1p3/2np4/6pp/4Pp2/2NP3P/PPP2PPB/2KRQRNB b kq - 1 9", game.toFen());
game.rollback();
Assert.assertEquals(fen, game.toFen());
fen = "rk2qr1b/1pp1n3/p2p4/2nPp3/2P2pp1/3P4/PP1QNPPB/2K1RR1B b kq - 3 19";
game = (FischerRandomGame) GameFactory.createFromFen(fen,
Variant.fischerRandom);
game.makeSanMove("O-O-O");
Assert.assertEquals("2krqr1b/1pp1n3/p2p4/2nPp3/2P2pp1/3P4/PP1QNPPB/2K1RR1B w - - 4 20", game.toFen());
}
}
|
bsd-3-clause
|
plumer/codana
|
tomcat_files/8.0.22/TestTomcatClassLoader.java
|
3788
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.startup;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLClassLoader;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.apache.catalina.Context;
import org.apache.catalina.loader.WebappClassLoader;
import org.apache.tomcat.util.buf.ByteChunk;
public class TestTomcatClassLoader extends TomcatBaseTest {
@Test
public void testDefaultClassLoader() throws Exception {
Tomcat tomcat = getTomcatInstance();
// No file system docBase required
Context ctx = tomcat.addContext("", null);
Tomcat.addServlet(ctx, "ClassLoaderReport", new ClassLoaderReport(null));
ctx.addServletMapping("/", "ClassLoaderReport");
tomcat.start();
ByteChunk res = getUrl("http://localhost:" + getPort() + "/");
assertEquals("WEBAPP,SYSTEM,OTHER,", res.toString());
}
@Test
public void testNonDefaultClassLoader() throws Exception {
ClassLoader cl = new URLClassLoader(new URL[0],
Thread.currentThread().getContextClassLoader());
Thread.currentThread().setContextClassLoader(cl);
Tomcat tomcat = getTomcatInstance();
tomcat.getServer().setParentClassLoader(cl);
// No file system docBase required
Context ctx = tomcat.addContext("", null);
Tomcat.addServlet(ctx, "ClassLoaderReport", new ClassLoaderReport(cl));
ctx.addServletMapping("/", "ClassLoaderReport");
tomcat.start();
ByteChunk res = getUrl("http://localhost:" + getPort() + "/");
assertEquals("WEBAPP,CUSTOM,SYSTEM,OTHER,", res.toString());
}
private static final class ClassLoaderReport extends HttpServlet {
private static final long serialVersionUID = 1L;
private transient ClassLoader custom;
public ClassLoaderReport(ClassLoader custom) {
this.custom = custom;
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
resp.setContentType("text/plain");
PrintWriter out = resp.getWriter();
ClassLoader system = ClassLoader.getSystemClassLoader();
ClassLoader cl = Thread.currentThread().getContextClassLoader();
while (cl != null) {
if (system == cl) {
out.print("SYSTEM,");
} else if (custom == cl) {
out.print("CUSTOM,");
} else if (cl instanceof WebappClassLoader) {
out.print("WEBAPP,");
} else {
out.print("OTHER,");
}
cl = cl.getParent();
}
}
}
}
|
mit
|
littlefoot32/GithubAndroidSdk
|
src/main/java/com/alorma/github/sdk/bean/dto/response/Gist.java
|
1775
|
package com.alorma.github.sdk.bean.dto.response;
import android.os.Parcel;
import com.google.gson.annotations.SerializedName;
import java.util.List;
import java.util.Map;
public class Gist extends ShaUrl {
@SerializedName("public")
public boolean isPublic;
public String created_at;
public String updated_at;
public int comments;
public List<GistRevision> history;
public Map<String, GistFile> files;
public String description;
@SerializedName("git_pull_url")
public String gitPullUrl;
@SerializedName("git_push_url")
public String gitPushUrl;
@SerializedName("forks_url")
public String forksUrl;
public String id;
public User owner;
public User user;
public Gist() {
}
protected Gist(Parcel in) {
super(in);
created_at = in.readString();
updated_at = in.readString();
comments = in.readInt();
description = in.readString();
gitPullUrl = in.readString();
gitPushUrl = in.readString();
forksUrl = in.readString();
id = in.readString();
owner = in.readParcelable(User.class.getClassLoader());
user = in.readParcelable(User.class.getClassLoader());
}
public static final Creator<Gist> CREATOR = new Creator<Gist>() {
@Override
public Gist createFromParcel(Parcel in) {
return new Gist(in);
}
@Override
public Gist[] newArray(int size) {
return new Gist[size];
}
};
@Override
public void writeToParcel(Parcel dest, int flags) {
super.writeToParcel(dest, flags);
dest.writeString(created_at);
dest.writeString(updated_at);
dest.writeInt(comments);
dest.writeString(description);
dest.writeString(gitPullUrl);
dest.writeString(gitPushUrl);
dest.writeString(forksUrl);
dest.writeString(id);
dest.writeParcelable(owner, flags);
dest.writeParcelable(user, flags);
}
}
|
mit
|
0359xiaodong/eclipse-color-theme
|
com.github.eclipsecolortheme/src/com/github/eclipsecolortheme/mapper/PerlEditorMapper.java
|
1327
|
package com.github.eclipsecolortheme.mapper;
import java.util.Map;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import com.github.eclipsecolortheme.ColorThemeMapping;
import com.github.eclipsecolortheme.ColorThemeSetting;
public class PerlEditorMapper extends GenericMapper {
private class Mapping extends ColorThemeMapping {
public Mapping(String pluginKey, String themeKey) {
super(pluginKey, themeKey);
}
@Override
public void putPreferences(IEclipsePreferences preferences,
ColorThemeSetting setting) {
preferences.put(pluginKey, setting.getColor().asRGB());
if (setting.isBoldEnabled() != null)
preferences.putBoolean(pluginKey + "Bold", setting.isBoldEnabled());
}
}
@Override
protected ColorThemeMapping createMapping(String pluginKey, String themeKey) {
return new Mapping(pluginKey, themeKey);
}
@Override
public void map(Map<String, ColorThemeSetting> theme, Map<String, ColorThemeMapping> overrideMappings) {
preferences.putBoolean("AbstractTextEditor.Color.Background.SystemDefault", false);
preferences.putBoolean("AbstractTextEditor.Color.Foreground.SystemDefault", false);
super.map(theme, overrideMappings);
}
}
|
epl-1.0
|
snjeza/che
|
core/che-core-api-model/src/main/java/org/eclipse/che/api/core/model/machine/Command.java
|
1436
|
/*******************************************************************************
* Copyright (c) 2012-2017 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.api.core.model.machine;
import java.util.Map;
/**
* Command that can be used to create {@link Process} in a machine
*
* @author Eugene Voevodin
* @author gazarenkov
*/
public interface Command {
/**
* Returns command name (i.e. 'start tomcat')
* <p>
* The name should be unique per user in one workspace,
* which means that user may create only one command with the same name in the same workspace
*/
String getName();
/**
* Returns command line (i.e. 'mvn clean install') which is going to be executed
* <p>
* Serves as a base for {@link Process} creation.
*/
String getCommandLine();
/**
* Returns command type (i.e. 'maven')
*/
String getType();
/**
* Returns attributes related to this command.
*
* @return command attributes
*/
Map<String, String> getAttributes();
}
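// Hedged sketch (not part of the Che API above): a minimal immutable Command
// implementation showing how the contract is typically satisfied.
class SimpleCommand implements Command {
    private final String name;
    private final String commandLine;
    private final String type;
    private final Map<String, String> attributes;

    SimpleCommand(String name, String commandLine, String type,
                  Map<String, String> attributes) {
        this.name = name;
        this.commandLine = commandLine;
        this.type = type;
        this.attributes = attributes;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getCommandLine() {
        return commandLine;
    }

    @Override
    public String getType() {
        return type;
    }

    @Override
    public Map<String, String> getAttributes() {
        return attributes;
    }
}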
|
epl-1.0
|
elucash/eclipse-oxygen
|
org.eclipse.jdt.core/src/org/eclipse/jdt/internal/compiler/codegen/CaseLabel.java
|
2482
|
/*******************************************************************************
* Copyright (c) 2000, 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.compiler.codegen;
public class CaseLabel extends BranchLabel {
public int instructionPosition = POS_NOT_SET;
/**
* CaseLabel constructor comment.
* @param codeStream org.eclipse.jdt.internal.compiler.codegen.CodeStream
*/
public CaseLabel(CodeStream codeStream) {
super(codeStream);
}
/*
* Put down a reference to the array at the location in the codestream.
* #placeInstruction() must be performed prior to any #branch()
*/
void branch() {
if (this.position == POS_NOT_SET) {
addForwardReference(this.codeStream.position);
// Leave 4 bytes free to generate the jump offset afterwards
this.codeStream.position += 4;
this.codeStream.classFileOffset += 4;
} else {
    // Position is set. Write it if it is not a wide branch.
this.codeStream.writeSignedWord(this.position - this.instructionPosition);
}
}
/*
* No support for wide branches yet
*/
void branchWide() {
branch(); // case label branch is already wide
}
public boolean isCaseLabel() {
return true;
}
public boolean isStandardLabel(){
return false;
}
/*
* Put down a reference to the array at the location in the codestream.
*/
public void place() {
if ((this.tagBits & USED) != 0) {
this.position = this.codeStream.getPosition();
} else {
this.position = this.codeStream.position;
}
if (this.instructionPosition != POS_NOT_SET) {
int offset = this.position - this.instructionPosition;
int[] forwardRefs = forwardReferences();
for (int i = 0, length = forwardReferenceCount(); i < length; i++) {
this.codeStream.writeSignedWord(forwardRefs[i], offset);
}
// add the label in the codeStream labels collection
this.codeStream.addLabel(this);
}
}
/*
* Record the position of the owning switch instruction; branch offsets are computed relative to it.
*/
void placeInstruction() {
if (this.instructionPosition == POS_NOT_SET) {
this.instructionPosition = this.codeStream.position;
}
}
}
|
epl-1.0
|
codenvy/che-core
|
ide/che-core-ide-api/src/main/java/org/eclipse/che/ide/api/parts/HasView.java
|
745
|
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.api.parts;
import org.eclipse.che.ide.api.mvp.View;
/**
* Determines an object containing View.
*
* @author Vitaliy Guliy
*/
public interface HasView<V extends View> {
V getView();
}
|
epl-1.0
|
jdufner/fitnesse
|
src/fit/TypeAdapter.java
|
12240
|
// Modified or written by Object Mentor, Inc. for inclusion with FitNesse.
// Copyright (c) 2002 Cunningham & Cunningham, Inc.
// Released under the terms of the GNU General Public License version 2 or later.
package fit;
// Copyright (c) 2002 Cunningham & Cunningham, Inc.
// Released under the terms of the GNU General Public License version 2 or later.
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.regex.Pattern;
public class TypeAdapter {
public Object target;
public Fixture fixture;
public Field field;
public Method method;
public Class<?> type;
public boolean isRegex;
private static final Map<Class<?>, TypeAdapter> PARSE_DELEGATES = new HashMap<>();
// Factory //////////////////////////////////
public static TypeAdapter on(Fixture target, Class<?> type) {
TypeAdapter a = adapterFor(type);
a.init(target, type);
return a;
}
public static TypeAdapter on(Fixture fixture, Field field) {
TypeAdapter a = on(fixture, field.getType());
a.target = fixture;
a.field = field;
a.field.setAccessible(true);
return a;
}
public static TypeAdapter on(Fixture fixture, Method method) {
return on(fixture, method, false);
}
public static TypeAdapter on(Fixture fixture, Method method, boolean isRegex) {
TypeAdapter a = on(fixture, method.getReturnType());
a.target = fixture;
a.method = method;
a.isRegex = isRegex;
return a;
}
public static TypeAdapter adapterFor(Class<?> type) throws UnsupportedOperationException {
if (type.isPrimitive()) {
if (type.equals(byte.class)) return new ByteAdapter();
if (type.equals(short.class)) return new ShortAdapter();
if (type.equals(int.class)) return new IntAdapter();
if (type.equals(long.class)) return new LongAdapter();
if (type.equals(float.class)) return new FloatAdapter();
if (type.equals(double.class)) return new DoubleAdapter();
if (type.equals(char.class)) return new CharAdapter();
if (type.equals(boolean.class)) return new BooleanAdapter();
throw new UnsupportedOperationException("can't yet adapt " + type);
} else {
Object delegate = PARSE_DELEGATES.get(type);
if (delegate instanceof DelegateClassAdapter)
return (TypeAdapter) ((DelegateClassAdapter) delegate).clone();
if (delegate instanceof DelegateObjectAdapter)
return (TypeAdapter) ((DelegateObjectAdapter) delegate).clone();
if (type.equals(Byte.class)) return new ClassByteAdapter();
if (type.equals(Short.class)) return new ClassShortAdapter();
if (type.equals(Integer.class)) return new ClassIntegerAdapter();
if (type.equals(Long.class)) return new ClassLongAdapter();
if (type.equals(Float.class)) return new ClassFloatAdapter();
if (type.equals(Double.class)) return new ClassDoubleAdapter();
if (type.equals(Character.class)) return new ClassCharacterAdapter();
if (type.equals(Boolean.class)) return new ClassBooleanAdapter();
if (type.isArray()) return new ArrayAdapter();
return new TypeAdapter();
}
}
// Accessors ////////////////////////////////
public void init(Fixture fixture, Class<?> type) {
this.fixture = fixture;
this.type = type;
}
public Object get() throws IllegalAccessException, InvocationTargetException {
if (field != null) {
return field.get(target);
}
if (method != null) {
return invoke();
}
return null;
}
public void set(Object value) throws Exception {
field.set(target, value);
}
public Object invoke() throws IllegalAccessException, InvocationTargetException {
Object[] params = {};
return method.invoke(target, params);
}
public Object parse(String s) throws Exception {
Object obj;
obj = isRegex ? s : fixture.parse(s, type);
return obj;
}
public boolean equals(Object a, Object b) {
boolean isEqual = false;
if (isRegex) {
if (b != null)
isEqual = Pattern.matches(a.toString(), b.toString());
} else {
if (a == null)
isEqual = (b == null);
else
isEqual = a.equals(b);
}
return isEqual;
}
public String toString(Object o) {
if (o == null) {
return "null";
} else if (o instanceof String && ((String) o).equals(""))
return "blank";
else
return o.toString();
}
/*
* Registers a delegate, a class that will handle parsing of other types of values.
*/
public static void registerParseDelegate(Class<?> type, Class<?> parseDelegate) {
try {
PARSE_DELEGATES.put(type, new DelegateClassAdapter(parseDelegate));
} catch (Exception ex) {
throw new RuntimeException("Parse delegate class " + parseDelegate.getName()
+ " does not have a suitable static parse() method.");
}
}
/*
* Registers a delegate object that will handle parsing of other types of values.
*/
public static void registerParseDelegate(Class<?> type, Object parseDelegate) {
try {
PARSE_DELEGATES.put(type, new DelegateObjectAdapter(parseDelegate));
} catch (Exception ex) {
throw new RuntimeException("Parse delegate object of class " + parseDelegate.getClass().getName()
+ " does not have a suitable parse() method.");
}
}
public static void clearDelegatesForNextTest() {
PARSE_DELEGATES.clear();
}
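// Illustrative sketch (hypothetical Money/MoneyParser types, not part of FitNesse):
// a delegate class must expose a public static parse(String) method, e.g.
//
// public class MoneyParser {
// public static Money parse(String s) { return new Money(s); }
// }
//
// TypeAdapter.registerParseDelegate(Money.class, MoneyParser.class);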
// Subclasses ///////////////////////////////
static class ByteAdapter extends ClassByteAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setByte(target, ((Byte) i).byteValue());
}
}
static class ClassByteAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Byte(Byte.parseByte(s));
}
}
static class ShortAdapter extends ClassShortAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setShort(target, ((Short) i).shortValue());
}
}
static class ClassShortAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Short(Short.parseShort(s));
}
}
static class IntAdapter extends ClassIntegerAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setInt(target, ((Integer) i).intValue());
}
}
static class ClassIntegerAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Integer(Integer.parseInt(s));
}
}
static class LongAdapter extends ClassLongAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setLong(target, ((Long) i).longValue());
}
}
static class ClassLongAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Long(Long.parseLong(s));
}
}
static class FloatAdapter extends ClassFloatAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setFloat(target, ((Number) i).floatValue());
}
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Float(Float.parseFloat(s));
}
}
static class ClassFloatAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Float(Float.parseFloat(s));
}
}
static class DoubleAdapter extends ClassDoubleAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setDouble(target, ((Number) i).doubleValue());
}
@Override
public Object parse(String s) {
return Double.valueOf(Double.parseDouble(s));
}
}
static class ClassDoubleAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Double(Double.parseDouble(s));
}
}
static class CharAdapter extends ClassCharacterAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setChar(target, ((Character) i).charValue());
}
}
static class ClassCharacterAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
return ("null".equals(s)) ? null : new Character(s.charAt(0));
}
}
static class BooleanAdapter extends ClassBooleanAdapter {
@Override
public void set(Object i) throws IllegalAccessException {
field.setBoolean(target, ((Boolean) i).booleanValue());
}
}
static class ClassBooleanAdapter extends TypeAdapter {
@Override
public Object parse(String s) {
if ("null".equals(s)) return null;
String ls = s.toLowerCase();
if (ls.equals("true"))
return Boolean.TRUE;
if (ls.equals("yes"))
return Boolean.TRUE;
if (ls.equals("1"))
return Boolean.TRUE;
if (ls.equals("y"))
return Boolean.TRUE;
if (ls.equals("+"))
return Boolean.TRUE;
return Boolean.FALSE;
}
}
static class ArrayAdapter extends TypeAdapter {
Class<?> componentType;
TypeAdapter componentAdapter;
@Override
public void init(Fixture target, Class<?> type) {
super.init(target, type);
componentType = type.getComponentType();
componentAdapter = on(target, componentType);
}
@Override
public Object parse(String s) throws Exception {
StringTokenizer t = new StringTokenizer(s, ",");
Object array = Array.newInstance(componentType, t.countTokens());
for (int i = 0; t.hasMoreTokens(); i++) {
Array.set(array, i, componentAdapter.parse(t.nextToken().trim()));
}
return array;
}
@Override
public String toString(Object o) {
if (o == null)
return "";
int length = Array.getLength(o);
StringBuilder b = new StringBuilder(5 * length);
for (int i = 0; i < length; i++) {
b.append(componentAdapter.toString(Array.get(o, i)));
if (i < (length - 1)) {
b.append(", ");
}
}
return b.toString();
}
@Override
public boolean equals(Object a, Object b) {
int length = Array.getLength(a);
if (length != Array.getLength(b))
return false;
for (int i = 0; i < length; i++) {
if (!componentAdapter.equals(Array.get(a, i), Array.get(b, i)))
return false;
}
return true;
}
}
static class DelegateClassAdapter extends TypeAdapter implements Cloneable {
private Method parseMethod;
public DelegateClassAdapter(Class<?> parseDelegate) throws SecurityException, NoSuchMethodException {
this.parseMethod = parseDelegate.getMethod("parse", new Class[]{String.class});
int modifiers = parseMethod.getModifiers();
if (!Modifier.isStatic(modifiers) || !Modifier.isPublic(modifiers)
|| parseMethod.getReturnType() == Void.TYPE) // void parse() is useless; void.class is Void.TYPE, not Void.class
throw new NoSuchMethodException();
}
@Override
public Object parse(String s) throws Exception {
return parseMethod.invoke(null, new Object[]
{s});
}
@Override
protected Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
return null;
}
}
}
static class DelegateObjectAdapter extends TypeAdapter implements Cloneable {
private Object delegate;
private Method parseMethod;
public DelegateObjectAdapter(Object delegate) throws SecurityException, NoSuchMethodException {
this.delegate = delegate;
this.parseMethod = delegate.getClass().getMethod("parse", new Class[]
{String.class});
}
@Override
public Object parse(String s) throws Exception {
return parseMethod.invoke(delegate, new Object[]
{s});
}
@Override
protected Object clone() {
try {
return super.clone();
} catch (CloneNotSupportedException e) {
return null;
}
}
}
}
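// Minimal usage sketch (an illustrative addition, not part of the original file): binds a
// public fixture field to a TypeAdapter, parses a table-cell string, and assigns the value.
class TypeAdapterUsageSketch extends Fixture {
public int count;
public static void main(String[] args) throws Exception {
TypeAdapterUsageSketch fixture = new TypeAdapterUsageSketch();
TypeAdapter adapter = TypeAdapter.on(fixture, TypeAdapterUsageSketch.class.getField("count"));
adapter.set(adapter.parse("42")); // IntAdapter parses "42" and assigns the primitive field
System.out.println(fixture.count); // prints 42
}
}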
|
epl-1.0
|
RallySoftware/eclipselink.runtime
|
dbws/eclipselink.dbws.test/src/dbws/testing/xrdynamicentity/XRDynamicEntityTestSuite.java
|
6291
|
/*******************************************************************************
* Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Mike Norman - May 2008, created DBWS test package
******************************************************************************/
package dbws.testing.xrdynamicentity;
//javase imports
import java.util.HashSet;
import java.util.Set;
//JUnit4 imports
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
//EclipseLink imports
import org.eclipse.persistence.dynamic.DynamicEntity;
import org.eclipse.persistence.exceptions.DynamicException;
import org.eclipse.persistence.internal.xr.XRDynamicPropertiesManager;
import org.eclipse.persistence.internal.xr.XRClassWriter;
import org.eclipse.persistence.internal.xr.XRDynamicClassLoader;
import org.eclipse.persistence.internal.xr.XRDynamicEntity;
public class XRDynamicEntityTestSuite {
static final String PACKAGE_PREFIX =
XRDynamicEntityTestSuite.class.getPackage().getName();
static final String TEST_CLASSNAME = PACKAGE_PREFIX + ".TestClass";
static final String FIELD_1 = "field1";
static final String FIELD_2 = "field2";
static final String TEST_STRING = "this is a test";
//test fixtures
static XRDynamicEntity entity1 = null;
@BeforeClass
public static void setUp() throws NoSuchFieldException, IllegalArgumentException,
IllegalAccessException {
Set<String> propertyNames = new HashSet<String>();
propertyNames.add(FIELD_1);
propertyNames.add(FIELD_2);
XRCustomer.DPM.setPropertyNames(propertyNames);
entity1 = new XRCustomer();
}
@Test
public void nullParent() throws Exception {
XRDynamicClassLoader xrdcl = new XRDynamicClassLoader(null);
assertNull(xrdcl.getParent());
}
@Test
public void defaultWriter() throws Exception {
XRDynamicClassLoader xrdcl = new XRDynamicClassLoader(null);
assertEquals(XRClassWriter.class, xrdcl.getDefaultWriter().getClass());
}
@Test(expected=IllegalArgumentException.class)
public void coreClass() throws ClassNotFoundException {
XRDynamicClassLoader xrdcl = new XRDynamicClassLoader(null);
xrdcl.createDynamicClass("java.lang.String");
}
@Test
public void buildTestClass() throws ClassNotFoundException {
//Needs non-null parent ClassLoader for createDynamicClass to work
XRDynamicClassLoader xrdcl =
new XRDynamicClassLoader(XRDynamicEntityTestSuite.class.getClassLoader());
Class<?> testClass = xrdcl.createDynamicClass(TEST_CLASSNAME);
assertEquals("test class wrong name", testClass.getName(), TEST_CLASSNAME);
assertTrue("test class not assignableFrom DynamicEntity",
DynamicEntity.class.isAssignableFrom(testClass));
}
@SuppressWarnings("unchecked")
@Test
public void buildTestEntity() throws InstantiationException, IllegalAccessException,
NoSuchFieldException {
XRDynamicClassLoader xrdcl =
new XRDynamicClassLoader(XRDynamicEntityTestSuite.class.getClassLoader());
Class<XRDynamicEntity> testClass =
(Class<XRDynamicEntity>)xrdcl.createDynamicClass(TEST_CLASSNAME);
XRDynamicEntity newInstance = testClass.newInstance();
XRDynamicPropertiesManager xrDPM = newInstance.fetchPropertiesManager();
Set<String> propertyNames = new HashSet<String>();
propertyNames.add(FIELD_1);
propertyNames.add(FIELD_2);
xrDPM.setPropertyNames(propertyNames);
//build instance
XRDynamicEntity newInstance2 = testClass.newInstance();
assertNotNull(newInstance2);
}
@Test
public void createTestClassTwice() throws Exception {
XRDynamicClassLoader xrdcl =
new XRDynamicClassLoader(XRDynamicEntityTestSuite.class.getClassLoader());
Class<?> dynamicClass = xrdcl.createDynamicClass(TEST_CLASSNAME);
assertNotNull(dynamicClass);
assertEquals(TEST_CLASSNAME, dynamicClass.getName());
Class<?> dynamicClass2 = xrdcl.createDynamicClass(TEST_CLASSNAME);
assertSame(dynamicClass, dynamicClass2);
}
@Test
public void testEntityOps() {
// test #1
Object field = entity1.get(FIELD_1);
assertNull(FIELD_1 + " should be null", field);
assertFalse(FIELD_2 + " shouldn't be set", entity1.isSet(FIELD_2));
// test #2
DynamicEntity e = entity1.set(FIELD_1, TEST_STRING);
assertSame(e, entity1);
e = entity1.set(FIELD_2, Integer.valueOf(17));
assertSame(e, entity1);
// test #3
String test = entity1.<String>get(FIELD_1);
assertEquals(FIELD_1 + " incorrect value", TEST_STRING, test);
Integer i = entity1.<Integer>get(FIELD_2);
assertEquals(FIELD_2 + " incorrect value", Integer.valueOf(17), i);
// test #4
boolean expectedExceptionOccurred = false;
try {
String s = entity1.<String>get("field2");
System.identityHashCode(s);
} catch (ClassCastException cce) {
expectedExceptionOccurred = true;
}
assertTrue("The expected ClassCastException did not occur", expectedExceptionOccurred);
// test #5
expectedExceptionOccurred = false;
try {
entity1.<String>get("field3");
} catch (DynamicException de) {
expectedExceptionOccurred = true;
}
assertTrue("The expected DynamicException did not occur", expectedExceptionOccurred);
}
}
|
epl-1.0
|
OpenLiberty/open-liberty
|
dev/com.ibm.ws.wssecurity_fat.wsscxf.saml/test-applications/samlcxfclient/src/fats/cxf/basic/wssec/SAMLSOAPService5.java
|
2900
|
/*******************************************************************************
* Copyright (c) 2021 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package fats.cxf.basic.wssec;
import java.net.MalformedURLException;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.WebEndpoint;
import javax.xml.ws.WebServiceClient;
import javax.xml.ws.WebServiceFeature;
import javax.xml.ws.Service;
/**
* This class was generated by Apache CXF 2.6.2
* 2015-09-16T17:50:53.062-05:00
* Generated source version: 2.6.2
*
*/
@WebServiceClient(name = "SAMLSOAPService5",
wsdlLocation = "../../samltoken/resources/WEB-INF/SamlTokenWebSvc.wsdl",
targetNamespace = "http://wssec.basic.cxf.fats")
public class SAMLSOAPService5 extends Service {
public final static URL WSDL_LOCATION;
public final static QName SERVICE = new QName("http://wssec.basic.cxf.fats", "SAMLSOAPService5");
public final static QName SAMLSoapPort5 = new QName("http://wssec.basic.cxf.fats", "SAMLSoapPort5");
static {
URL url = SAMLSOAPService5.class.getResource("../../samltoken/resources/WEB-INF/SamlTokenWebSvc.wsdl");
if (url == null) {
java.util.logging.Logger.getLogger(SAMLSOAPService5.class.getName())
.log(java.util.logging.Level.INFO,
"Can not initialize the default wsdl from {0}", "../../samltoken/resources/WEB-INF/SamlTokenWebSvc.wsdl");
}
WSDL_LOCATION = url;
}
public SAMLSOAPService5(URL wsdlLocation) {
super(wsdlLocation, SERVICE);
}
public SAMLSOAPService5(URL wsdlLocation, QName serviceName) {
super(wsdlLocation, serviceName);
}
public SAMLSOAPService5() {
super(WSDL_LOCATION, SERVICE);
}
/**
*
* @return
* returns SamlTokenType
*/
@WebEndpoint(name = "SAMLSoapPort5")
public SamlTokenType getSAMLSoapPort5() {
return super.getPort(SAMLSoapPort5, SamlTokenType.class);
}
/**
*
* @param features
* A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
* @return
* returns SamlTokenType
*/
@WebEndpoint(name = "SAMLSoapPort5")
public SamlTokenType getSAMLSoapPort5(WebServiceFeature... features) {
return super.getPort(SAMLSoapPort5, SamlTokenType.class, features);
}
}
|
epl-1.0
|
RallySoftware/eclipselink.runtime
|
moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/javadoc/xmlelementrefs/ViaLand.java
|
1430
|
/*******************************************************************************
* Copyright (c) 2011, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Praba Vijayaratnam - 2.3 - initial implementation
******************************************************************************/
package org.eclipse.persistence.testing.jaxb.javadoc.xmlelementrefs;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement (name="land-transport")
public class ViaLand extends TransportType{
@XmlAttribute
public String truckCompany;
public boolean equals(Object obj) {
if (!(obj instanceof ViaLand)) { // instanceof is false for null, so no separate null check is needed
return false;
}
ViaLand t = (ViaLand) obj;
return t.transportTypeID == this.transportTypeID && t.transportCost == this.transportCost
&& (t.truckCompany == null ? this.truckCompany == null : t.truckCompany.equals(this.truckCompany));
}
}
|
epl-1.0
|
rex-xxx/mt6572_x201
|
tools/build/testapps/libsTest/lib1/src/main/java/com/android/tests/libstest/lib1/MainActivity.java
|
486
|
package com.android.tests.libstest.lib1;
import android.app.Activity;
import android.os.Bundle;
import com.android.tests.libstest.lib2.Lib2;
public class MainActivity extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.lib1_main);
Lib1.handleTextView(this);
Lib2.handleTextView(this);
}
}
|
gpl-2.0
|
md-5/jdk10
|
test/hotspot/jtreg/gc/metaspace/G1AddMetaspaceDependency.java
|
4036
|
/*
* Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package gc.metaspace;
/*
* @test G1AddMetaspaceDependency
* @bug 8010196
* @requires vm.gc.G1
* @library /
* @summary Checks that we don't get locking problems when adding metaspace dependencies with the G1 update buffer monitor
* @run main/othervm -XX:+UseG1GC -XX:G1UpdateBufferSize=1 gc.metaspace.G1AddMetaspaceDependency
*/
import java.io.InputStream;
public class G1AddMetaspaceDependency {
static byte[] getClassBytes(String name) {
byte[] b = null;
try (InputStream is = ClassLoader.getSystemResourceAsStream(name)) {
// read fully: a single read(byte[]) call may return fewer bytes than available() reports
b = is.readAllBytes();
} finally {
if (b == null) {
throw new RuntimeException("Unable to load class file");
}
return b;
}
}
static final String a_name = A.class.getName();
static final String b_name = B.class.getName();
public static void main(String... args) throws Exception {
final byte[] a_bytes = getClassBytes(a_name.replace('.', '/') + ".class");
final byte[] b_bytes = getClassBytes(b_name.replace('.', '/') + ".class");
for (int i = 0; i < 1000; i += 1) {
runTest(a_bytes, b_bytes);
}
}
static class Loader extends ClassLoader {
private final String myClass;
private final byte[] myBytes;
private final String friendClass;
private final ClassLoader friendLoader;
Loader(String myClass, byte[] myBytes,
String friendClass, ClassLoader friendLoader) {
this.myClass = myClass;
this.myBytes = myBytes;
this.friendClass = friendClass;
this.friendLoader = friendLoader;
}
Loader(String myClass, byte[] myBytes) {
this(myClass, myBytes, null, null);
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
Class<?> c = findLoadedClass(name);
if (c != null) {
return c;
}
if (name.equals(friendClass)) {
return friendLoader.loadClass(name);
}
if (name.equals(myClass)) {
c = defineClass(name, myBytes, 0, myBytes.length);
resolveClass(c);
return c;
}
return findSystemClass(name);
}
}
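// runTest below defines B in six different loaders that all delegate loading of A to the
// one shared a_loader; each definition adds a cross-loader metaspace dependency, which is
// what this test exercises under G1 with a minimal update buffer size.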
private static void runTest(final byte[] a_bytes, final byte[] b_bytes) throws Exception {
Loader a_loader = new Loader(a_name, a_bytes);
Loader b_loader = new Loader(b_name, b_bytes, a_name, a_loader);
Loader c_loader = new Loader(b_name, b_bytes, a_name, a_loader);
Loader d_loader = new Loader(b_name, b_bytes, a_name, a_loader);
Loader e_loader = new Loader(b_name, b_bytes, a_name, a_loader);
Loader f_loader = new Loader(b_name, b_bytes, a_name, a_loader);
Loader g_loader = new Loader(b_name, b_bytes, a_name, a_loader);
b_loader.loadClass(b_name);
c_loader.loadClass(b_name);
d_loader.loadClass(b_name);
e_loader.loadClass(b_name);
f_loader.loadClass(b_name);
g_loader.loadClass(b_name);
}
public class A {
}
class B extends A {
}
}
|
gpl-2.0
|
eethomas/eucalyptus
|
clc/modules/simpleworkflow-common/src/main/java/com/eucalyptus/simpleworkflow/common/model/RequestCancelExternalWorkflowExecutionFailedCause.java
|
3489
|
/*************************************************************************
* Copyright 2014 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you
* need additional information or have any questions.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
************************************************************************/
package com.eucalyptus.simpleworkflow.common.model;
/**
* Request Cancel External Workflow Execution Failed Cause
*/
public enum RequestCancelExternalWorkflowExecutionFailedCause {
UNKNOWN_EXTERNAL_WORKFLOW_EXECUTION("UNKNOWN_EXTERNAL_WORKFLOW_EXECUTION"),
REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_RATE_EXCEEDED("REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_RATE_EXCEEDED"),
OPERATION_NOT_PERMITTED("OPERATION_NOT_PERMITTED");
private String value;
private RequestCancelExternalWorkflowExecutionFailedCause(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
/**
* Use this in place of valueOf.
*
* @param value
* real value
* @return RequestCancelExternalWorkflowExecutionFailedCause corresponding to the value
*/
public static RequestCancelExternalWorkflowExecutionFailedCause fromValue(String value) {
if (value == null || "".equals(value)) {
throw new IllegalArgumentException("Value cannot be null or empty!");
} else if ("UNKNOWN_EXTERNAL_WORKFLOW_EXECUTION".equals(value)) {
return RequestCancelExternalWorkflowExecutionFailedCause.UNKNOWN_EXTERNAL_WORKFLOW_EXECUTION;
} else if ("REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_RATE_EXCEEDED".equals(value)) {
return RequestCancelExternalWorkflowExecutionFailedCause.REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_RATE_EXCEEDED;
} else if ("OPERATION_NOT_PERMITTED".equals(value)) {
return RequestCancelExternalWorkflowExecutionFailedCause.OPERATION_NOT_PERMITTED;
} else {
throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
}
}
}
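// Minimal usage sketch (an illustrative addition, not part of the generated model):
// round-trips a wire value through fromValue()/toString().
class FromValueUsageSketch {
public static void main(String[] args) {
RequestCancelExternalWorkflowExecutionFailedCause cause =
RequestCancelExternalWorkflowExecutionFailedCause.fromValue("OPERATION_NOT_PERMITTED");
System.out.println(cause); // prints OPERATION_NOT_PERMITTED
}
}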
|
gpl-3.0
|
jtux270/translate
|
ovirt/3.6_source/backend/manager/modules/restapi/jaxrs/src/main/java/org/ovirt/engine/api/restapi/resource/AbstractBackendAssignedTagsResource.java
|
3404
|
package org.ovirt.engine.api.restapi.resource;
import static org.ovirt.engine.api.utils.ReflectionHelper.assignChildModel;
import java.util.List;
import javax.ws.rs.core.Response;
import org.ovirt.engine.api.model.BaseResource;
import org.ovirt.engine.api.model.Tag;
import org.ovirt.engine.api.resource.AssignedTagsResource;
import org.ovirt.engine.core.common.action.AttachEntityToTagParameters;
import org.ovirt.engine.core.common.action.TagsActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.Tags;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;
public abstract class AbstractBackendAssignedTagsResource
extends AbstractBackendCollectionResource<Tag, Tags>
implements AssignedTagsResource {
protected Class<? extends BaseResource> parentType;
protected String parentId;
protected VdcActionType attachAction;
public AbstractBackendAssignedTagsResource(Class<? extends BaseResource> parentType,
String parentId,
VdcActionType attachAction) {
super(Tag.class, Tags.class);
this.parentType = parentType;
this.parentId = parentId;
this.attachAction = attachAction;
}
public String getParentId() {
return parentId;
}
protected abstract List<Tags> getCollection();
private TagsActionParametersBase getAttachParams(String id) {
return new AttachEntityToTagParameters(asGuid(id), asList(asGuid(parentId)));
}
public org.ovirt.engine.api.model.Tags list() {
org.ovirt.engine.api.model.Tags ret = new org.ovirt.engine.api.model.Tags();
for (Tags tag : getCollection()) {
ret.getTags().add(addLinks(populate(map(tag), tag)));
}
return ret;
}
public Response add(Tag tag) {
validateParameters(tag, "id|name");
if (!tag.isSetId()) {
tag = lookupTagByName(tag.getName());
}
return performCreate(attachAction, getAttachParams(tag.getId()), new TagIdResolver(asGuid(tag.getId())));
}
@Override
public Tag addParents(Tag tag) {
assignChildModel(tag, parentType).setId(parentId);
return tag;
}
protected Tag lookupTagByName(String name) {
for (Tags tag : getBackendCollection(Tags.class, VdcQueryType.GetAllTags, new VdcQueryParametersBase())) {
if (tag.gettag_name().equals(name)) {
return map(tag);
}
}
return handleError(new EntityNotFoundException(name), false);
}
public Tags lookupTagById(Guid id) {
return getEntity(Tags.class, VdcQueryType.GetTagByTagId, new IdQueryParameters(id), id.toString(), true);
}
protected class TagIdResolver extends EntityIdResolver<Guid> {
private Guid id;
TagIdResolver(Guid id) {
this.id = id;
}
@Override
public Tags lookupEntity(Guid id) throws BackendFailureException {
assert (id == null); // attach actions return nothing, lookup original id instead
return lookupTagById(this.id);
}
}
}
|
gpl-3.0
|
jtux270/translate
|
ovirt/3.6_source/frontend/webadmin/modules/webadmin/src/main/java/org/ovirt/engine/ui/webadmin/section/main/view/popup/gluster/DetachGlusterHostsPopupView.java
|
3658
|
package org.ovirt.engine.ui.webadmin.section.main.view.popup.gluster;
import org.ovirt.engine.ui.common.idhandler.ElementIdHandler;
import org.ovirt.engine.ui.common.idhandler.WithElementId;
import org.ovirt.engine.ui.common.view.popup.AbstractModelBoundPopupView;
import org.ovirt.engine.ui.common.widget.Align;
import org.ovirt.engine.ui.common.widget.dialog.SimpleDialogPanel;
import org.ovirt.engine.ui.common.widget.editor.EntityModelCellTable;
import org.ovirt.engine.ui.common.widget.editor.generic.EntityModelCheckBoxEditor;
import org.ovirt.engine.ui.common.widget.table.column.AbstractEntityModelTextColumn;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.DetachGlusterHostsModel;
import org.ovirt.engine.ui.webadmin.ApplicationConstants;
import org.ovirt.engine.ui.webadmin.gin.AssetProvider;
import org.ovirt.engine.ui.webadmin.section.main.presenter.popup.gluster.DetachGlusterHostsPopupPresenterWidget;
import com.google.gwt.core.client.GWT;
import com.google.gwt.editor.client.Editor.Ignore;
import com.google.gwt.editor.client.Editor.Path;
import com.google.gwt.editor.client.SimpleBeanEditorDriver;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Label;
import com.google.inject.Inject;
public class DetachGlusterHostsPopupView extends AbstractModelBoundPopupView<DetachGlusterHostsModel> implements DetachGlusterHostsPopupPresenterWidget.ViewDef {
interface Driver extends SimpleBeanEditorDriver<DetachGlusterHostsModel, DetachGlusterHostsPopupView> {
}
interface ViewUiBinder extends UiBinder<SimpleDialogPanel, DetachGlusterHostsPopupView> {
ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class);
}
interface ViewIdHandler extends ElementIdHandler<DetachGlusterHostsPopupView> {
ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
}
@UiField(provided = true)
@Ignore
@WithElementId
EntityModelCellTable<ListModel> hostsTable;
@UiField(provided = true)
@Path(value = "force.entity")
@WithElementId
EntityModelCheckBoxEditor forceEditor;
@UiField
@Ignore
Label messageLabel;
private final Driver driver = GWT.create(Driver.class);
private static final ApplicationConstants constants = AssetProvider.getConstants();
@Inject
public DetachGlusterHostsPopupView(EventBus eventBus) {
super(eventBus);
hostsTable = new EntityModelCellTable<ListModel>(true, false, true);
forceEditor = new EntityModelCheckBoxEditor(Align.RIGHT);
initWidget(ViewUiBinder.uiBinder.createAndBindUi(this));
ViewIdHandler.idHandler.generateAndSetIds(this);
localize();
initTableColumns();
driver.initialize(this);
}
protected void initTableColumns(){
// Table Entity Columns
hostsTable.addColumn(new AbstractEntityModelTextColumn<String>() {
@Override
public String getText(String hostAddress) {
return hostAddress;
}
}, constants.detachGlusterHostsHostAddress());
}
private void localize() {
forceEditor.setLabel(constants.detachGlusterHostsForcefully());
}
@Override
public void edit(DetachGlusterHostsModel object) {
hostsTable.asEditor().edit(object.getHosts());
driver.edit(object);
}
@Override
public void setMessage(String message) {
super.setMessage(message);
messageLabel.setText(message);
}
@Override
public DetachGlusterHostsModel flush() {
hostsTable.flush();
return driver.flush();
}
}
|
gpl-3.0
|
cascheberg/Signal-Android
|
app/src/main/java/org/thoughtcrime/securesms/contacts/SelectedContactSet.java
|
1426
|
package org.thoughtcrime.securesms.contacts;
import androidx.annotation.NonNull;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* Specialised set for {@link SelectedContact} that will not allow more than one entry that
* {@link SelectedContact#matches(SelectedContact)} any other.
*/
public final class SelectedContactSet {
private final List<SelectedContact> contacts = new LinkedList<>();
public boolean add(@NonNull SelectedContact contact) {
if (contains(contact)) {
return false;
}
contacts.add(contact);
return true;
}
public boolean contains(@NonNull SelectedContact otherContact) {
for (SelectedContact contact : contacts) {
if (otherContact.matches(contact)) {
return true;
}
}
return false;
}
public List<SelectedContact> getContacts() {
return new ArrayList<>(contacts);
}
public int size() {
return contacts.size();
}
public void clear() {
contacts.clear();
}
public int remove(@NonNull SelectedContact otherContact) {
int removeCount = 0;
Iterator<SelectedContact> iterator = contacts.iterator();
while (iterator.hasNext()) {
SelectedContact next = iterator.next();
if (next.matches(otherContact)) {
iterator.remove();
removeCount++;
}
}
return removeCount;
}
}
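// Illustrative behaviour sketch (construction of SelectedContact instances elided):
// set.add(contactA); // true - first entry
// set.add(contactLikeA); // false - matches(contactA), so rejected
// set.remove(contactA); // removes every entry matching contactA, returns the count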
|
gpl-3.0
|
CURocketry/Ground_Station_GUI
|
src/org/openstreetmap/josm/gui/IconToggleButton.java
|
5407
|
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.gui;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.Action;
import javax.swing.Icon;
import javax.swing.JToggleButton;
import org.openstreetmap.josm.Main;
import org.openstreetmap.josm.actions.ExpertToggleAction;
import org.openstreetmap.josm.actions.ExpertToggleAction.ExpertModeChangeListener;
import org.openstreetmap.josm.tools.Destroyable;
/**
* Just a toggle button, with smaller border and icon only to display in
* MapFrame toolbars.
* Also provides methods for storing hidden state in preferences
* @author imi, akks
*/
public class IconToggleButton extends JToggleButton implements HideableButton, PropertyChangeListener, Destroyable, ExpertModeChangeListener {
public boolean groupbutton;
private ShowHideButtonListener listener;
private boolean hideIfDisabled = false;
private boolean isExpert;
/**
* Construct the toggle button with the given action.
*/
public IconToggleButton(Action action) {
this(action, false);
}
/**
* Construct the toggle button with the given action.
* @param isExpert if {@code true}, the button is hidden unless expert mode is enabled
*/
public IconToggleButton(Action action, boolean isExpert) {
super(action);
this.isExpert = isExpert;
setText(null);
Object o = action.getValue(Action.SHORT_DESCRIPTION);
if (o != null) {
setToolTipText(o.toString());
}
action.addPropertyChangeListener(this);
addMouseListener(new MouseAdapter(){
@Override public void mousePressed(MouseEvent e) {
groupbutton = e.getX() > getWidth()/2 && e.getY() > getHeight()/2;
}
});
ExpertToggleAction.addExpertModeChangeListener(this);
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (evt.getPropertyName().equals("active")) {
setSelected((Boolean)evt.getNewValue());
requestFocusInWindow();
} else if (evt.getPropertyName().equals("selected")) {
setSelected((Boolean)evt.getNewValue());
}
}
@Override
public void destroy() {
Action action = getAction();
if (action instanceof Destroyable) {
((Destroyable) action).destroy();
}
if (action != null) {
action.removePropertyChangeListener(this);
}
}
String getPreferenceKey() {
String s = (String) getSafeActionValue("toolbar");
if (s == null) {
if (getAction()!=null) {
s = getAction().getClass().getName();
}
}
return "sidetoolbar.hidden."+s;
}
@Override
public void expertChanged(boolean isExpert) {
applyButtonHiddenPreferences();
}
@Override
public void applyButtonHiddenPreferences() {
boolean alwaysHideDisabled = Main.pref.getBoolean("sidetoolbar.hideDisabledButtons", false);
if (!isEnabled() && (hideIfDisabled || alwaysHideDisabled)) {
setVisible(false); // hide because of disabled button
} else {
boolean hiddenFlag = false;
String hiddenFlagStr = Main.pref.get(getPreferenceKey(), null);
if (hiddenFlagStr == null) {
if (isExpert && !ExpertToggleAction.isExpert()) {
hiddenFlag = true;
}
} else {
hiddenFlag = Boolean.parseBoolean(hiddenFlagStr);
}
setVisible( !hiddenFlag ); // show or hide, do what preferences say
}
}
@Override
public void setButtonHidden(boolean b) {
setVisible(!b);
if (listener!=null) { // if someone wants to know about changes of visibility
if (!b) listener.buttonShown(); else listener.buttonHidden();
}
if ((b && isExpert && !ExpertToggleAction.isExpert()) ||
(!b && isExpert && ExpertToggleAction.isExpert())) {
Main.pref.put(getPreferenceKey(), null);
} else {
Main.pref.put(getPreferenceKey(), b);
}
}
/*
* This function should be called for plugins that want to enable auto-hiding
* custom buttons when they are disabled (because of an incorrect layer, for example)
*/
public void setAutoHideDisabledButton(boolean b) {
hideIfDisabled = b;
if (b && !isEnabled()) {
setVisible(false);
}
}
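// Illustrative plugin usage (hypothetical action class, not part of this file):
// IconToggleButton btn = new IconToggleButton(new MyMapModeAction());
// btn.setAutoHideDisabledButton(true); // button disappears whenever its action is disabled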
@Override
public void showButton() {
setButtonHidden(false);
}
@Override
public void hideButton() {
setButtonHidden(true);
}
@Override
public String getActionName() {
return (String) getSafeActionValue(Action.NAME);
}
@Override
public Icon getIcon() {
return (Icon) getSafeActionValue(Action.SMALL_ICON);
}
@Override
public boolean isButtonVisible() {
return isVisible();
}
@Override
public void setShowHideButtonListener(ShowHideButtonListener l) {
listener = l;
}
protected final Object getSafeActionValue(String key) {
// Mac OS X Aqua L&F can call accessors from constructor, so getAction() can be null in those cases
return getAction() != null ? getAction().getValue(key) : null;
}
}
|
gpl-3.0
|
madhumita-git/Excitement-Open-Platform
|
transformations/src/main/java/eu/excitementproject/eop/transformations/operations/finders/RulesByBagOfRulesRuleBaseFinder.java
|
8911
|
package eu.excitementproject.eop.transformations.operations.finders;
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import eu.excitementproject.eop.common.component.syntacticknowledge.RuleWithConfidenceAndDescription;
import eu.excitementproject.eop.common.datastructures.BidirectionalMap;
import eu.excitementproject.eop.common.datastructures.FlippedBidirectionalMap;
import eu.excitementproject.eop.common.representation.parse.representation.basic.Info;
import eu.excitementproject.eop.common.representation.parse.tree.TreeAndParentMap;
import eu.excitementproject.eop.common.representation.parse.tree.TreeIterator;
import eu.excitementproject.eop.common.representation.parse.tree.dependency.basic.BasicNode;
import eu.excitementproject.eop.common.representation.parse.tree.match.AllEmbeddedMatcher;
import eu.excitementproject.eop.common.representation.parse.tree.match.MatcherException;
import eu.excitementproject.eop.common.utilities.Cache;
import eu.excitementproject.eop.common.utilities.CacheFactory;
import eu.excitementproject.eop.transformations.operations.OperationException;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.AllowedRootsByAffectedNodesUtility;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.LemmaAndSimplerCanonicalPos;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.ParseTreeCharacteristics;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.ParseTreeCharacteristicsCollector;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.PosRelPos;
import eu.excitementproject.eop.transformations.operations.finders.auxiliary.SingleItemBidirectionalMap;
import eu.excitementproject.eop.transformations.operations.rules.BagOfRulesRuleBase;
import eu.excitementproject.eop.transformations.operations.rules.RuleBaseException;
import eu.excitementproject.eop.transformations.operations.specifications.RuleSpecification;
import eu.excitementproject.eop.transformations.representation.ExtendedInfo;
import eu.excitementproject.eop.transformations.representation.ExtendedMatchCriteria;
import eu.excitementproject.eop.transformations.representation.ExtendedNode;
import static eu.excitementproject.eop.transformations.utilities.Constants.CACHE_SIZE_BAG_OF_RULES;
/**
* This {@link Finder} returns a set of {@link RuleSpecification}s, based on the
* given text tree and a rule base. The rule-base is given as a set of rules,
* implemented as {@link BagOfRulesRuleBase}.
* <P>
* Note that {@link BagOfRulesRuleBase} and this finder are not very efficient,
* since matching is attempted rule by rule, with only a lightweight
* characteristics-based pre-filter (see mightMatch) applied beforehand.
*
* @author Asher Stern
* @since Feb 24, 2011
*
*/
public class RulesByBagOfRulesRuleBaseFinder implements Finder<RuleSpecification>
{
public RulesByBagOfRulesRuleBaseFinder(TreeAndParentMap<ExtendedInfo, ExtendedNode> textTree,
BagOfRulesRuleBase<Info, BasicNode> ruleBase, String ruleBaseName)
{
super();
this.textTree = textTree;
this.ruleBase = ruleBase;
this.ruleBaseName = ruleBaseName;
}
@Override
public void optionallyOptimizeRuntimeByAffectedNodes(Set<ExtendedNode> affectedNodes) throws OperationException
{
this.affectedNodes = affectedNodes;
}
@Override
public void find() throws OperationException
{
try
{
matchCriteria = new ExtendedMatchCriteria();
extractGivenTreeCharacteristics();
Set<ExtendedNode> allowedRoots = null;
if (affectedNodes!=null) {allowedRoots = AllowedRootsByAffectedNodesUtility.findAllowedRootsByAffectedNodes(textTree, affectedNodes);}
specs = new LinkedHashSet<RuleSpecification>();
debug_numberOfFilteredRules=0;
for (RuleWithConfidenceAndDescription<Info, BasicNode> rule : ruleBase.getRules())
{
if (mightMatch(rule))
{
if (!(rule.getRule().getLeftHandSide().hasChildren()))
{
findForSingleNodeRule(rule);
}
else
{
AllEmbeddedMatcher<ExtendedInfo, Info, ExtendedNode, BasicNode> matcher =
new AllEmbeddedMatcher<ExtendedInfo, Info, ExtendedNode, BasicNode>(matchCriteria);
if (allowedRoots!=null)
{
matcher.setAllowedRoots(allowedRoots);
}
matcher.setTrees(this.textTree.getTree(), rule.getRule().getLeftHandSide());
matcher.findMatches();
Set<BidirectionalMap<ExtendedNode, BasicNode>> matches = matcher.getMatches();
for (BidirectionalMap<ExtendedNode, BasicNode> singleLhsMatch : matches)
{
BidirectionalMap<BasicNode, ExtendedNode> mapLhsToTree = new FlippedBidirectionalMap<BasicNode, ExtendedNode>(singleLhsMatch);
boolean introduction = false;
if (rule.getRule().isExtraction()!=null)
{
introduction = rule.getRule().isExtraction().booleanValue();
}
specs.add(new RuleSpecification(this.ruleBaseName,rule,mapLhsToTree,introduction));
}
}
}
}
if (logger.isDebugEnabled())
{
logger.debug("Number of filtered rules: "+debug_numberOfFilteredRules+" out of "+ruleBase.getRules().size()+" total rules.");
}
}
catch(MatcherException e)
{
throw new OperationException("Matcher failed. See nested exception.",e);
}
catch(RuleBaseException e)
{
throw new OperationException("RuleBase failure. See nested exception.",e);
}
}
@Override
public Set<RuleSpecification> getSpecs() throws OperationException
{
if (specs==null)
throw new OperationException("You did not call find()");
return this.specs;
}
private void findForSingleNodeRule(RuleWithConfidenceAndDescription<Info, BasicNode> rule)
{
BasicNode lhs = rule.getRule().getLeftHandSide();
for (ExtendedNode node : TreeIterator.iterableTree(textTree.getTree()))
{
if (matchCriteria.nodesMatch(node, lhs))
{
specs.add(new RuleSpecification(this.ruleBaseName,rule,
new SingleItemBidirectionalMap<BasicNode, ExtendedNode>(lhs, node)
,false));
}
}
}
private ParseTreeCharacteristics<Info, BasicNode> getCharacteristicsOfRule(RuleWithConfidenceAndDescription<Info, BasicNode> rule)
{
ParseTreeCharacteristics<Info, BasicNode> ret = null;
BasicNode lhs = rule.getRule().getLeftHandSide();
if (ruleBaseCharacteristicsCache.containsKey(lhs))
{
ret = ruleBaseCharacteristicsCache.get(lhs);
}
else
{
ParseTreeCharacteristicsCollector<Info, BasicNode> collector = new ParseTreeCharacteristicsCollector<Info, BasicNode>(lhs);
collector.extract();
ret = new ParseTreeCharacteristics<Info, BasicNode>(collector.getPosRelPosSet(),collector.getLemmaAndPosSet());
ruleBaseCharacteristicsCache.put(lhs, ret);
}
return ret;
}
private void extractGivenTreeCharacteristics()
{
ParseTreeCharacteristicsCollector<ExtendedInfo,ExtendedNode> collector = new ParseTreeCharacteristicsCollector<ExtendedInfo,ExtendedNode>(textTree.getTree());
collector.extract();
posRelPosTree = collector.getPosRelPosSet();
lemmaAndPosTree = collector.getLemmaAndPosSet();
if (logger.isDebugEnabled())
{
logger.debug("Given tree characteristics:");
logger.debug("posRelPosTree = "+printSet(posRelPosTree));
logger.debug("lemmaAndPosTree = "+printSet(lemmaAndPosTree));
}
}
private boolean mightMatch(RuleWithConfidenceAndDescription<Info, BasicNode> rule)
{
boolean ret = false;
ParseTreeCharacteristics<Info, BasicNode> ruleCharacteristics = getCharacteristicsOfRule(rule);
if (posRelPosTree.containsAll(ruleCharacteristics.getPosRelPosSet()))
{
if (lemmaAndPosTree.containsAll(ruleCharacteristics.getLemmaAndPosSet()))
{
ret = true;
}
}
if (!ret) {++debug_numberOfFilteredRules;}
return ret;
}
private static <T> String printSet(Set<T> set)
{
StringBuilder sb = new StringBuilder();
sb.append("{ ");
boolean firstIteration = true;
for (T t : set)
{
if (firstIteration) {firstIteration=false;}
else {sb.append(",");}
sb.append(t.toString());
}
sb.append(" }");
return sb.toString();
}
// input
private final TreeAndParentMap<ExtendedInfo, ExtendedNode> textTree;
private final BagOfRulesRuleBase<Info, BasicNode> ruleBase;
private final String ruleBaseName;
private Set<ExtendedNode> affectedNodes = null;
// internals
private ExtendedMatchCriteria matchCriteria = null;
private Set<PosRelPos> posRelPosTree;
private Set<LemmaAndSimplerCanonicalPos> lemmaAndPosTree;
private Cache<BasicNode, ParseTreeCharacteristics<Info, BasicNode>> ruleBaseCharacteristicsCache = new CacheFactory<BasicNode, ParseTreeCharacteristics<Info, BasicNode>>().getCache(CACHE_SIZE_BAG_OF_RULES);
private int debug_numberOfFilteredRules = 0;
// output
private Set<RuleSpecification> specs = null;
private static final Logger logger = Logger.getLogger(RulesByBagOfRulesRuleBaseFinder.class);
}
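// Illustrative call sequence (hypothetical textTree/ruleBase values; not part of the original file):
// RulesByBagOfRulesRuleBaseFinder finder =
// new RulesByBagOfRulesRuleBaseFinder(textTree, ruleBase, "myRuleBase");
// finder.find(); // matches every rule against the text tree
// Set<RuleSpecification> specs = finder.getSpecs(); // throws if find() was not called first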
|
gpl-3.0
|
DISID/disid-proofs
|
spring-boot-soap/server/contract-last/src/main/java/com/springsource/petclinic/format/VisitFormatter.java
|
402
|
package com.springsource.petclinic.format;
import com.springsource.petclinic.domain.Visit;
import com.springsource.petclinic.service.api.VisitService;
import org.springframework.core.convert.ConversionService;
import org.springframework.roo.addon.web.mvc.controller.annotations.formatters.RooFormatter;
@RooFormatter(entity = Visit.class, service = VisitService.class)
public class VisitFormatter {
}
|
gpl-3.0
|
mcomella/FirefoxAccounts-android
|
thirdparty/src/main/java/ch/boye/httpclientandroidlib/protocol/BasicHttpProcessor.java
|
8465
|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package ch.boye.httpclientandroidlib.protocol;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import ch.boye.httpclientandroidlib.HttpException;
import ch.boye.httpclientandroidlib.HttpRequest;
import ch.boye.httpclientandroidlib.HttpRequestInterceptor;
import ch.boye.httpclientandroidlib.HttpResponse;
import ch.boye.httpclientandroidlib.HttpResponseInterceptor;
import ch.boye.httpclientandroidlib.annotation.NotThreadSafe;
import ch.boye.httpclientandroidlib.util.Args;
/**
* Default implementation of {@link HttpProcessor}.
* <p>
* Please note access to the internal structures of this class is not
* synchronized and therefore this class may be thread-unsafe.
*
* @since 4.0
*
* @deprecated (4.3)
*/
@NotThreadSafe
@Deprecated
public final class BasicHttpProcessor implements
HttpProcessor, HttpRequestInterceptorList, HttpResponseInterceptorList, Cloneable {
// Don't allow direct access, as nulls are not allowed
protected final List<HttpRequestInterceptor> requestInterceptors = new ArrayList<HttpRequestInterceptor>();
protected final List<HttpResponseInterceptor> responseInterceptors = new ArrayList<HttpResponseInterceptor>();
public void addRequestInterceptor(final HttpRequestInterceptor itcp) {
if (itcp == null) {
return;
}
this.requestInterceptors.add(itcp);
}
public void addRequestInterceptor(
final HttpRequestInterceptor itcp, final int index) {
if (itcp == null) {
return;
}
this.requestInterceptors.add(index, itcp);
}
public void addResponseInterceptor(
final HttpResponseInterceptor itcp, final int index) {
if (itcp == null) {
return;
}
this.responseInterceptors.add(index, itcp);
}
public void removeRequestInterceptorByClass(final Class<? extends HttpRequestInterceptor> clazz) {
for (final Iterator<HttpRequestInterceptor> it = this.requestInterceptors.iterator();
it.hasNext(); ) {
final HttpRequestInterceptor interceptor = it.next();
if (interceptor.getClass().equals(clazz)) {
it.remove();
}
}
}
public void removeResponseInterceptorByClass(final Class<? extends HttpResponseInterceptor> clazz) {
for (final Iterator<HttpResponseInterceptor> it = this.responseInterceptors.iterator();
it.hasNext(); ) {
final HttpResponseInterceptor interceptor = it.next();
if (interceptor.getClass().equals(clazz)) {
it.remove();
}
}
}
public final void addInterceptor(final HttpRequestInterceptor interceptor) {
addRequestInterceptor(interceptor);
}
public final void addInterceptor(final HttpRequestInterceptor interceptor, final int index) {
addRequestInterceptor(interceptor, index);
}
public int getRequestInterceptorCount() {
return this.requestInterceptors.size();
}
public HttpRequestInterceptor getRequestInterceptor(final int index) {
if ((index < 0) || (index >= this.requestInterceptors.size())) {
return null;
}
return this.requestInterceptors.get(index);
}
public void clearRequestInterceptors() {
this.requestInterceptors.clear();
}
public void addResponseInterceptor(final HttpResponseInterceptor itcp) {
if (itcp == null) {
return;
}
this.responseInterceptors.add(itcp);
}
public final void addInterceptor(final HttpResponseInterceptor interceptor) {
addResponseInterceptor(interceptor);
}
public final void addInterceptor(final HttpResponseInterceptor interceptor, final int index) {
addResponseInterceptor(interceptor, index);
}
public int getResponseInterceptorCount() {
return this.responseInterceptors.size();
}
public HttpResponseInterceptor getResponseInterceptor(final int index) {
if ((index < 0) || (index >= this.responseInterceptors.size())) {
return null;
}
return this.responseInterceptors.get(index);
}
public void clearResponseInterceptors() {
this.responseInterceptors.clear();
}
/**
* Sets the interceptor lists.
* First, both interceptor lists maintained by this processor
* will be cleared.
* Subsequently,
* elements of the argument list that are request interceptors will be
* added to the request interceptor list.
* Elements that are response interceptors will be
* added to the response interceptor list.
* Elements that are both request and response interceptor will be
* added to both lists.
* Elements that are neither request nor response interceptor
* will be ignored.
*
* @param list the list of request and response interceptors
* from which to initialize
*/
public void setInterceptors(final List<?> list) {
Args.notNull(list, "Inteceptor list");
this.requestInterceptors.clear();
this.responseInterceptors.clear();
for (final Object obj : list) {
if (obj instanceof HttpRequestInterceptor) {
addInterceptor((HttpRequestInterceptor) obj);
}
if (obj instanceof HttpResponseInterceptor) {
addInterceptor((HttpResponseInterceptor) obj);
}
}
}
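// Illustrative sketch (hypothetical LoggingInterceptor implementing both interceptor interfaces):
// BasicHttpProcessor proc = new BasicHttpProcessor();
// proc.setInterceptors(Arrays.asList(new LoggingInterceptor()));
// proc.getRequestInterceptorCount(); // 1 - added to the request list
// proc.getResponseInterceptorCount(); // 1 - also added to the response list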
/**
* Clears both interceptor lists maintained by this processor.
*/
public void clearInterceptors() {
clearRequestInterceptors();
clearResponseInterceptors();
}
public void process(
final HttpRequest request,
final HttpContext context)
throws IOException, HttpException {
for (final HttpRequestInterceptor interceptor : this.requestInterceptors) {
interceptor.process(request, context);
}
}
public void process(
final HttpResponse response,
final HttpContext context)
throws IOException, HttpException {
for (final HttpResponseInterceptor interceptor : this.responseInterceptors) {
interceptor.process(response, context);
}
}
/**
* Sets up the target to have the same list of interceptors
* as the current instance.
*
* @param target object to be initialised
*/
protected void copyInterceptors(final BasicHttpProcessor target) {
target.requestInterceptors.clear();
target.requestInterceptors.addAll(this.requestInterceptors);
target.responseInterceptors.clear();
target.responseInterceptors.addAll(this.responseInterceptors);
}
/**
* Creates a copy of this instance
*
* @return new instance of the BasicHttpProcessor
*/
public BasicHttpProcessor copy() {
final BasicHttpProcessor clone = new BasicHttpProcessor();
copyInterceptors(clone);
return clone;
}
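// Illustrative note (sketch only): copy() returns an independent processor, so
// mutating the copy's chains leaves this instance untouched.
//
//   BasicHttpProcessor snapshot = processor.copy();
//   snapshot.clearRequestInterceptors();   // original chains are unchanged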
@Override
public Object clone() throws CloneNotSupportedException {
final BasicHttpProcessor clone = (BasicHttpProcessor) super.clone();
copyInterceptors(clone);
return clone;
}
}
|
mpl-2.0
|
open-health-hub/openMAXIMS
|
openmaxims_workspace/ValueObjects/src/ims/icp/vo/OutpatientEpisodeWithICPInfoVo.java
|
31151
|
//#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.icp.vo;
/**
* Linked to Scheduling.Booking_Appointment business object (ID: 1055100007).
*/
public class OutpatientEpisodeWithICPInfoVo extends ims.scheduling.vo.Booking_AppointmentRefVo implements ims.vo.ImsCloneable, Comparable
{
private static final long serialVersionUID = 1L;
public OutpatientEpisodeWithICPInfoVo()
{
}
public OutpatientEpisodeWithICPInfoVo(Integer id, int version)
{
super(id, version);
}
public OutpatientEpisodeWithICPInfoVo(ims.icp.vo.beans.OutpatientEpisodeWithICPInfoVoBean bean)
{
this.id = bean.getId();
this.version = bean.getVersion();
this.patient = bean.getPatient() == null ? null : bean.getPatient().buildVo();
this.appointmenttime = bean.getAppointmentTime() == null ? null : bean.getAppointmentTime().buildTime();
this.session = bean.getSession() == null ? null : bean.getSession().buildVo();
this.icpinfo = bean.getICPInfo() == null ? null : bean.getICPInfo().buildVo();
this.apptstatus = bean.getApptStatus() == null ? null : ims.scheduling.vo.lookups.Status_Reason.buildLookup(bean.getApptStatus());
this.appointmentdate = bean.getAppointmentDate() == null ? null : bean.getAppointmentDate().buildDate();
this.currentappttrakingstatus = bean.getCurrentApptTrakingStatus() == null ? null : bean.getCurrentApptTrakingStatus().buildVo();
this.activity = bean.getActivity() == null ? null : bean.getActivity().buildVo();
this.appointmenttrackingstatus = bean.getAppointmentTrackingStatus() == null ? null : bean.getAppointmentTrackingStatus().buildVo();
this.referral = bean.getReferral() == null ? null : bean.getReferral().buildVo();
this.haselectivelist = bean.getHasElectiveList();
this.outcome = bean.getOutcome() == null ? null : ims.scheduling.vo.lookups.ApptOutcome.buildLookup(bean.getOutcome());
this.outcomeactions = ims.scheduling.vo.AppointmentOutcomeActionVoCollection.buildFromBeanCollection(bean.getOutcomeActions());
this.casenotespulled = bean.getCaseNotesPulled();
this.wasselected = bean.getWasSelected();
}
public void populate(ims.vo.ValueObjectBeanMap map, ims.icp.vo.beans.OutpatientEpisodeWithICPInfoVoBean bean)
{
this.id = bean.getId();
this.version = bean.getVersion();
this.patient = bean.getPatient() == null ? null : bean.getPatient().buildVo(map);
this.appointmenttime = bean.getAppointmentTime() == null ? null : bean.getAppointmentTime().buildTime();
this.session = bean.getSession() == null ? null : bean.getSession().buildVo(map);
this.icpinfo = bean.getICPInfo() == null ? null : bean.getICPInfo().buildVo(map);
this.apptstatus = bean.getApptStatus() == null ? null : ims.scheduling.vo.lookups.Status_Reason.buildLookup(bean.getApptStatus());
this.appointmentdate = bean.getAppointmentDate() == null ? null : bean.getAppointmentDate().buildDate();
this.currentappttrakingstatus = bean.getCurrentApptTrakingStatus() == null ? null : bean.getCurrentApptTrakingStatus().buildVo(map);
this.activity = bean.getActivity() == null ? null : bean.getActivity().buildVo(map);
this.appointmenttrackingstatus = bean.getAppointmentTrackingStatus() == null ? null : bean.getAppointmentTrackingStatus().buildVo(map);
this.referral = bean.getReferral() == null ? null : bean.getReferral().buildVo(map);
this.haselectivelist = bean.getHasElectiveList();
this.outcome = bean.getOutcome() == null ? null : ims.scheduling.vo.lookups.ApptOutcome.buildLookup(bean.getOutcome());
this.outcomeactions = ims.scheduling.vo.AppointmentOutcomeActionVoCollection.buildFromBeanCollection(bean.getOutcomeActions());
this.casenotespulled = bean.getCaseNotesPulled();
this.wasselected = bean.getWasSelected();
}
public ims.vo.ValueObjectBean getBean()
{
return this.getBean(new ims.vo.ValueObjectBeanMap());
}
public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
{
ims.icp.vo.beans.OutpatientEpisodeWithICPInfoVoBean bean = null;
if(map != null)
bean = (ims.icp.vo.beans.OutpatientEpisodeWithICPInfoVoBean)map.getValueObjectBean(this);
if (bean == null)
{
bean = new ims.icp.vo.beans.OutpatientEpisodeWithICPInfoVoBean();
map.addValueObjectBean(this, bean);
bean.populate(map, this);
}
return bean;
}
public Object getFieldValueByFieldName(String fieldName)
{
if(fieldName == null)
throw new ims.framework.exceptions.CodingRuntimeException("Invalid field name");
fieldName = fieldName.toUpperCase();
if(fieldName.equals("PATIENT"))
return getPatient();
if(fieldName.equals("APPOINTMENTTIME"))
return getAppointmentTime();
if(fieldName.equals("SESSION"))
return getSession();
if(fieldName.equals("ICPINFO"))
return getICPInfo();
if(fieldName.equals("APPTSTATUS"))
return getApptStatus();
if(fieldName.equals("APPOINTMENTDATE"))
return getAppointmentDate();
if(fieldName.equals("CURRENTAPPTTRAKINGSTATUS"))
return getCurrentApptTrakingStatus();
if(fieldName.equals("ACTIVITY"))
return getActivity();
if(fieldName.equals("APPOINTMENTTRACKINGSTATUS"))
return getAppointmentTrackingStatus();
if(fieldName.equals("REFERRAL"))
return getReferral();
if(fieldName.equals("HASELECTIVELIST"))
return getHasElectiveList();
if(fieldName.equals("OUTCOME"))
return getOutcome();
if(fieldName.equals("OUTCOMEACTIONS"))
return getOutcomeActions();
if(fieldName.equals("CASENOTESPULLED"))
return getCaseNotesPulled();
if(fieldName.equals("WASSELECTED"))
return getWasSelected();
return super.getFieldValueByFieldName(fieldName);
}
public boolean getPatientIsNotNull()
{
return this.patient != null;
}
public ims.core.vo.PatientShort getPatient()
{
return this.patient;
}
public void setPatient(ims.core.vo.PatientShort value)
{
this.isValidated = false;
this.patient = value;
}
public boolean getAppointmentTimeIsNotNull()
{
return this.appointmenttime != null;
}
public ims.framework.utils.Time getAppointmentTime()
{
return this.appointmenttime;
}
public void setAppointmentTime(ims.framework.utils.Time value)
{
this.isValidated = false;
this.appointmenttime = value;
}
public boolean getSessionIsNotNull()
{
return this.session != null;
}
public ims.scheduling.vo.SessionLiteWithListownerVo getSession()
{
return this.session;
}
public void setSession(ims.scheduling.vo.SessionLiteWithListownerVo value)
{
this.isValidated = false;
this.session = value;
}
public boolean getICPInfoIsNotNull()
{
return this.icpinfo != null;
}
public ims.icp.vo.PatientICPLiteVo getICPInfo()
{
return this.icpinfo;
}
public void setICPInfo(ims.icp.vo.PatientICPLiteVo value)
{
this.isValidated = false;
this.icpinfo = value;
}
public boolean getApptStatusIsNotNull()
{
return this.apptstatus != null;
}
public ims.scheduling.vo.lookups.Status_Reason getApptStatus()
{
return this.apptstatus;
}
public void setApptStatus(ims.scheduling.vo.lookups.Status_Reason value)
{
this.isValidated = false;
this.apptstatus = value;
}
public boolean getAppointmentDateIsNotNull()
{
return this.appointmentdate != null;
}
public ims.framework.utils.Date getAppointmentDate()
{
return this.appointmentdate;
}
public void setAppointmentDate(ims.framework.utils.Date value)
{
this.isValidated = false;
this.appointmentdate = value;
}
public boolean getCurrentApptTrakingStatusIsNotNull()
{
return this.currentappttrakingstatus != null;
}
public ims.scheduling.vo.Appt_Tracking_Status_HistoryVo getCurrentApptTrakingStatus()
{
return this.currentappttrakingstatus;
}
public void setCurrentApptTrakingStatus(ims.scheduling.vo.Appt_Tracking_Status_HistoryVo value)
{
this.isValidated = false;
this.currentappttrakingstatus = value;
}
public boolean getActivityIsNotNull()
{
return this.activity != null;
}
public ims.core.vo.ActivityLiteVo getActivity()
{
return this.activity;
}
public void setActivity(ims.core.vo.ActivityLiteVo value)
{
this.isValidated = false;
this.activity = value;
}
public boolean getAppointmentTrackingStatusIsNotNull()
{
return this.appointmenttrackingstatus != null;
}
public ims.admin.vo.AppointmentTrackingstatusColourConfigVo getAppointmentTrackingStatus()
{
return this.appointmenttrackingstatus;
}
public void setAppointmentTrackingStatus(ims.admin.vo.AppointmentTrackingstatusColourConfigVo value)
{
this.isValidated = false;
this.appointmenttrackingstatus = value;
}
public boolean getReferralIsNotNull()
{
return this.referral != null;
}
public ims.RefMan.vo.CatsReferralBreachDatesVo getReferral()
{
return this.referral;
}
public void setReferral(ims.RefMan.vo.CatsReferralBreachDatesVo value)
{
this.isValidated = false;
this.referral = value;
}
public boolean getHasElectiveListIsNotNull()
{
return this.haselectivelist != null;
}
public Boolean getHasElectiveList()
{
return this.haselectivelist;
}
public void setHasElectiveList(Boolean value)
{
this.isValidated = false;
this.haselectivelist = value;
}
public boolean getOutcomeIsNotNull()
{
return this.outcome != null;
}
public ims.scheduling.vo.lookups.ApptOutcome getOutcome()
{
return this.outcome;
}
public void setOutcome(ims.scheduling.vo.lookups.ApptOutcome value)
{
this.isValidated = false;
this.outcome = value;
}
public boolean getOutcomeActionsIsNotNull()
{
return this.outcomeactions != null;
}
public ims.scheduling.vo.AppointmentOutcomeActionVoCollection getOutcomeActions()
{
return this.outcomeactions;
}
public void setOutcomeActions(ims.scheduling.vo.AppointmentOutcomeActionVoCollection value)
{
this.isValidated = false;
this.outcomeactions = value;
}
public boolean getCaseNotesPulledIsNotNull()
{
return this.casenotespulled != null;
}
public Boolean getCaseNotesPulled()
{
return this.casenotespulled;
}
public void setCaseNotesPulled(Boolean value)
{
this.isValidated = false;
this.casenotespulled = value;
}
public boolean getWasSelectedIsNotNull()
{
return this.wasselected != null;
}
public Boolean getWasSelected()
{
return this.wasselected;
}
public void setWasSelected(Boolean value)
{
this.isValidated = false;
this.wasselected = value;
}
/**
* OutpatientEpisodeWithICPInfoVoTimeComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoTimeComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoTimeComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoTimeComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
boolean hasTime1 = voObj1.getAppointmentTimeIsNotNull();
boolean hasTime2 = voObj2.getAppointmentTimeIsNotNull();
if (hasTime1 && hasTime2)
return direction*(voObj1.getAppointmentTime().compareTo(voObj2.getAppointmentTime()));
if (hasTime1)
return -1*direction;
if (hasTime2)
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* getTimeComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoTimeComparator
*/
public static OutpatientEpisodeWithICPInfoVoTimeComparator getTimeComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoTimeComparator(sortOrder);
}
/**
* getDOBComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoDOBComparator
*/
public static OutpatientEpisodeWithICPInfoVoDOBComparator getDOBComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoDOBComparator(sortOrder);
}
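// Minimal usage sketch (illustrative; the episodes list is hypothetical):
//
//   java.util.List episodes = ...;
//   java.util.Collections.sort(episodes,
//       OutpatientEpisodeWithICPInfoVo.getTimeComparator(
//           ims.framework.enumerations.SortOrder.ASCENDING));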
/**
* OutpatientEpisodeWithICPInfoVoDOBComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoDOBComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoDOBComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoDOBComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo )obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo )obj2;
ims.framework.utils.PartialDate pdate1 = voObj1.getPatientIsNotNull() ? voObj1.getPatient().getDob() : null;
ims.framework.utils.PartialDate pdate2 = voObj2.getPatientIsNotNull() ? voObj2.getPatient().getDob() : null;
if(pdate1 != null && pdate2 != null)
return pdate1.compareTo(pdate2)*direction;
if(pdate1 != null)
return (-1)*direction;
if(pdate2 != null)
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* OutpatientEpisodeWithICPInfoVoICPImageComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoICPImageComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoICPImageComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoICPImageComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
boolean hasICP1 = voObj1.getICPInfoIsNotNull();
boolean hasICP2 = voObj2.getICPInfoIsNotNull();
if (hasICP1 && !hasICP2)
return -1*direction;
if (!hasICP1 && hasICP2)
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* OutpatientEpisodeWithICPInfoVoNurseImageComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoNurseImageComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoNurseImageComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoNurseImageComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
if( (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingNursingActions()
&& voObj2.getICPInfoIsNotNull() && ! voObj2.getICPInfo().getHasOutstandingNursingActions())
|| (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingNursingActions()
&& voObj2.getICPInfo() == null) )
return -1*direction;
if( ( voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingNursingActions()
&& voObj1.getICPInfoIsNotNull() && ! voObj1.getICPInfo().getHasOutstandingNursingActions())
|| (voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingNursingActions()
&& voObj1.getICPInfo() == null ) )
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* OutpatientEpisodeWithICPInfoVoMedicImageComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoMedicImageComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoMedicImageComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoMedicImageComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
if( (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingClinicalActions()
&& voObj2.getICPInfoIsNotNull() && ! voObj2.getICPInfo().getHasOutstandingClinicalActions())
|| (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingClinicalActions()
&& voObj2.getICPInfo() == null) )
return -1*direction;
if( ( voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingClinicalActions()
&& voObj1.getICPInfoIsNotNull() && ! voObj1.getICPInfo().getHasOutstandingClinicalActions())
|| (voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingClinicalActions()
&& voObj1.getICPInfo() == null ) )
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* OutpatientEpisodeWithICPInfoVoPhysioImageComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoPhysioImageComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoPhysioImageComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoPhysioImageComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
if( (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingPhysioActions()
&& voObj2.getICPInfoIsNotNull() && ! voObj2.getICPInfo().getHasOutstandingPhysioActions())
|| (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingPhysioActions()
&& voObj2.getICPInfo() == null) )
return -1*direction;
if( ( voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingPhysioActions()
&& voObj1.getICPInfoIsNotNull() && ! voObj1.getICPInfo().getHasOutstandingPhysioActions())
|| (voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingPhysioActions()
&& voObj1.getICPInfo() == null ) )
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* OutpatientEpisodeWithICPInfoVoAdminImageComparator (class definition)
*/
public static class OutpatientEpisodeWithICPInfoVoAdminImageComparator implements java.util.Comparator
{
private int direction = 1;
public OutpatientEpisodeWithICPInfoVoAdminImageComparator ()
{
this(ims.framework.enumerations.SortOrder.ASCENDING);
}
public OutpatientEpisodeWithICPInfoVoAdminImageComparator (ims.framework.enumerations.SortOrder order)
{
if (order == ims.framework.enumerations.SortOrder.DESCENDING)
{
direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
OutpatientEpisodeWithICPInfoVo voObj1 = (OutpatientEpisodeWithICPInfoVo)obj1;
OutpatientEpisodeWithICPInfoVo voObj2 = (OutpatientEpisodeWithICPInfoVo)obj2;
if( (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingAdminActions()
&& voObj2.getICPInfoIsNotNull() && ! voObj2.getICPInfo().getHasOutstandingAdminActions())
|| (voObj1.getICPInfoIsNotNull() && voObj1.getICPInfo().getHasOutstandingAdminActions()
&& voObj2.getICPInfo() == null) )
return -1*direction;
if( ( voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingAdminActions()
&& voObj1.getICPInfoIsNotNull() && ! voObj1.getICPInfo().getHasOutstandingAdminActions())
|| (voObj2.getICPInfoIsNotNull() && voObj2.getICPInfo().getHasOutstandingAdminActions()
&& voObj1.getICPInfo() == null ) )
return 1*direction;
return 0;
}
public boolean equals(Object obj)
{
return false;
}
}
/**
* getICPImageComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoICPImageComparator
*/
public static OutpatientEpisodeWithICPInfoVoICPImageComparator getICPImageComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoICPImageComparator(sortOrder);
}
/**
* getNurseImageComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoNurseImageComparator
*/
public static OutpatientEpisodeWithICPInfoVoNurseImageComparator getNurseImageComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoNurseImageComparator(sortOrder);
}
/**
* getMedicImageComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoMedicImageComparator
*/
public static OutpatientEpisodeWithICPInfoVoMedicImageComparator getMedicImageComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoMedicImageComparator(sortOrder);
}
/**
* getPhysioImageComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoPhysioImageComparator
*/
public static OutpatientEpisodeWithICPInfoVoPhysioImageComparator getPhysioImageComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoPhysioImageComparator(sortOrder);
}
/**
* getAdminImageComparator - retrieves a new instance of static class OutpatientEpisodeWithICPInfoVoAdminImageComparator
*/
public static OutpatientEpisodeWithICPInfoVoAdminImageComparator getAdminImageComparator(ims.framework.enumerations.SortOrder sortOrder)
{
return new OutpatientEpisodeWithICPInfoVo.OutpatientEpisodeWithICPInfoVoAdminImageComparator(sortOrder);
}
public boolean isValidated()
{
if(this.isBusy)
return true;
this.isBusy = true;
if(!this.isValidated)
{
this.isBusy = false;
return false;
}
if(this.icpinfo != null)
{
if(!this.icpinfo.isValidated())
{
this.isBusy = false;
return false;
}
}
if(this.referral != null)
{
if(!this.referral.isValidated())
{
this.isBusy = false;
return false;
}
}
this.isBusy = false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(this.isBusy)
return null;
this.isBusy = true;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
if(this.patient == null)
listOfErrors.add("Patient is mandatory");
if(this.icpinfo != null)
{
String[] listOfOtherErrors = this.icpinfo.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
if(this.referral != null)
{
String[] listOfOtherErrors = this.referral.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
{
this.isBusy = false;
this.isValidated = true;
return null;
}
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
this.isBusy = false;
this.isValidated = false;
return result;
}
public void clearIDAndVersion()
{
this.id = null;
this.version = 0;
}
public Object clone()
{
if(this.isBusy)
return this;
this.isBusy = true;
OutpatientEpisodeWithICPInfoVo clone = new OutpatientEpisodeWithICPInfoVo(this.id, this.version);
if(this.patient == null)
clone.patient = null;
else
clone.patient = (ims.core.vo.PatientShort)this.patient.clone();
if(this.appointmenttime == null)
clone.appointmenttime = null;
else
clone.appointmenttime = (ims.framework.utils.Time)this.appointmenttime.clone();
if(this.session == null)
clone.session = null;
else
clone.session = (ims.scheduling.vo.SessionLiteWithListownerVo)this.session.clone();
if(this.icpinfo == null)
clone.icpinfo = null;
else
clone.icpinfo = (ims.icp.vo.PatientICPLiteVo)this.icpinfo.clone();
if(this.apptstatus == null)
clone.apptstatus = null;
else
clone.apptstatus = (ims.scheduling.vo.lookups.Status_Reason)this.apptstatus.clone();
if(this.appointmentdate == null)
clone.appointmentdate = null;
else
clone.appointmentdate = (ims.framework.utils.Date)this.appointmentdate.clone();
if(this.currentappttrakingstatus == null)
clone.currentappttrakingstatus = null;
else
clone.currentappttrakingstatus = (ims.scheduling.vo.Appt_Tracking_Status_HistoryVo)this.currentappttrakingstatus.clone();
if(this.activity == null)
clone.activity = null;
else
clone.activity = (ims.core.vo.ActivityLiteVo)this.activity.clone();
if(this.appointmenttrackingstatus == null)
clone.appointmenttrackingstatus = null;
else
clone.appointmenttrackingstatus = (ims.admin.vo.AppointmentTrackingstatusColourConfigVo)this.appointmenttrackingstatus.clone();
if(this.referral == null)
clone.referral = null;
else
clone.referral = (ims.RefMan.vo.CatsReferralBreachDatesVo)this.referral.clone();
clone.haselectivelist = this.haselectivelist;
if(this.outcome == null)
clone.outcome = null;
else
clone.outcome = (ims.scheduling.vo.lookups.ApptOutcome)this.outcome.clone();
if(this.outcomeactions == null)
clone.outcomeactions = null;
else
clone.outcomeactions = (ims.scheduling.vo.AppointmentOutcomeActionVoCollection)this.outcomeactions.clone();
clone.casenotespulled = this.casenotespulled;
clone.wasselected = this.wasselected;
clone.isValidated = this.isValidated;
this.isBusy = false;
return clone;
}
public int compareTo(Object obj)
{
return compareTo(obj, true);
}
public int compareTo(Object obj, boolean caseInsensitive)
{
if (obj == null)
{
return -1;
}
if(caseInsensitive); // this is to avoid eclipse warning only.
if (!(OutpatientEpisodeWithICPInfoVo.class.isAssignableFrom(obj.getClass())))
{
throw new ClassCastException("A OutpatientEpisodeWithICPInfoVo object cannot be compared an Object of type " + obj.getClass().getName());
}
if (this.id == null)
return 1;
if (((OutpatientEpisodeWithICPInfoVo)obj).getBoId() == null)
return -1;
return this.id.compareTo(((OutpatientEpisodeWithICPInfoVo)obj).getBoId());
}
public synchronized static int generateValueObjectUniqueID()
{
return ims.vo.ValueObject.generateUniqueID();
}
public int countFieldsWithValue()
{
int count = 0;
if(this.patient != null)
count++;
if(this.appointmenttime != null)
count++;
if(this.session != null)
count++;
if(this.icpinfo != null)
count++;
if(this.apptstatus != null)
count++;
if(this.appointmentdate != null)
count++;
if(this.currentappttrakingstatus != null)
count++;
if(this.activity != null)
count++;
if(this.appointmenttrackingstatus != null)
count++;
if(this.referral != null)
count++;
if(this.haselectivelist != null)
count++;
if(this.outcome != null)
count++;
if(this.outcomeactions != null)
count++;
if(this.casenotespulled != null)
count++;
if(this.wasselected != null)
count++;
return count;
}
public int countValueObjectFields()
{
return 15;
}
protected ims.core.vo.PatientShort patient;
protected ims.framework.utils.Time appointmenttime;
protected ims.scheduling.vo.SessionLiteWithListownerVo session;
protected ims.icp.vo.PatientICPLiteVo icpinfo;
protected ims.scheduling.vo.lookups.Status_Reason apptstatus;
protected ims.framework.utils.Date appointmentdate;
protected ims.scheduling.vo.Appt_Tracking_Status_HistoryVo currentappttrakingstatus;
protected ims.core.vo.ActivityLiteVo activity;
protected ims.admin.vo.AppointmentTrackingstatusColourConfigVo appointmenttrackingstatus;
protected ims.RefMan.vo.CatsReferralBreachDatesVo referral;
protected Boolean haselectivelist;
protected ims.scheduling.vo.lookups.ApptOutcome outcome;
protected ims.scheduling.vo.AppointmentOutcomeActionVoCollection outcomeactions;
protected Boolean casenotespulled;
protected Boolean wasselected;
private boolean isValidated = false;
private boolean isBusy = false;
}
|
agpl-3.0
|
EaglesoftZJ/actor-platform
|
actor-sdk/sdk-core/core/core-shared/src/main/java/im/actor/core/network/mtp/entity/Container.java
|
1511
|
/*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
package im.actor.core.network.mtp.entity;
import java.io.IOException;
import im.actor.runtime.bser.DataInput;
import im.actor.runtime.bser.DataOutput;
// Disabling Bounds checks for speeding up calculations
/*-[
#define J2OBJC_DISABLE_ARRAY_BOUND_CHECKS 1
]-*/
public class Container extends ProtoStruct {
public static final byte HEADER = (byte) 0x0A;
private ProtoMessage[] messages;
public Container(DataInput stream) throws IOException {
super(stream);
}
public Container(ProtoMessage[] messages) {
this.messages = messages;
}
public ProtoMessage[] getMessages() {
return messages;
}
@Override
protected byte getHeader() {
return HEADER;
}
@Override
protected void writeBody(DataOutput bs) throws IOException {
if (messages != null && messages.length > 0) {
bs.writeVarInt(messages.length);
for (ProtoMessage m : messages) {
m.writeObject(bs);
}
} else {
bs.writeVarInt(0);
}
}
@Override
protected void readBody(DataInput bs) throws IOException {
int size = (int) bs.readVarInt();
messages = new ProtoMessage[size];
for (int i = 0; i < size; ++i) {
messages[i] = new ProtoMessage(bs);
}
}
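// Wire-format sketch (illustrative): the body is a varint message count followed
// by that many serialized ProtoMessage payloads, so an empty container encodes as
// the single varint 0. A possible round trip, assuming DataOutput/DataInput are
// backed by a byte array and that writeObject and toByteArray exist with these
// shapes (both are assumptions):
//
//   DataOutput out = new DataOutput();
//   new Container(new ProtoMessage[0]).writeObject(out);
//   Container parsed = new Container(new DataInput(out.toByteArray()));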
@Override
public String toString() {
return "Conatiner[" + messages.length + " items]";
}
}
|
agpl-3.0
|
wolfgangmm/exist
|
exist-core/src/main/java/org/exist/collections/IndexInfo.java
|
4384
|
/*
* eXist-db Open Source Native XML Database
* Copyright (C) 2001 The eXist-db Authors
*
* info@exist-db.org
* http://www.exist-db.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.exist.collections;
import org.exist.Indexer;
import org.exist.Namespaces;
import org.exist.collections.triggers.DocumentTriggers;
import org.exist.dom.persistent.DocumentImpl;
import org.exist.security.Permission;
import org.exist.storage.DBBroker;
import org.exist.storage.lock.ManagedDocumentLock;
import org.exist.storage.txn.Txn;
import org.exist.util.serializer.DOMStreamer;
import org.exist.xmldb.XmldbURI;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
/**
* Internal class used to track required fields between calls to
* {@link org.exist.collections.Collection#validateXMLResource(Txn, DBBroker, XmldbURI, InputSource)} and
* {@link org.exist.collections.Collection#store(Txn, DBBroker, IndexInfo, InputSource)}.
*
* @author wolf
*/
public class IndexInfo {
private final Indexer indexer;
private final CollectionConfiguration collectionConfig;
private final ManagedDocumentLock documentLock;
private DOMStreamer streamer;
private DocumentTriggers docTriggers;
private boolean creating = false;
private Permission oldDocPermissions = null;
IndexInfo(final Indexer indexer, final CollectionConfiguration collectionConfig, final ManagedDocumentLock documentLock) {
this.indexer = indexer;
this.collectionConfig = collectionConfig;
this.documentLock = documentLock;
}
public Indexer getIndexer() {
return indexer;
}
//XXX: make protected
public void setTriggers(final DocumentTriggers triggersVisitor) {
this.docTriggers = triggersVisitor;
}
//XXX: make protected
public DocumentTriggers getTriggers() {
return docTriggers;
}
public void setCreating(final boolean creating) {
this.creating = creating;
}
public boolean isCreating() {
return creating;
}
public void setOldDocPermissions(final Permission oldDocPermissions) {
this.oldDocPermissions = oldDocPermissions;
}
public Permission getOldDocPermissions() {
return oldDocPermissions;
}
void setReader(final XMLReader reader, final EntityResolver entityResolver) throws SAXException {
if(entityResolver != null) {
reader.setEntityResolver(entityResolver);
}
final LexicalHandler lexicalHandler = docTriggers == null ? indexer : docTriggers;
final ContentHandler contentHandler = docTriggers == null ? indexer : docTriggers;
reader.setProperty(Namespaces.SAX_LEXICAL_HANDLER, lexicalHandler);
reader.setContentHandler(contentHandler);
reader.setErrorHandler(indexer);
}
void setDOMStreamer(final DOMStreamer streamer) {
this.streamer = streamer;
if (docTriggers == null) {
streamer.setContentHandler(indexer);
streamer.setLexicalHandler(indexer);
} else {
streamer.setContentHandler(docTriggers);
streamer.setLexicalHandler(docTriggers);
}
}
public DOMStreamer getDOMStreamer() {
return this.streamer;
}
public DocumentImpl getDocument() {
return indexer.getDocument();
}
public CollectionConfiguration getCollectionConfig() {
return collectionConfig;
}
public ManagedDocumentLock getDocumentLock() {
return documentLock;
}
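// Intended call sequence (illustrative sketch; txn, broker, collection, docUri and
// source are hypothetical, and error handling is omitted):
//
//   IndexInfo info = collection.validateXMLResource(txn, broker, docUri, source);
//   collection.store(txn, broker, info, source);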
}
|
lgpl-2.1
|
haozhun/presto
|
presto-spi/src/test/java/com/facebook/presto/spi/type/TestRowType.java
|
3879
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spi.type;
import org.testng.annotations.Test;
import java.util.List;
import static com.facebook.presto.spi.block.MethodHandleUtil.methodHandle;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static java.util.Arrays.asList;
import static org.testng.Assert.assertEquals;
public class TestRowType
{
@Test
public void testRowDisplayName()
{
List<RowType.Field> fields = asList(
RowType.field("bool_col", BOOLEAN),
RowType.field("double_col", DOUBLE),
RowType.field("array_col", new ArrayType(VARCHAR)),
RowType.field("map_col", new MapType(
BOOLEAN,
DOUBLE,
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"))));
RowType row = RowType.from(fields);
assertEquals(
row.getDisplayName(),
"row(bool_col boolean, double_col double, array_col array(varchar), map_col map(boolean, double))");
}
@Test
public void testRowDisplayNoColumnNames()
{
List<Type> types = asList(
BOOLEAN,
DOUBLE,
new ArrayType(VARCHAR),
new MapType(
BOOLEAN,
DOUBLE,
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation")));
RowType row = RowType.anonymous(types);
assertEquals(
row.getDisplayName(),
"row(boolean, double, array(varchar), map(boolean, double))");
}
@Test
public void testRowDisplayMixedUnnamedColumns()
{
List<RowType.Field> fields = asList(
RowType.field(BOOLEAN),
RowType.field("double_col", DOUBLE),
RowType.field(new ArrayType(VARCHAR)),
RowType.field("map_col", new MapType(
BOOLEAN,
DOUBLE,
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"),
methodHandle(TestRowType.class, "throwUnsupportedOperation"))));
RowType row = RowType.from(fields);
assertEquals(
row.getDisplayName(),
"row(boolean, double_col double, array(varchar), map_col map(boolean, double))");
}
public static void throwUnsupportedOperation()
{
throw new UnsupportedOperationException();
}
}
|
apache-2.0
|
ascherbakoff/ignite
|
modules/core/src/test/java/org/apache/ignite/platform/PlatformTestExecutor.java
|
1074
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.platform;
import java.util.concurrent.Executor;
/**
* Test executor.
*/
public class PlatformTestExecutor implements Executor {
/** {@inheritDoc} */
@Override public void execute(Runnable runnable) {
runnable.run();
}
}
|
apache-2.0
|
googleapis/google-api-java-client-services
|
clients/google-api-services-androidmanagement/v1/1.31.0/com/google/api/services/androidmanagement/v1/model/ManagedPropertyEntry.java
|
2882
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.androidmanagement.v1.model;
/**
* An entry of a managed property.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Android Management API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ManagedPropertyEntry extends com.google.api.client.json.GenericJson {
/**
* The human-readable name of the value. Localized.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The machine-readable value of the entry, which should be used in the configuration. Not
* localized.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String value;
/**
* The human-readable name of the value. Localized.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The human-readable name of the value. Localized.
* @param name name or {@code null} for none
*/
public ManagedPropertyEntry setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The machine-readable value of the entry, which should be used in the configuration. Not
* localized.
* @return value or {@code null} for none
*/
public java.lang.String getValue() {
return value;
}
/**
* The machine-readable value of the entry, which should be used in the configuration. Not
* localized.
* @param value value or {@code null} for none
*/
public ManagedPropertyEntry setValue(java.lang.String value) {
this.value = value;
return this;
}
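// Illustrative sketch: both setters return this, so entries can be built fluently
// (the literal values are hypothetical):
//
//   ManagedPropertyEntry entry = new ManagedPropertyEntry()
//       .setName("Allow camera")
//       .setValue("allow_camera");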
@Override
public ManagedPropertyEntry set(String fieldName, Object value) {
return (ManagedPropertyEntry) super.set(fieldName, value);
}
@Override
public ManagedPropertyEntry clone() {
return (ManagedPropertyEntry) super.clone();
}
}
|
apache-2.0
|
electrum/presto
|
core/trino-main/src/main/java/io/trino/operator/scalar/QuantileDigestFunctions.java
|
5787
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator.scalar;
import io.airlift.slice.Slice;
import io.airlift.stats.QuantileDigest;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.Description;
import io.trino.spi.function.ScalarFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
import static io.trino.operator.aggregation.FloatingPointBitsConverterUtil.sortableIntToFloat;
import static io.trino.operator.aggregation.FloatingPointBitsConverterUtil.sortableLongToDouble;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.util.Failures.checkCondition;
import static java.lang.Float.floatToRawIntBits;
public final class QuantileDigestFunctions
{
public static final double DEFAULT_ACCURACY = 0.01;
public static final long DEFAULT_WEIGHT = 1L;
private QuantileDigestFunctions() {}
@ScalarFunction("value_at_quantile")
@Description("Given an input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType(StandardTypes.DOUBLE)
public static double valueAtQuantileDouble(@SqlType("qdigest(double)") Slice input, @SqlType(StandardTypes.DOUBLE) double quantile)
{
return sortableLongToDouble(valueAtQuantileBigint(input, quantile));
}
@ScalarFunction("value_at_quantile")
@Description("Given an input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType(StandardTypes.REAL)
public static long valueAtQuantileReal(@SqlType("qdigest(real)") Slice input, @SqlType(StandardTypes.DOUBLE) double quantile)
{
return floatToRawIntBits(sortableIntToFloat((int) valueAtQuantileBigint(input, quantile)));
}
@ScalarFunction("value_at_quantile")
@Description("Given an input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType(StandardTypes.BIGINT)
public static long valueAtQuantileBigint(@SqlType("qdigest(bigint)") Slice input, @SqlType(StandardTypes.DOUBLE) double quantile)
{
return new QuantileDigest(input).getQuantile(quantile);
}
@ScalarFunction("values_at_quantiles")
@Description("For each input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType("array(double)")
public static Block valuesAtQuantilesDouble(@SqlType("qdigest(double)") Slice input, @SqlType("array(double)") Block percentilesArrayBlock)
{
QuantileDigest digest = new QuantileDigest(input);
BlockBuilder output = DOUBLE.createBlockBuilder(null, percentilesArrayBlock.getPositionCount());
for (int i = 0; i < percentilesArrayBlock.getPositionCount(); i++) {
DOUBLE.writeDouble(output, sortableLongToDouble(digest.getQuantile(DOUBLE.getDouble(percentilesArrayBlock, i))));
}
return output.build();
}
@ScalarFunction("values_at_quantiles")
@Description("For each input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType("array(real)")
public static Block valuesAtQuantilesReal(@SqlType("qdigest(real)") Slice input, @SqlType("array(double)") Block percentilesArrayBlock)
{
QuantileDigest digest = new QuantileDigest(input);
BlockBuilder output = REAL.createBlockBuilder(null, percentilesArrayBlock.getPositionCount());
for (int i = 0; i < percentilesArrayBlock.getPositionCount(); i++) {
REAL.writeLong(output, floatToRawIntBits(sortableIntToFloat((int) digest.getQuantile(DOUBLE.getDouble(percentilesArrayBlock, i)))));
}
return output.build();
}
@ScalarFunction("values_at_quantiles")
@Description("For each input q between [0, 1], find the value whose rank in the sorted sequence of the n values represented by the qdigest is qn.")
@SqlType("array(bigint)")
public static Block valuesAtQuantilesBigint(@SqlType("qdigest(bigint)") Slice input, @SqlType("array(double)") Block percentilesArrayBlock)
{
QuantileDigest digest = new QuantileDigest(input);
BlockBuilder output = BIGINT.createBlockBuilder(null, percentilesArrayBlock.getPositionCount());
for (int i = 0; i < percentilesArrayBlock.getPositionCount(); i++) {
BIGINT.writeLong(output, digest.getQuantile(DOUBLE.getDouble(percentilesArrayBlock, i)));
}
return output.build();
}
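// Illustrative sketch (names hypothetical): given a Slice holding a serialized
// qdigest(double), the median is read back with quantile 0.5.
//
//   Slice serialized = ...;
//   double median = QuantileDigestFunctions.valueAtQuantileDouble(serialized, 0.5);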
public static double verifyAccuracy(double accuracy)
{
checkCondition(accuracy > 0 && accuracy < 1, INVALID_FUNCTION_ARGUMENT, "Percentile accuracy must be exclusively between 0 and 1, was %s", accuracy);
return accuracy;
}
public static long verifyWeight(long weight)
{
checkCondition(weight > 0, INVALID_FUNCTION_ARGUMENT, "Percentile weight must be > 0, was %s", weight);
return weight;
}
}
|
apache-2.0
|
Gugli/Openfire
|
src/plugins/rayo/src/java/org/ifsoft/rtp/RTCPSourceDescriptionChunk.java
|
617
|
package org.ifsoft.rtp;
import java.util.ArrayList;
public class RTCPSourceDescriptionChunk
{
public RTCPSourceDescriptionChunk()
{
}
public Byte[] getBytes()
{
return getBytes(this);
}
public static Byte[] getBytes(RTCPSourceDescriptionChunk chunk)
{
if(chunk == null)
{
return new Byte[0];
} else
{
ArrayList<Byte> list = new ArrayList<Byte>();
return list.toArray(new Byte[0]);
}
}
public static RTCPSourceDescriptionChunk parseBytes(Byte chunkBytes[])
{
return null;
}
}
|
apache-2.0
|
masaki-yamakawa/geode
|
geode-core/src/test/java/org/apache/geode/internal/cache/RequestFilterInfoMessageTest.java
|
2325
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.internal.cache.InitialImageOperation.RequestFilterInfoMessage;
public class RequestFilterInfoMessageTest {
private ClusterDistributionManager dm;
private InternalCache cache;
private String path;
private LocalRegion region;
@Before
public void setUp() {
path = "path";
dm = mock(ClusterDistributionManager.class);
cache = mock(InternalCache.class);
region = mock(LocalRegion.class);
when(dm.getCache()).thenReturn(cache);
when(cache.getInternalRegionByPath(path)).thenReturn(region);
}
@Test
public void shouldBeMockable() throws Exception {
RequestFilterInfoMessage mockRequestFilterInfoMessage = mock(RequestFilterInfoMessage.class);
when(mockRequestFilterInfoMessage.getProcessorType()).thenReturn(1);
assertThat(mockRequestFilterInfoMessage.getProcessorType()).isEqualTo(1);
}
@Test
public void getsRegionFromCacheFromDM() {
RequestFilterInfoMessage message = new RequestFilterInfoMessage();
message.regionPath = path;
message.process(dm);
verify(dm, times(1)).getCache();
verify(cache, times(1)).getInternalRegionByPath(path);
}
}
|
apache-2.0
|
charafau/weechat-android
|
weechat-relay/src/main/java/com/ubergeek42/weechat/relay/protocol/Hashtable.java
|
1940
|
/*******************************************************************************
* Copyright 2012 Keith Johnson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.ubergeek42.weechat.relay.protocol;
import java.util.HashMap;
/**
 * Hashtable implementation from Weechat. See the following URL(s) for more information:
* http://www.weechat.org/files/doc/devel/weechat_plugin_api.en.html#hashtables
* http://www.weechat.org/files/doc/devel/weechat_relay_protocol.en.html#object_hashtable
*
* @author ubergeek42<kj@ubergeek42.com>
*/
public class Hashtable extends RelayObject {
private HashMap<String, RelayObject> hashtable = new HashMap<String, RelayObject>();
protected Hashtable(WType keyType, WType valueType) {
}
protected void put(RelayObject key, RelayObject value) {
hashtable.put(key.toString(), value);
}
public RelayObject get(String key) {
return hashtable.get(key);
}
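// Illustrative sketch: entries are keyed by the key object's string form, so a
// value stored under an Int key 1 is read back with get("1"). The objects below
// are hypothetical; put(...) is protected and normally called by the parser.
//
//   table.put(intKey, someValue);
//   RelayObject value = table.get(intKey.toString());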
/**
* Debug toString
*/
@Override
public String toString() {
StringBuilder map = new StringBuilder();
for (String key : hashtable.keySet()) {
RelayObject value = hashtable.get(key);
map.append(key);
map.append(" -> ");
map.append(value);
map.append(", ");
}
return map.toString();
}
}
|
apache-2.0
|
NotBadPad/hadoop-hbase
|
src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceMetrics.java
|
4195
|
/**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import org.apache.hadoop.hbase.metrics.MetricsRate;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.jvm.JvmMetrics;
import org.apache.hadoop.metrics.util.MetricsIntValue;
import org.apache.hadoop.metrics.util.MetricsLongValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;
/**
* This class is for maintaining the various replication statistics
* for a source and publishing them through the metrics interfaces.
*/
public class ReplicationSourceMetrics implements Updater {
private final MetricsRecord metricsRecord;
private MetricsRegistry registry = new MetricsRegistry();
/** Rate of shipped operations by the source */
public final MetricsRate shippedOpsRate =
new MetricsRate("shippedOpsRate", registry);
/** Rate of shipped batches by the source */
public final MetricsRate shippedBatchesRate =
new MetricsRate("shippedBatchesRate", registry);
/** Rate of log entries (can be multiple Puts) read from the logs */
public final MetricsRate logEditsReadRate =
new MetricsRate("logEditsReadRate", registry);
/** Rate of log entries filtered by the source */
public final MetricsRate logEditsFilteredRate =
new MetricsRate("logEditsFilteredRate", registry);
/** Age of the last operation that was shipped by the source */
private final MetricsLongValue ageOfLastShippedOp =
new MetricsLongValue("ageOfLastShippedOp", registry);
/**
* Current size of the queue of logs to replicate,
* excluding the one being processed at the moment
*/
public final MetricsIntValue sizeOfLogQueue =
new MetricsIntValue("sizeOfLogQueue", registry);
/**
* Constructor used to register the metrics
* @param id Name of the source this class is monitoring
*/
public ReplicationSourceMetrics(String id) {
MetricsContext context = MetricsUtil.getContext("hbase");
String name = Thread.currentThread().getName();
metricsRecord = MetricsUtil.createRecord(context, "replication");
metricsRecord.setTag("RegionServer", name);
context.registerUpdater(this);
try {
id = URLEncoder.encode(id, "UTF8");
} catch (UnsupportedEncodingException e) {
id = "CAN'T ENCODE UTF8";
}
// export for JMX
new ReplicationStatistics(this.registry, "ReplicationSource for " + id);
}
/**
* Set the age of the last edit that was shipped
* @param timestamp write time of the edit
*/
public void setAgeOfLastShippedOp(long timestamp) {
ageOfLastShippedOp.set(System.currentTimeMillis() - timestamp);
}
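// Illustrative sketch: callers pass the edit's write time, so the gauge records
// replication lag in milliseconds (the entry variable and its accessors are
// assumptions):
//
//   metrics.setAgeOfLastShippedOp(entry.getKey().getWriteTime());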
@Override
public void doUpdates(MetricsContext metricsContext) {
synchronized (this) {
this.shippedOpsRate.pushMetric(this.metricsRecord);
this.shippedBatchesRate.pushMetric(this.metricsRecord);
this.logEditsReadRate.pushMetric(this.metricsRecord);
this.logEditsFilteredRate.pushMetric(this.metricsRecord);
this.ageOfLastShippedOp.pushMetric(this.metricsRecord);
this.sizeOfLogQueue.pushMetric(this.metricsRecord);
}
this.metricsRecord.update();
}
}
|
apache-2.0
|
stoksey69/googleads-java-lib
|
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201411/ContentMetadataKeyHierarchyServiceInterfaceperformContentMetadataKeyHierarchyActionResponse.java
|
1704
|
package com.google.api.ads.dfp.jaxws.v201411;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for performContentMetadataKeyHierarchyActionResponse element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="performContentMetadataKeyHierarchyActionResponse">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="rval" type="{https://www.google.com/apis/ads/publisher/v201411}UpdateResult" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"rval"
})
@XmlRootElement(name = "performContentMetadataKeyHierarchyActionResponse")
public class ContentMetadataKeyHierarchyServiceInterfaceperformContentMetadataKeyHierarchyActionResponse {
protected UpdateResult rval;
/**
* Gets the value of the rval property.
*
* @return
* possible object is
* {@link UpdateResult }
*
*/
public UpdateResult getRval() {
return rval;
}
/**
* Sets the value of the rval property.
*
* @param value
* allowed object is
* {@link UpdateResult }
*
*/
public void setRval(UpdateResult value) {
this.rval = value;
}
}
|
apache-2.0
|
dashorst/wicket
|
wicket-core/src/test/java/org/apache/wicket/AddChildToParentInOnInitializeTest.java
|
3105
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket;
import org.apache.wicket.markup.IMarkupResourceStreamProvider;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.util.resource.IResourceStream;
import org.apache.wicket.util.resource.StringResourceStream;
import org.apache.wicket.util.tester.WicketTestCase;
import org.junit.Test;
/**
* https://issues.apache.org/jira/browse/WICKET-6021
*/
public class AddChildToParentInOnInitializeTest extends WicketTestCase
{
@Test
public void addChildToParentInOnInitialize()
{
tester.startPage(TestPage.class);
tester.assertRenderedPage(TestPage.class);
tester.assertComponent(TestPage.PARENT_ID + ':' + TestPage.FIRST_CHILD_ID, WebMarkupContainer.class);
tester.assertComponent(TestPage.PARENT_ID, Parent.class);
}
public static class TestPage extends WebPage implements IMarkupResourceStreamProvider
{
public static final String FIRST_CHILD_ID = "firstChild";
public static final String PARENT_ID = "parentContainer";
public static final String SECOND_CHILD_ID = "thirdChild";
public static final String THIRD_CHILD_ID = "fourthChild";
@Override
protected void onInitialize()
{
super.onInitialize();
final Parent parent = new Parent();
add(parent);
parent.addOrReplace(new WebMarkupContainer(TestPage.THIRD_CHILD_ID));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container, Class<?> containerClass)
{
return new StringResourceStream("<html><head></head><body>" +
"<div wicket:id='"+ PARENT_ID +"'>" +
"<div wicket:id='"+FIRST_CHILD_ID+"'></div>" +
"<div wicket:id='"+ SECOND_CHILD_ID +"'></div>" +
"<div wicket:id='"+ THIRD_CHILD_ID +"'></div>" +
"</div>" +
"</body></html>");
}
}
private static class Parent extends WebMarkupContainer
{
public Parent()
{
super(TestPage.PARENT_ID);
add(new WebMarkupContainer(TestPage.FIRST_CHILD_ID));
add(new SecondChild());
}
}
private static class SecondChild extends WebMarkupContainer
{
public SecondChild()
{
super(TestPage.SECOND_CHILD_ID);
}
@Override
protected void onInitialize()
{
super.onInitialize();
getParent().addOrReplace(new WebMarkupContainer(TestPage.THIRD_CHILD_ID));
}
}
}
|
apache-2.0
|
uaraven/nano
|
sample/webservice/HelloAmazonProductAdvertising/src/com/amazon/webservices/awsecommerceservice/_2011_08_01/Collections.java
|
619
|
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.amazon.webservices.awsecommerceservice._2011_08_01;
import java.io.Serializable;
import com.leansoft.nano.annotation.*;
import com.amazon.webservices.awsecommerceservice._2011_08_01.collections.Collection;
import java.util.List;
@RootElement(name = "Collections", namespace = "http://webservices.amazon.com/AWSECommerceService/2011-08-01")
public class Collections implements Serializable {
private static final long serialVersionUID = -1L;
@Element(name = "Collection")
@Order(value=0)
public List<Collection> collection;
}
|
apache-2.0
|
gfyoung/elasticsearch
|
server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
|
7036
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.List;
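/**
 * Transport-level nodes action that collects the cluster {@link MetaData} persisted locally on a
 * set of nodes (registered under {@code ACTION_NAME}).
 */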
public class TransportNodesListGatewayMetaState extends TransportNodesAction<TransportNodesListGatewayMetaState.Request,
TransportNodesListGatewayMetaState.NodesGatewayMetaState,
TransportNodesListGatewayMetaState.NodeRequest,
TransportNodesListGatewayMetaState.NodeGatewayMetaState> {
public static final String ACTION_NAME = "internal:gateway/local/meta_state";
private final GatewayMetaState metaState;
@Inject
public TransportNodesListGatewayMetaState(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, ActionFilters actionFilters, GatewayMetaState metaState) {
super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters,
Request::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeGatewayMetaState.class);
this.metaState = metaState;
}
public ActionFuture<NodesGatewayMetaState> list(String[] nodesIds, @Nullable TimeValue timeout) {
PlainActionFuture<NodesGatewayMetaState> future = PlainActionFuture.newFuture();
execute(new Request(nodesIds).timeout(timeout), future);
return future;
}
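    // A hedged usage sketch, not part of the original class: gather the persisted metadata of a
    // set of nodes and keep the non-null entries. The 30 second timeout is an arbitrary example.
    public List<MetaData> exampleListMetaData(String[] nodesIds) {
        NodesGatewayMetaState state = list(nodesIds, TimeValue.timeValueSeconds(30)).actionGet();
        List<MetaData> result = new java.util.ArrayList<>();
        for (NodeGatewayMetaState node : state.getNodes()) {
            if (node.metaData() != null) { // a node may not have stored any metadata yet
                result.add(node.metaData());
            }
        }
        return result;
    }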
@Override
protected boolean transportCompress() {
return true; // compress since the metadata can become large
}
@Override
protected NodeRequest newNodeRequest(String nodeId, Request request) {
return new NodeRequest(nodeId);
}
@Override
protected NodeGatewayMetaState newNodeResponse() {
return new NodeGatewayMetaState();
}
@Override
protected NodesGatewayMetaState newResponse(Request request, List<NodeGatewayMetaState> responses, List<FailedNodeException> failures) {
return new NodesGatewayMetaState(clusterService.getClusterName(), responses, failures);
}
@Override
protected NodeGatewayMetaState nodeOperation(NodeRequest request) {
try {
return new NodeGatewayMetaState(clusterService.localNode(), metaState.loadMetaState());
} catch (Exception e) {
throw new ElasticsearchException("failed to load metadata", e);
}
}
public static class Request extends BaseNodesRequest<Request> {
public Request() {
}
public Request(String... nodesIds) {
super(nodesIds);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
public static class NodesGatewayMetaState extends BaseNodesResponse<NodeGatewayMetaState> {
NodesGatewayMetaState() {
}
public NodesGatewayMetaState(ClusterName clusterName, List<NodeGatewayMetaState> nodes, List<FailedNodeException> failures) {
super(clusterName, nodes, failures);
}
@Override
protected List<NodeGatewayMetaState> readNodesFrom(StreamInput in) throws IOException {
return in.readStreamableList(NodeGatewayMetaState::new);
}
@Override
protected void writeNodesTo(StreamOutput out, List<NodeGatewayMetaState> nodes) throws IOException {
out.writeStreamableList(nodes);
}
}
public static class NodeRequest extends BaseNodeRequest {
public NodeRequest() {
}
NodeRequest(String nodeId) {
super(nodeId);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
public static class NodeGatewayMetaState extends BaseNodeResponse {
private MetaData metaData;
NodeGatewayMetaState() {
}
public NodeGatewayMetaState(DiscoveryNode node, MetaData metaData) {
super(node);
this.metaData = metaData;
}
public MetaData metaData() {
return metaData;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
if (in.readBoolean()) {
metaData = MetaData.readFrom(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (metaData == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
metaData.writeTo(out);
}
}
}
}
|
apache-2.0
|
droolsjbpm/drools
|
drools-test-coverage/test-compiler-integration/src/test/java/org/drools/compiler/integrationtests/BackwardChainingTest.java
|
67009
|
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.compiler.integrationtests.incrementalcompilation.TestUtil;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.testcoverage.common.model.Address;
import org.drools.testcoverage.common.model.Person;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.SerializationHelper;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.definition.KiePackage;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.api.runtime.rule.QueryResults;
import org.kie.api.runtime.rule.QueryResultsRow;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.io.ResourceFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.kie.api.runtime.rule.Variable.v;
@RunWith(Parameterized.class)
public class BackwardChainingTest extends AbstractBackwardChainingTest {
public BackwardChainingTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
super(kieBaseTestConfiguration);
}
@Parameterized.Parameters(name = "KieBase type={0}")
public static Collection<Object[]> getParameters() {
return TestParametersUtil.getKieBaseCloudConfigurations(true);
}
@Test(timeout = 10000)
public void testQueryPatternBindingAsResult() throws IOException, ClassNotFoundException {
String str = "" +
"package org.drools.compiler.test \n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global java.util.List list\n" +
"query peeps( Person $p, String $name, String $likes, int $age ) \n" +
" $p := Person( $name := name, $likes := likes, $age := age; ) \n" +
"end\n";
str += "rule x1\n" +
"when\n" +
" String( this == \"go1\" )\n" +
// output, output, output ,output
" ?peeps($p, $name1; $likes1 : $likes, $age1 : $age )\n" +
"then\n" +
" list.add( $p );\n" +
" list.add( $name1 + \" : \" + $age1 );\n" +
"end \n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, str);
KieSession ksession = kbase.newKieSession();
try {
final List list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p1 = new Person("darth",
"stilton",
100);
final Person p2 = new Person("darth",
"stilton",
200);
final Person p3 = new Person("yoda",
"stilton",
300);
final Person p4 = new Person("luke",
"brie",
300);
final Person p5 = new Person("bobba",
"cheddar",
300);
ksession.insert(p1);
ksession.insert(p2);
ksession.insert(p3);
ksession.insert(p4);
ksession.insert(p5);
ksession.insert("go1");
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
if (kieBaseTestConfiguration.isIdentity()) {
assertEquals(10, list.size());
assertEquals(p1, list.get(list.indexOf("darth : 100") - 1));
assertTrue(list.contains("darth : 100"));
assertEquals(p2, list.get(list.indexOf("darth : 200") - 1));
assertTrue(list.contains("darth : 200"));
assertEquals(p3, list.get(list.indexOf("yoda : 300") - 1));
assertTrue(list.contains("yoda : 300"));
assertEquals(p4, list.get(list.indexOf("luke : 300") - 1));
assertTrue(list.contains("luke : 300"));
assertEquals(p5, list.get(list.indexOf("bobba : 300") - 1));
assertTrue(list.contains("bobba : 300"));
} else {
assertEquals(8, list.size());
assertEquals(p1, list.get(list.indexOf("darth : 100") - 1));
assertTrue(list.contains("darth : 100"));
assertEquals(p3, list.get(list.indexOf("yoda : 300") - 1));
assertTrue(list.contains("yoda : 300"));
assertEquals(p4, list.get(list.indexOf("luke : 300") - 1));
assertTrue(list.contains("luke : 300"));
assertEquals(p5, list.get(list.indexOf("bobba : 300") - 1));
assertTrue(list.contains("bobba : 300"));
}
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueriesWithNestedAccessorsAllOutputs() throws IOException, ClassNotFoundException {
String drl = "" +
"package org.drools.compiler.test \n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global java.util.List list\n" +
"query peeps( String $name, String $likes, String $street ) \n" +
" Person( $name := name, $likes := likes, $street := address.street ) \n" +
"end\n";
drl += "rule x1\n" +
"when\n" +
" String( this == \"go1\" )\n" +
// output, output, ,output
" ?peeps($name1; $likes1 : $likes, $street1 : $street )\n" +
"then\n" +
" list.add( $name1 + \" : \" + $likes1 + \" : \" + $street1 );\n" +
"end \n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p1 = new Person("darth",
"stilton",
100);
p1.setAddress(new Address("s1"));
final Person p2 = new Person("yoda",
"stilton",
300);
p2.setAddress(new Address("s2"));
ksession.insert(p1);
ksession.insert(p2);
ksession.insert("go1");
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
assertEquals(2, list.size());
assertTrue(list.contains("darth : stilton : s1"));
assertTrue(list.contains("yoda : stilton : s2"));
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueriesWithNestedAccessorsMixedArgs() {
String drl = "" +
"package org.drools.compiler.test \n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global java.util.List list\n" +
"query peeps( String $name, String $likes, String $street ) \n" +
" Person( $name := name, $likes := likes, $street := address.street ) \n" +
"end\n";
drl += "rule x1\n" +
"when\n" +
" $s : String()\n" +
// output, output, ,input
" ?peeps($name1; $likes1 : $likes, $street : $s )\n" +
"then\n" +
" list.add( $name1 + \" : \" + $likes1 + \" : \" + $s );\n" +
"end \n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p1 = new Person("darth",
"stilton",
100);
p1.setAddress(new Address("s1"));
final Person p2 = new Person("yoda",
"stilton",
300);
p2.setAddress(new Address("s2"));
ksession.insert(p1);
ksession.insert(p2);
ksession.insert("s1");
ksession.fireAllRules();
assertEquals(1, list.size());
assertTrue(list.contains("darth : stilton : s1"));
list.clear();
ksession.insert("s2");
ksession.fireAllRules();
assertEquals(1, list.size());
assertTrue(list.contains("yoda : stilton : s2"));
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueryWithDynamicData() throws IOException, ClassNotFoundException {
String drl = "" +
"package org.drools.compiler.test \n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global java.util.List list\n" +
"query peeps( Person $p, String $name, String $likes, int $age ) \n" +
" $p := Person( ) from new Person( $name, $likes, $age ) \n" +
"end\n";
drl += "rule x1\n" +
"when\n" +
" $n1 : String( )\n" +
// output, input ,input ,input
" ?peeps($p; $name : $n1, $likes : \"stilton\", $age : 100 )\n" +
"then\n" +
" list.add( $p );\n" +
"end \n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p1 = new Person("darth",
"stilton",
100);
final Person p2 = new Person("yoda",
"stilton",
100);
ksession.insert("darth");
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
assertEquals(1, list.size());
assertEquals(p1, list.get(0));
list.clear();
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.insert("yoda");
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
assertEquals(1, list.size());
assertEquals(p2, list.get(0));
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueryWithDynamicInsert() throws IOException, ClassNotFoundException {
String drl = "" +
"package org.drools.compiler.test \n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global java.util.List list\n" +
"query peeps( Person $p, String $name, String $likes, int $age ) \n" +
" $p := Person( ) from new Person( $name, $likes, $age ) \n" +
"end\n";
drl += "rule x1\n" +
"when\n" +
" $n1 : String( )\n" +
" not Person( name == 'darth' )\n " +
// output, input ,input ,input
" ?peeps($p; $name : $n1, $likes : \"stilton\", $age : 100 )\n" +
"then\n" +
" insert( $p );\n" +
"end \n";
drl += "rule x2\n" +
"when\n" +
" $p : Person( )\n" +
"then\n" +
" list.add( $p );\n" +
"end \n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p1 = new Person("darth",
"stilton",
100);
ksession.insert("darth");
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
ksession.insert("yoda"); // darth exists, so yoda won't get created
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
ksession.fireAllRules();
assertEquals(1, list.size());
assertEquals(p1, list.get(0));
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueryWithOr() {
final String drl = "" +
"package org.drools.compiler.test \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"import " + BackwardChainingTest.class.getName() + ".Q\n" +
"import " + BackwardChainingTest.class.getName() + ".R\n" +
"import " + BackwardChainingTest.class.getName() + ".S\n" +
"global List list\n" +
"dialect \"mvel\"\n" +
"\n" +
"query q(int x)\n" +
" Q( x := value )\n" +
"end\n" +
"\n" +
"query r(int x)\n" +
" R( x := value )\n" +
"end\n" +
"\n" +
"query s(int x)\n" +
" S( x := value ) \n" +
"end\n" +
"\n" +
"query p(int x)\n" +
" (?q(x;) and ?r(x;) ) \n" +
" or\n" +
" ?s(x;)\n" +
"end\n" +
"rule init when\n" +
"then\n" +
" insert( new Q(1) );\n " +
" insert( new Q(5) );\n " +
" insert( new Q(6) );\n " +
" insert( new R(1) );\n " +
" insert( new R(4) );\n " +
" insert( new R(6) );\n " +
" insert( new R(2) );\n " +
" insert( new S(2) );\n " +
" insert( new S(3) );\n " +
" insert( new S(6) );\n " +
"end\n" +
"";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<Integer> list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.fireAllRules();
QueryResults results;
list.clear();
results = ksession.getQueryResults("p", new Integer[]{0});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(0, list.size());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{1});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(1, list.size());
assertEquals(1, list.get(0).intValue());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{2});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(1, list.size());
assertEquals(2, list.get(0).intValue());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{3});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(1, list.size());
assertEquals(3, list.get(0).intValue());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{4});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(0, list.size());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{5});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(0, list.size());
list.clear();
results = ksession.getQueryResults("p", new Integer[]{6});
for (final QueryResultsRow result : results) {
list.add((Integer) result.get("x"));
}
assertEquals(2, list.size());
assertEquals(6, list.get(0).intValue());
assertEquals(6, list.get(1).intValue());
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testGenealogy() throws IOException, ClassNotFoundException {
// from http://kti.mff.cuni.cz/~bartak/prolog/genealogy.html
final String drl = "" +
"package org.drools.compiler.test2 \n" +
"global java.util.List list\n" +
"dialect \"mvel\"\n" +
"query man( String name ) \n" +
" " + BackwardChainingTest.class.getName() + ".Man( name := name ) \n" +
"end\n" +
"query woman( String name ) \n" +
" " + BackwardChainingTest.class.getName() + ".Woman( name := name ) \n" +
"end\n" +
"query parent( String parent, String child ) \n" +
" " + BackwardChainingTest.class.getName() + ".Parent( parent := parent, child := child ) \n" +
"end\n" +
"query father( String father, String child ) \n" +
" ?man( father; ) \n" +
" ?parent( father, child; ) \n" +
"end\n" +
"query mother( String mother, String child ) \n" +
" ?woman( mother; ) \n" +
" ?parent( mother, child; ) \n" +
"end\n" +
"query son( String son, String parent ) \n" +
" ?man( son; ) \n" +
" ?parent( parent, son; ) \n" +
"end\n" +
"query daughter( String daughter, String parent ) \n" +
" ?woman( daughter; ) \n" +
" ?parent( parent, daughter; ) \n" +
"end\n" +
"query siblings( String c1, String c2 ) \n" +
" ?parent( $p, c1; ) \n" +
" ?parent( $p, c2; ) \n" +
" eval( !c1.equals( c2 ) )\n" +
"end\n" +
"query fullSiblings( String c1, String c2 )\n" +
" ?parent( $p1, c1; ) ?parent( $p1, c2; )\n" +
" ?parent( $p2, c1; ) ?parent( $p2, c2; )\n" +
" eval( !c1.equals( c2 ) && !$p1.equals( $p2 ) )\n" +
"end\n" +
"query fullSiblings2( String c1, String c2 )\n" +
" ?father( $p1, c1; ) ?father( $p1, c2; )\n" +
" ?mother( $p2, c1; ) ?mother( $p2, c2; )\n" +
" eval( !c1.equals( c2 ) )\n" +
"end\n" +
"query uncle( String uncle, String n )\n" +
" ?man( uncle; ) ?siblings( uncle, parent; )\n" +
" ?parent( parent, n; )\n " +
"end\n" +
"query aunt( String aunt, String n )\n" +
" ?woman( aunt; ) ?siblings( aunt, parent; )\n" +
" ?parent( parent, n; )\n " +
"end\n" +
"query grantParents( String gp, String gc )\n" +
" ?parent( gp, p; ) ?parent( p, gc; )\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
// grand parents
ksession.insert(new Man("john"));
ksession.insert(new Woman("janet"));
// parent
ksession.insert(new Man("adam"));
ksession.insert(new Parent("john",
"adam"));
ksession.insert(new Parent("janet",
"adam"));
ksession.insert(new Man("stan"));
ksession.insert(new Parent("john",
"stan"));
ksession.insert(new Parent("janet",
"stan"));
// grand parents
ksession.insert(new Man("carl"));
ksession.insert(new Woman("tina"));
//
// parent
ksession.insert(new Woman("eve"));
ksession.insert(new Parent("carl",
"eve"));
ksession.insert(new Parent("tina",
"eve"));
//
// parent
ksession.insert(new Woman("mary"));
ksession.insert(new Parent("carl",
"mary"));
ksession.insert(new Parent("tina",
"mary"));
ksession.insert(new Man("peter"));
ksession.insert(new Parent("adam",
"peter"));
ksession.insert(new Parent("eve",
"peter"));
ksession.insert(new Man("paul"));
ksession.insert(new Parent("adam",
"paul"));
ksession.insert(new Parent("mary",
"paul"));
ksession.insert(new Woman("jill"));
ksession.insert(new Parent("adam",
"jill"));
ksession.insert(new Parent("eve",
"jill"));
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true);
list.clear();
QueryResults results = ksession.getQueryResults("woman", v);
for (final QueryResultsRow result : results) {
list.add((String) result.get("name"));
}
assertEquals(5, list.size());
assertContains(new String[]{"janet", "mary", "tina", "eve", "jill"}, list);
list.clear();
results = ksession.getQueryResults("man", v);
for (final QueryResultsRow result : results) {
list.add((String) result.get("name"));
}
assertEquals(6, list.size());
assertContains(new String[]{"stan", "john", "peter", "carl", "adam", "paul"}, list);
list.clear();
results = ksession.getQueryResults("father", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("father") + ", " + result.get("child"));
}
assertEquals(7, list.size());
assertContains(new String[]{"john, adam", "john, stan",
"carl, eve", "carl, mary",
"adam, peter", "adam, paul",
"adam, jill"}, list);
list.clear();
results = ksession.getQueryResults("mother", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("mother") + ", " + result.get("child"));
}
assertEquals(7, list.size());
assertContains(new String[]{"janet, adam", "janet, stan",
"mary, paul", "tina, eve",
"tina, mary", "eve, peter",
"eve, jill"},
list);
list.clear();
results = ksession.getQueryResults("son",
v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("son") + ", " + result.get("parent"));
}
assertEquals(8,
list.size());
assertContains(new String[]{"stan, john", "stan, janet",
"peter, adam", "peter, eve",
"adam, john", "adam, janet",
"paul, mary", "paul, adam"}, list);
list.clear();
results = ksession.getQueryResults("daughter", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("daughter") + ", " + result.get("parent"));
}
assertEquals(6, list.size());
assertContains(new String[]{"mary, carl", "mary, tina",
"eve, carl", "eve, tina",
"jill, adam", "jill, eve"}, list);
list.clear();
results = ksession.getQueryResults("siblings", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("c1") + ", " + result.get("c2"));
}
assertEquals(16, list.size());
assertContains(new String[]{"eve, mary", "mary, eve",
"adam, stan", "stan, adam",
"adam, stan", "stan, adam",
"peter, paul", "peter, jill",
"paul, peter", "paul, jill",
"jill, peter", "jill, paul",
"peter, jill", "jill, peter",
"eve, mary", "mary, eve"}, list);
list.clear();
results = ksession.getQueryResults("fullSiblings", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("c1") + ", " + result.get("c2"));
}
assertEquals(12, list.size());
assertContains(new String[]{"eve, mary", "mary, eve",
"adam, stan", "stan, adam",
"adam, stan", "stan, adam",
"peter, jill", "jill, peter",
"peter, jill", "jill, peter",
"eve, mary", "mary, eve"}, list);
list.clear();
results = ksession.getQueryResults("fullSiblings", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("c1") + ", " + result.get("c2"));
}
assertEquals(12, list.size());
assertContains(new String[]{"eve, mary", "mary, eve",
"adam, stan", "stan, adam",
"adam, stan", "stan, adam",
"peter, jill", "jill, peter",
"peter, jill", "jill, peter",
"eve, mary", "mary, eve"}, list);
list.clear();
results = ksession.getQueryResults("uncle", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("uncle") + ", " + result.get("n"));
}
assertEquals(6, list.size());
assertContains(new String[]{"stan, peter",
"stan, paul",
"stan, jill",
"stan, peter",
"stan, paul",
"stan, jill"}, list);
list.clear();
results = ksession.getQueryResults("aunt", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("aunt") + ", " + result.get("n"));
}
assertEquals(6, list.size());
assertContains(new String[]{"mary, peter",
"mary, jill",
"mary, peter",
"mary, jill",
"eve, paul",
"eve, paul"}, list);
list.clear();
results = ksession.getQueryResults("grantParents", v, v);
for (final QueryResultsRow result : results) {
list.add(result.get("gp") + ", " + result.get("gc"));
}
assertEquals(12, list.size());
assertContains(new String[]{"carl, peter",
"carl, jill",
"carl, paul",
"john, peter",
"john, paul",
"john, jill",
"janet, peter",
"janet, paul",
"janet, jill",
"tina, peter",
"tina, jill",
"tina, paul",}, list);
} finally {
ksession.dispose();
}
}
@Test
public void testDynamicRulesWithSharing() {
String drl = "" +
"package org.drools.compiler.test1 \n" +
"\n" +
"declare Location\n" +
" thing : String \n" +
" location : String \n" +
"end" +
"\n" +
"declare Edible\n" +
" thing : String\n" +
"end" +
"\n" +
"query whereFood( String x, String y ) \n" +
" Location(x, y;) Edible(x;) \n" +
"end\n" +
"\n" +
"rule init when\n" +
"then\n" +
" \n" +
" insert( new Location(\"apple\", \"kitchen\") );\n" +
" insert( new Location(\"crackers\", \"kitchen\") );\n" +
" insert( new Location(\"broccoli\", \"kitchen\") );\n" +
" insert( new Location(\"computer\", \"office\") );\n" +
" insert( new Edible(\"apple\") );\n" +
" insert( new Edible(\"crackers\") );\n" +
"end\n" +
"";
final KieBase kieBase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration);
final KnowledgeBuilder knowledgeBuilder = TestUtil.createKnowledgeBuilder(kieBase, drl);
drl = "" +
"package org.drools.compiler.test2 \n" +
"import org.drools.compiler.test1.*\n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"import java.util.Map\n" +
"import java.util.HashMap\n" +
"global List list\n" +
"\n" +
"rule look2 when\n" +
" $place : String() // just here to give a OTN lookup point\n" +
" whereFood(thing, $place;)\n" +
"then\n" +
" list.add( \"2:\" + thing );\n" +
"end\n";
knowledgeBuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
if (knowledgeBuilder.hasErrors()) {
fail(knowledgeBuilder.getErrors().toString());
}
drl = "" +
"package org.drools.compiler.test3 \n" +
"import org.drools.compiler.test1.*\n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"import java.util.Map\n" +
"import java.util.HashMap\n" +
"global List list\n" +
"\n" +
"rule look3 when\n" +
" $place : String() // just here to give a OTN lookup point\n" +
" whereFood(thing, $place;)\n" +
"then\n" +
" list.add( \"3:\" + thing );\n" +
"end\n";
knowledgeBuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
if (knowledgeBuilder.hasErrors()) {
fail(knowledgeBuilder.getErrors().toString());
}
drl = "" +
"package org.drools.compiler.test4 \n" +
"import org.drools.compiler.test1.*\n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"import java.util.Map\n" +
"import java.util.HashMap\n" +
"global List list\n" +
"\n" +
"rule look4 when\n" +
" $place : String() // just here to give a OTN lookup point\n" +
" whereFood(thing, $place;)\n" +
"then\n" +
" list.add( \"4:\" + thing );\n" +
"end\n";
knowledgeBuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
if (knowledgeBuilder.hasErrors()) {
fail(knowledgeBuilder.getErrors().toString());
}
final Map<String, KiePackage> pkgs = new HashMap<>();
for (final KiePackage pkg : knowledgeBuilder.getKnowledgePackages()) {
pkgs.put(pkg.getName(), pkg);
}
final InternalKnowledgeBase kbase =
(InternalKnowledgeBase) KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration);
kbase.addPackages(Arrays.asList(pkgs.get("org.drools.compiler.test1"), pkgs.get("org.drools.compiler.test2")));
final KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.insert("kitchen");
ksession.fireAllRules();
assertEquals(2, list.size());
assertContains(new String[]{"2:crackers", "2:apple"}, list);
list.clear();
kbase.addPackages(Collections.singletonList(pkgs.get("org.drools.compiler.test3")));
ksession.fireAllRules();
assertEquals(2, list.size());
assertContains(new String[]{"3:crackers", "3:apple"}, list);
list.clear();
kbase.addPackages(Collections.singletonList(pkgs.get("org.drools.compiler.test4")));
ksession.fireAllRules();
assertEquals(2, list.size());
assertContains(new String[]{"4:crackers", "4:apple"}, list);
} finally {
ksession.dispose();
}
}
@Test
public void testOpenBackwardChain() {
// http://www.amzi.com/AdventureInProlog/advtop.php
final String drl = "" +
"package org.drools.compiler.test \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"import " + Person.class.getCanonicalName() + "\n" +
"global List list\n" +
"dialect \"mvel\"\n" +
"declare Location\n" +
" thing : String \n" +
" location : String \n" +
"end" +
"\n" +
"query isContainedIn( String x, String y ) \n" +
" Location(x, y;)\n" +
" or \n" +
" ( Location(z, y;) and isContainedIn(x, z;) )\n" +
"end\n" +
"\n" +
"rule look when \n" +
" Person( $l : likes ) \n" +
" isContainedIn( $l, 'office'; )\n" +
"then\n" +
" insertLogical( 'blah' );" +
"end\n" +
"rule existsBlah when \n" +
" exists String( this == 'blah') \n" +
"then\n" +
" list.add( 'exists blah' );" +
"end\n" +
"\n" +
"rule notBlah when \n" +
" not String( this == 'blah') \n" +
"then\n" +
" list.add( 'not blah' );" +
"end\n" +
"\n" +
"rule init when\n" +
"then\n" +
" insert( new Location(\"desk\", \"office\") );\n" +
" insert( new Location(\"envelope\", \"desk\") );\n" +
" insert( new Location(\"key\", \"envelope\") );\n" +
"end\n" +
"\n" +
"rule go1 when \n" +
" String( this == 'go1') \n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" insert( new Location('lamp', 'desk') );\n" +
"end\n" +
"\n" +
"rule go2 when \n" +
" String( this == 'go2') \n" +
" $l : Location('lamp', 'desk'; )\n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" retract( $l );\n" +
"end\n" +
"\n" +
"rule go3 when \n" +
" String( this == 'go3') \n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" insert( new Location('lamp', 'desk') );\n" +
"end\n" +
"\n" +
"rule go4 when \n" +
" String( this == 'go4') \n" +
" $l : Location('lamp', 'desk'; )\n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" modify( $l ) { thing = 'book' };\n" +
"end\n" +
"\n" +
"rule go5 when \n" +
" String( this == 'go5') \n" +
" $l : Location('book', 'desk'; )\n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" modify( $l ) { thing = 'lamp' };\n" +
"end\n" +
"\n" +
"rule go6 when \n" +
" String( this == 'go6') \n" +
" $l : Location( 'lamp', 'desk'; )\n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" modify( $l ) { thing = 'book' };\n" +
"end\n" +
"\n" +
"rule go7 when \n" +
" String( this == 'go7') \n" +
" $p : Person( likes == 'lamp' ) \n" +
"then\n" +
" list.add( drools.getRule().getName() ); \n" +
" modify( $p ) { likes = 'key' };\n" +
"end\n" +
"\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Person p = new Person();
p.setLikes("lamp");
ksession.insert(p);
ksession.fireAllRules();
assertEquals("not blah", list.get(0));
list.clear();
InternalFactHandle fh = (InternalFactHandle) ksession.insert("go1");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go1", list.get(0));
assertEquals("exists blah", list.get(1));
fh = (InternalFactHandle) ksession.insert("go2");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go2", list.get(2));
assertEquals("not blah", list.get(3));
fh = (InternalFactHandle) ksession.insert("go3");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go3", list.get(4));
assertEquals("exists blah", list.get(5));
fh = (InternalFactHandle) ksession.insert("go4");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go4", list.get(6));
assertEquals("not blah", list.get(7));
fh = (InternalFactHandle) ksession.insert("go5");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go5", list.get(8));
assertEquals("exists blah", list.get(9));
// This simulates a modify of the root DroolsQuery object, but first we break it
fh = (InternalFactHandle) ksession.insert("go6");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go6", list.get(10));
assertEquals("not blah", list.get(11));
// now fix it
fh = (InternalFactHandle) ksession.insert("go7");
ksession.fireAllRules();
fh = getFactHandle(fh, ksession);
ksession.delete(fh);
assertEquals("go7", list.get(12));
assertEquals("exists blah", list.get(13));
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testCompile() {
final String drl = "declare Location\n"
+ "thing : String\n"
+ "location : String\n"
+ "end\n\n"
+ "query isContainedIn( String x, String y )\n"
+ "Location( x := thing, y := location)\n"
+ "or \n"
+ "( Location(z := thing, y := location) and ?isContainedIn( x := x, z := y ) )\n"
+ "end\n";
KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
}
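    // A hedged sketch, not an original test: how the recursive isContainedIn query above could be
    // pulled directly. Assumes Location facts are already in the session and uses the statically
    // imported Variable.v as the unbound output argument.
    private static List<Object> exampleContainedIn(final KieSession ksession, final String place) {
        final List<Object> things = new ArrayList<>();
        for (final QueryResultsRow row : ksession.getQueryResults("isContainedIn", v, place)) {
            things.add(row.get("x")); // everything transitively contained in 'place'
        }
        return things;
    }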
@Test(timeout = 10000)
public void testInsertionOrder() {
final String drl = "" +
"package org.drools.compiler.integrationtests \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"global List list\n" +
"dialect \"mvel\"\n" +
"declare Person\n" +
" name : String\n" +
" likes : String\n" +
"end\n" +
"\n" +
"declare Location\n" +
" thing : String \n" +
" location : String \n" +
"end\n" +
"\n" +
"declare Edible\n" +
" thing : String\n" +
"end\n" +
"\n" +
"\n" +
"query hasFood( String x, String y ) \n" +
" Location(x, y;) " +
" or \n " +
" ( Location(z, y;) and hasFood(x, z;) )\n" +
"end\n" +
"\n" +
"rule look when \n" +
" Person( $l : likes ) \n" +
" hasFood( $l, 'kitchen'; )\n" +
"then\n" +
" list.add( 'kitchen has ' + $l );" +
"end\n" +
"rule go1 when\n" +
" String( this == 'go1') \n" +
"then\n" +
" insert( new Person('zool', 'peach') );\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
"end\n" +
"rule go2 when\n" +
" String( this == 'go2') \n" +
"then\n" +
" insert( new Person('zool', 'peach') );\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
"end\n" +
"\n" +
"rule go3 when\n" +
" String( this == 'go3') \n" +
"then\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
" insert( new Person('zool', 'peach') );\n" +
"end\n" +
"\n" +
"rule go4 when\n" +
" String( this == 'go4') \n" +
"then\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
" insert( new Person('zool', 'peach') );\n" +
"end\n" +
"rule go5 when\n" +
" String( this == 'go5') \n" +
"then\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
" insert( new Person('zool', 'peach') );\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
"end\n" +
"rule go6 when\n" +
" String( this == 'go6') \n" +
"then\n" +
" insert( new Location(\"table\", \"kitchen\") );\n" +
" insert( new Person('zool', 'peach') );\n" +
" insert( new Location(\"peach\", \"table\") );\n" +
"end\n" +
"\n" +
"\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
for (int i = 1; i <= 6; i++) {
final KieSession ksession = kbase.newKieSession();
try {
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.fireAllRules();
list.clear();
final FactHandle fh = ksession.insert("go" + i);
ksession.fireAllRules();
ksession.delete(fh);
assertEquals(1, list.size());
assertEquals("kitchen has peach", list.get(0));
} finally {
ksession.dispose();
}
}
}
@Test(timeout = 10000)
public void testQueryFindAll() {
final Object[] objects = new Object[]{42, "a String", 100};
final int oCount = objects.length;
final List<Object> queryList = new ArrayList<>();
final List<Object> ruleList = new ArrayList<>();
// case 0: expect exactly one query match per inserted object
runTestQueryFindAll(0, queryList, ruleList, objects);
assertEquals(oCount, queryList.size());
assertContains(objects, queryList);
// cases 1 and 2: the extra Object() pattern joins with every query
// match, so expect the cross product (oCount * oCount)
queryList.clear();
ruleList.clear();
runTestQueryFindAll(1, queryList, ruleList, objects);
assertEquals(oCount * oCount, queryList.size());
queryList.clear();
ruleList.clear();
runTestQueryFindAll(2, queryList, ruleList, objects);
assertEquals(oCount * oCount, queryList.size());
}
private void runTestQueryFindAll(final int iCase,
final List<Object> queryList,
final List<Object> ruleList,
final Object[] objects) {
String drl = "" +
"package org.drools.compiler.test \n" +
"global java.util.List queryList \n" +
"global java.util.List ruleList \n" +
"query object( Object o ) \n" +
" o := Object( ) \n" +
"end \n" +
"rule findObjectByQuery \n" +
"when \n";
switch (iCase) {
case 0:
// omit Object()
drl += " object( $a ; ) \n";
break;
case 1:
drl += " Object() ";
drl += " object( $a ; ) \n";
break;
case 2:
drl += " object( $a ; ) \n";
drl += " Object() ";
break;
}
drl +=
"then \n" +
"// System.out.println( \"Object by query: \" + $a );\n" +
" queryList.add( $a ); \n" +
"end \n" +
"rule findObject \n" +
"salience 10 \n" +
"when \n" +
" $o: Object() \n" +
"then " +
"// System.out.println( \"Object: \" + $o );\n" +
" ruleList.add( $o ); \n" +
"end \n" +
"";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
ksession.setGlobal("queryList", queryList);
ksession.setGlobal("ruleList", ruleList);
for (final Object o : objects) {
ksession.insert(o);
}
ksession.fireAllRules();
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueryWithObject() {
final String drl = "" +
"package org.drools.compiler.test \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"global List list\n" +
"dialect \"mvel\"\n" +
"\n" +
"import " + BackwardChainingTest.class.getName() + ".Q\n" +
"import " + BackwardChainingTest.class.getName() + ".R\n" +
"import " + BackwardChainingTest.class.getName() + ".S\n" +
"query object(Object o)\n" +
" o := Object() \n" +
"end\n" +
"rule collectObjects when\n" +
" String( this == 'go1' )\n" +
" object( o; )\n" +
"then\n" +
" list.add( o );\n" +
"end\n" +
"rule init when\n" +
" String( this == 'init' )\n" +
"then\n" +
" insert( new Q(1) );\n " +
" insert( new Q(5) );\n " +
" insert( new Q(6) );\n " +
" insert( new R(1) );\n " +
" insert( new R(4) );\n " +
" insert( new R(6) );\n " +
" insert( new R(2) );\n " +
" insert( new S(2) );\n " +
" insert( new S(3) );\n " +
" insert( new S(6) );\n " +
"end\n" +
"";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
List<Object> list = new ArrayList<>();
try {
ksession.setGlobal("list", list);
ksession.insert("init");
ksession.fireAllRules();
ksession.insert("go1");
ksession.fireAllRules();
assertEquals(12, list.size());
assertContains(new Object[]{
"go1", "init", new Q(6), new R(6), new S(3),
new R(2), new R(1), new R(4), new S(2),
new S(6), new Q(1), new Q(5)},
list);
} finally {
ksession.dispose();
}
// now reverse the go1 and init order
ksession = kbase.newKieSession();
try {
list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.insert("go1");
ksession.fireAllRules();
ksession.insert("init");
ksession.fireAllRules();
assertEquals(12, list.size());
assertContains(new Object[]{
"go1", "init", new Q(6), new R(6), new S(3),
new R(2), new R(1), new R(4), new S(2),
new S(6), new Q(1), new Q(5)},
list);
} finally {
ksession.dispose();
}
}
public static void assertContains(final Object[] objects,
final List list) {
for (final Object object : objects) {
if (!list.contains(object)) {
fail("does not contain:" + object);
}
}
}
public static void assertContains(final List objects,
final List list) {
if (!list.containsAll(objects)) {
fail("does not contain:" + objects);
}
}
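    /**
     * Re-resolves a fact handle by id, which is needed after the session has been serialised and
     * deserialised and the originally returned handle instance has become stale.
     */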
public static InternalFactHandle getFactHandle(final FactHandle factHandle,
final KieSession ksession) {
final Map<Long, FactHandle> handles = new HashMap<>();
ksession.getFactHandles().forEach(fh -> handles.put(((InternalFactHandle) fh).getId(), fh));
return (InternalFactHandle) handles.get(((InternalFactHandle) factHandle).getId());
}
public static class Man
implements
Serializable {
private String name;
public Man(final String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
@Override
public String toString() {
return "Man [name=" + name + "]";
}
}
public static class Woman
implements
Serializable {
private String name;
public Woman(final String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
@Override
public String toString() {
return "Woman [name=" + name + "]";
}
}
public static class Parent
implements
Serializable {
private String parent;
private String child;
public Parent(final String parent,
final String child) {
this.parent = parent;
this.child = child;
}
public String getParent() {
return parent;
}
public void setParent(final String parent) {
this.parent = parent;
}
public String getChild() {
return child;
}
public void setChild(final String child) {
this.child = child;
}
@Override
public String toString() {
return "Parent [parent=" + parent + ", child=" + child + "]";
}
}
public static class Q {
int value;
public Q(final int value) {
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(final int value) {
this.value = value;
}
public String toString() {
return "Q" + value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + value;
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Q other = (Q) obj;
return value == other.value;
}
}
public static class R {
int value;
public R(final int value) {
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(final int value) {
this.value = value;
}
public String toString() {
return "R" + value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + value;
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final R other = (R) obj;
return value == other.value;
}
}
public static class S {
int value;
public S(final int value) {
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(final int value) {
this.value = value;
}
public String toString() {
return "S" + value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + value;
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final S other = (S) obj;
return value == other.value;
}
}
@Test(timeout = 10000)
public void testQueryWithClassLiterals() {
final String drl = "" +
"package org.drools.test \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"global List list\n" +
"declare Foo end \n" +
"query klass( Class $c )\n" +
" Object( this.getClass() == $c ) \n" +
"end\n" +
"rule R when\n" +
" o : String( this == 'go1' )\n" +
" klass( String.class ; )\n" +
"then\n" +
" list.add( o );\n" +
" insert( new Foo() ); \n" +
"end\n" +
"rule S when\n" +
" o : Foo()\n" +
" klass( Foo.class ; )\n" +
"then\n" +
" list.add( o );\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<Object> list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.insert("go1");
ksession.fireAllRules();
assertEquals(2, list.size());
assertEquals("go1", list.get(0));
assertEquals("org.drools.test.Foo", list.get(1).getClass().getName());
} finally {
ksession.dispose();
}
}
@Test(timeout = 10000)
public void testQueryIndexingWithUnification() {
final String drl = "" +
"package org.drools.test \n" +
"import java.util.List\n" +
"import java.util.ArrayList\n" +
"global List list\n" +
"declare Foo id : int end \n" +
"declare Bar " +
" name : String " +
" val : int " +
"end \n" +
"query fooffa( String $name, Foo $f )\n" +
" Bar( name == $name, $id : val )\n" +
" $f := Foo( id == $id ) \n" +
"end\n" +
"rule R when\n" +
" o : String( this == 'go' )\n" +
" fooffa( \"x\", $f ; )\n" +
"then\n" +
" list.add( $f );\n" +
"end\n" +
"rule S when\n" +
"then\n" +
" insert( new Foo( 1 ) );\n" +
" insert( new Bar( \"x\", 1 ) );\n" +
"end\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<Integer> list = new ArrayList<>();
ksession.setGlobal("list",
list);
ksession.fireAllRules();
ksession.insert("go");
ksession.fireAllRules();
assertEquals(1, list.size());
} finally {
ksession.dispose();
}
}
@Test
public void testQueryWithEvents() {
final String drl = "global java.util.List list; " +
"" +
"declare Inner\n" +
" @role(event)\n" +
"end\n" +
"rule \"Input\"\n" +
"when\n" +
"then\n" +
" insert( \"X\" );\n" +
" insert( new Inner( ) );\n" +
"end\n" +
"\n" +
"query myAgg( )\n" +
" Inner( )\n" +
"end\n" +
"\n" +
"rule \"React\"\n" +
"when\n" +
" String()\n" +
" myAgg( )\n" +
"then\n" +
" list.add( 42 );\n" +
"end";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession ksession = kbase.newKieSession();
try {
final List<Integer> list = new ArrayList<>();
ksession.setGlobal("list", list);
ksession.fireAllRules();
assertEquals(Collections.singletonList(42), list);
} finally {
ksession.dispose();
}
}
@Test
public void testNpeOnQuery() {
final String drl =
"global java.util.List list; " +
"query foo( Integer $i ) " +
" $i := Integer( this < 10 ) " +
"end\n" +
"\n" +
"rule r1 when " +
" foo( $i ; ) " +
" Integer( this == 10 ) " +
"then " +
" System.out.println(\"10 \" + $i);" +
" list.add( 10 );\n" +
"end\n" +
"\n" +
"rule r2 when " +
" foo( $i; ) " +
" Integer( this == 20 ) " +
"then " +
" System.out.println(\"20 \" + $i);" +
" list.add( 20 );\n" +
"end\n" +
"rule r3 when " +
" $i : Integer( this == 1 ) " +
"then " +
" System.out.println($i);" +
" update( kcontext.getKieRuntime().getFactHandle( $i ), $i + 1 );" +
"end\n" +
"\n";
final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("backward-chaining-test", kieBaseTestConfiguration, drl);
final KieSession kieSession = kbase.newKieSession();
try {
final List<Integer> list = new ArrayList<>();
kieSession.setGlobal("list", list);
kieSession.insert(1);
kieSession.insert(20);
kieSession.fireAllRules();
assertEquals(1, list.size());
assertEquals(20, (int) list.get(0));
} finally {
kieSession.dispose();
}
}
}
|
apache-2.0
|
dslomov/bazel
|
src/main/java/com/google/devtools/build/lib/windows/jni/WindowsProcesses.java
|
7430
|
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.windows.jni;
/** Process management on Windows. */
public class WindowsProcesses {
public static final long INVALID = -1;
private WindowsProcesses() {
// Prevent construction
}
/**
* Creates a process with the specified Windows command line.
*
* <p>Appropriately quoting arguments is the responsibility of the caller.
*
* @param argv0 the binary to run; must be unquoted; must be either an absolute, normalized
* Windows path with a drive letter (e.g. "c:\foo\bar app.exe") or a single file name (e.g.
* "foo app.exe")
* @param argvRest the rest of the command line, i.e. argv[1:] (needs to be quoted Windows style)
* @param env the environment of the new process. null means inherit that of the Bazel server
* @param cwd the working directory of the new process. if null, the same as that of the current
* process
   * @param stdoutFile the file stdout should be redirected to. If null, the output can be
   *     read via {@link #getStdout(long)} and {@link #readStream(long, byte[], int, int)}.
   * @param stderrFile the file stderr should be redirected to. If null, the output can be
   *     read via {@link #getStderr(long)} and {@link #readStream(long, byte[], int, int)}.
* @param redirectErrorStream whether we merge the process's standard error and standard output.
* @return the opaque identifier of the created process
*/
public static long createProcess(
String argv0,
String argvRest,
byte[] env,
String cwd,
String stdoutFile,
String stderrFile,
boolean redirectErrorStream) {
WindowsJniLoader.loadJni();
return nativeCreateProcess(
argv0, argvRest, env, cwd, stdoutFile, stderrFile, redirectErrorStream);
}
public static long createProcess(
String argv0, String argvRest, byte[] env, String cwd, String stdoutFile, String stderrFile) {
WindowsJniLoader.loadJni();
return nativeCreateProcess(argv0, argvRest, env, cwd, stdoutFile, stderrFile, false);
}
private static native long nativeCreateProcess(
String argv0,
String argvRest,
byte[] env,
String cwd,
String stdoutFile,
String stderrFile,
boolean redirectErrorStream);
/**
* Writes data from the given array to the stdin of the specified process.
*
* <p>Blocks until either some data was written or the process is terminated.
*
* @return the number of bytes written
*/
public static int writeStdin(long process, byte[] bytes, int offset, int length) {
WindowsJniLoader.loadJni();
return nativeWriteStdin(process, bytes, offset, length);
}
private static native int nativeWriteStdin(long process, byte[] bytes, int offset, int length);
/** Returns an opaque identifier of stdout stream for the process. */
public static long getStdout(long process) {
WindowsJniLoader.loadJni();
return nativeGetStdout(process);
}
private static native long nativeGetStdout(long process);
/** Returns an opaque identifier of stderr stream for the process. */
public static long getStderr(long process) {
WindowsJniLoader.loadJni();
return nativeGetStderr(process);
}
private static native long nativeGetStderr(long process);
/**
* Reads data from the stream into the given array. {@code stream} should come from {@link
* #nativeGetStdout(long)} or {@link #nativeGetStderr(long)}.
*
* <p>Blocks until either some data was read or the process is terminated.
*
* @return the number of bytes read, 0 on EOF, or -1 if there was an error.
*/
public static int readStream(long stream, byte[] bytes, int offset, int length) {
WindowsJniLoader.loadJni();
return nativeReadStream(stream, bytes, offset, length);
}
private static native int nativeReadStream(long stream, byte[] bytes, int offset, int length);
/**
* Waits until the given process terminates. If timeout is non-negative, it indicates the number
* of milliseconds before the call times out.
*
   * <p>Return values:
   * <ul>
   * <li>0: Process finished
   * <li>1: Timeout
   * <li>2: Something went wrong
   * </ul>
*/
public static int waitFor(long process, long timeout) {
WindowsJniLoader.loadJni();
return nativeWaitFor(process, timeout);
}
private static native int nativeWaitFor(long process, long timeout);
/**
* Returns the exit code of the process. Throws {@code IllegalStateException} if something goes
* wrong.
*/
public static int getExitCode(long process) {
WindowsJniLoader.loadJni();
return nativeGetExitCode(process);
}
private static native int nativeGetExitCode(long process);
/** Returns the process ID of the given process or -1 if there was an error. */
public static int getProcessPid(long process) {
WindowsJniLoader.loadJni();
return nativeGetProcessPid(process);
}
private static native int nativeGetProcessPid(long process);
/** Terminates the given process. Returns true if the termination was successful. */
public static boolean terminate(long process) {
WindowsJniLoader.loadJni();
return nativeTerminate(process);
}
private static native boolean nativeTerminate(long process);
/**
* Releases the native data structures associated with the process.
*
* <p>Calling any other method on the same process after this call will result in the JVM crashing
* or worse.
*/
public static void deleteProcess(long process) {
WindowsJniLoader.loadJni();
nativeDeleteProcess(process);
}
private static native void nativeDeleteProcess(long process);
/**
   * Closes the stream.
*
* @param stream should come from {@link #nativeGetStdout(long)} or {@link
* #nativeGetStderr(long)}.
*/
public static void closeStream(long stream) {
WindowsJniLoader.loadJni();
nativeCloseStream(stream);
}
private static native void nativeCloseStream(long stream);
/**
* Returns a string representation of the last error caused by any call on the given process or
* the empty string if the last operation was successful.
*
* <p>Does <b>NOT</b> terminate the process if it is still running.
*
* <p>After this call returns, subsequent calls will return the empty string if there was no
* failed operation in between.
*/
public static String processGetLastError(long process) {
WindowsJniLoader.loadJni();
return nativeProcessGetLastError(process);
}
private static native String nativeProcessGetLastError(long process);
public static String streamGetLastError(long process) {
WindowsJniLoader.loadJni();
return nativeStreamGetLastError(process);
}
private static native String nativeStreamGetLastError(long process);
  /** Returns the PID of the current process. */
public static int getpid() {
WindowsJniLoader.loadJni();
return nativeGetpid();
}
private static native int nativeGetpid();
}
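A minimal lifecycle sketch for the API above (assumptions: running on Windows with the JNI library loadable; the cmd.exe path and arguments are illustrative; exception handling is elided):
// Hypothetical caller code, not part of WindowsProcesses.
long process = WindowsProcesses.createProcess(
    "c:\\windows\\system32\\cmd.exe", "/c echo hello", null, null, null, null);
if (process == WindowsProcesses.INVALID) { // assumption: INVALID (-1) signals creation failure
    throw new IllegalStateException(WindowsProcesses.processGetLastError(process));
}
long stdout = WindowsProcesses.getStdout(process);
byte[] buf = new byte[4096];
int read;
while ((read = WindowsProcesses.readStream(stdout, buf, 0, buf.length)) > 0) {
    System.out.write(buf, 0, read);
}
WindowsProcesses.waitFor(process, -1); // assumption: a negative timeout waits indefinitely
int exitCode = WindowsProcesses.getExitCode(process);
WindowsProcesses.closeStream(stdout);
WindowsProcesses.deleteProcess(process); // must be last; see the deleteProcess javadoc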
|
apache-2.0
|
krzysztof-magosa/encog-java-core
|
src/main/java/org/encog/ml/data/versatile/normalizers/strategies/NormalizationStrategy.java
|
2834
|
/*
* Encog(tm) Core v3.3 - Java Version
* http://www.heatonresearch.com/encog/
* https://github.com/encog/encog-java-core
* Copyright 2008-2014 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.ml.data.versatile.normalizers.strategies;
import java.io.Serializable;
import org.encog.ml.data.MLData;
import org.encog.ml.data.versatile.columns.ColumnDefinition;
/**
* Defines the interface to a normalization strategy.
*/
public interface NormalizationStrategy extends Serializable {
/**
* Calculate how many elements a column will normalize into.
* @param colDef The column definition.
* @param isInput True, if this is an input column.
* @return The number of elements needed to normalize this column.
*/
int normalizedSize(ColumnDefinition colDef, boolean isInput);
    /**
     * Normalize a column, with a string input.
     * @param colDef The column definition.
     * @param isInput True, if this is an input column.
     * @param value The value to normalize.
     * @param outputData The output data.
     * @param outputColumn The element to begin outputting to.
     * @return The new output element, advanced by the correct amount.
     */
    int normalizeColumn(ColumnDefinition colDef, boolean isInput, String value,
            double[] outputData, int outputColumn);
    /**
     * Denormalize a column back to a string value.
     * @param colDef The column definition.
     * @param isInput True, if this is an input column.
     * @param output The output data.
     * @param idx The element to begin reading from.
     * @return The denormalized string value.
     */
    String denormalizeColumn(ColumnDefinition colDef, boolean isInput, MLData output,
            int idx);
    /**
     * Normalize a column, with a double value.
     * @param colDef The column definition.
     * @param isInput True, if this is an input column.
     * @param value The value to normalize.
     * @param outputData The output data.
     * @param outputColumn The element to begin outputting to.
     * @return The new output element, advanced by the correct amount.
     */
    int normalizeColumn(ColumnDefinition colDef, boolean isInput, double value,
            double[] outputData, int outputColumn);
}
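A sketch of how a caller might drive this interface (a minimal illustration, not Encog's actual pipeline; the helper name and loop shape are assumptions):
// Hypothetical helper: normalize one input column into a buffer.
static int encode(NormalizationStrategy strategy, ColumnDefinition colDef,
        String rawValue, double[] buffer, int offset) {
    // a column may occupy several elements, e.g. a one-of-n encoding
    int width = strategy.normalizedSize(colDef, true);
    assert offset + width <= buffer.length;
    // returns the advanced offset, ready for the next column
    return strategy.normalizeColumn(colDef, true, rawValue, buffer, offset);
}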
|
apache-2.0
|
electrum/presto
|
core/trino-main/src/test/java/io/trino/type/TestIntervalDayTimeType.java
|
1846
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.type;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import static io.trino.type.IntervalDayTimeType.INTERVAL_DAY_TIME;
public class TestIntervalDayTimeType
extends AbstractTestType
{
public TestIntervalDayTimeType()
{
super(INTERVAL_DAY_TIME, SqlIntervalDayTime.class, createTestBlock());
}
public static Block createTestBlock()
{
BlockBuilder blockBuilder = INTERVAL_DAY_TIME.createBlockBuilder(null, 15);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 1111);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 1111);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 1111);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 2222);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 2222);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 2222);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 2222);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 2222);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 3333);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 3333);
INTERVAL_DAY_TIME.writeLong(blockBuilder, 4444);
return blockBuilder.build();
}
@Override
protected Object getGreaterValue(Object value)
{
return ((Long) value) + 1;
}
}
|
apache-2.0
|
dldinternet/resty-gwt
|
restygwt/src/test/java/org/fusesource/restygwt/server/event/EchoServlet.java
|
4528
|
/**
* Copyright (C) 2009-2012 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.restygwt.server.event;
import java.io.IOException;
import java.util.Enumeration;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* servlet to reflect the incoming request
*
 * @author <a href="mailto:andi.balke@gmail.com">andi</a>
*/
public class EchoServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger log = Logger.getLogger(EchoServlet.class.getName());
    private static final String CONTENT_TYPE = "application/json";
    private static final String RESPONSE_BODY_HEADER_NAME = "X-Echo-Body";
    private static final String RESPONSE_CODE_HEADER_NAME = "X-Echo-Code";
    private static final String RESPONSETIME_IN_SEC_HEADER_NAME = "X-Response-Time";
@Override
protected void doPost(HttpServletRequest request,
HttpServletResponse response) throws IOException {
if (log.isLoggable(Level.FINE)) {
log.fine("path: " + request.getPathTranslated());
@SuppressWarnings("unchecked")
Enumeration<String> headerNames = request.getHeaderNames();
StringBuilder sb = new StringBuilder();
sb.append("\n");
sb.append("URI : ").append(request.getRequestURI()).append("\n");
sb.append("Method : ").append(request.getMethod()).append("\n");
sb.append("Headers:\n");
sb.append("========\n");
while(headerNames.hasMoreElements()) {
final String s = headerNames.nextElement();
sb.append(" ").append(s).append(": ").append(request.getHeader(s)).append("\n");
}
sb.append("========\n");
sb.append("Body :\n");
sb.append("========\n");
String line = null;
do {
line = request.getReader().readLine();
if (null != line)
sb.append(line).append("\n");
} while(null != line);
sb.append("========\n");
log.fine(sb.toString());
}
response.setContentType(CONTENT_TYPE);
int statusCode = HttpServletResponse.SC_OK;
if (null != request.getHeader(RESPONSE_CODE_HEADER_NAME)) {
statusCode = Integer.parseInt(request.getHeader(RESPONSE_CODE_HEADER_NAME));
}
response.setStatus(statusCode);
String out = "";
if (null != request.getHeader(RESPONSE_BODY_HEADER_NAME)) {
out = request.getHeader(RESPONSE_BODY_HEADER_NAME);
response.getWriter().print(out);
}
// wait if necessary
if (null != request.getHeader(RESPONSETIME_IN_SEC_HEADER_NAME)) {
try {
final int waitingTime =
Integer.parseInt(request.getHeader(RESPONSETIME_IN_SEC_HEADER_NAME)) * 1000;
log.fine("need to wait for: " + waitingTime + " milliseconds");
Thread.sleep(waitingTime);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
log.fine("respond: (" + statusCode + ") `" + out + "´");
}
@Override
protected void doGet(HttpServletRequest request,
HttpServletResponse response) throws IOException {
doPost(request, response);
}
@Override
protected void doDelete(HttpServletRequest request,
HttpServletResponse response) throws IOException {
doPost(request, response);
}
@Override
protected void doPut(HttpServletRequest request,
HttpServletResponse response) throws IOException {
doPost(request, response);
}
}
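A minimal client sketch exercising the control headers above (assumptions: the servlet is mapped at http://localhost:8080/echo, which is illustrative; exception handling is elided):
// Hypothetical caller code, not part of the servlet.
java.net.HttpURLConnection conn = (java.net.HttpURLConnection)
        new java.net.URL("http://localhost:8080/echo").openConnection();
conn.setRequestMethod("POST");
conn.setDoOutput(true);
conn.setRequestProperty("X-Echo-Code", "404");            // echoed back as the status code
conn.setRequestProperty("X-Echo-Body", "{\"ok\":false}"); // echoed back as the body
conn.setRequestProperty("X-Response-Time", "2");          // delays the response by 2 seconds
conn.getOutputStream().write("ping".getBytes());
int status = conn.getResponseCode();                      // 404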
|
apache-2.0
|
ameybarve15/incubator-geode
|
gemfire-core/src/main/java/com/gemstone/gemfire/internal/tools/gfsh/app/command/task/data/PartitionAttributeInfo.java
|
2542
|
package com.gemstone.gemfire.internal.tools.gfsh.app.command.task.data;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.gemstone.gemfire.DataSerializable;
import com.gemstone.gemfire.cache.PartitionAttributes;
/**
* A data class that contains partition region attribute information.
* @author dpark
*
*/
public class PartitionAttributeInfo implements DataSerializable
{
private static final long serialVersionUID = 1L;
private long versionId = serialVersionUID;
private String regionPath;
private int redundantCopies;
private int totalNumBuckets;
	private List<Partition> partitionList = new ArrayList<Partition>();
public PartitionAttributeInfo() {}
public PartitionAttributeInfo(PartitionAttributes attr)
{
}
public void addPartition(Partition partition)
{
partitionList.add(partition);
}
	public List<Partition> getPartitionList()
{
return partitionList;
}
public String getRegionPath()
{
return regionPath;
}
public int getRedundantCopies()
{
return redundantCopies;
}
public int getTotalNumBuckets()
{
return totalNumBuckets;
}
public void fromData(DataInput in) throws IOException, ClassNotFoundException
{
versionId = in.readLong();
regionPath = in.readUTF();
redundantCopies = in.readInt();
totalNumBuckets = in.readInt();
		partitionList = new ArrayList<Partition>();
int size = in.readInt();
for (int i = 0; i < size; i++) {
Partition part = new Partition();
part.memberName = in.readUTF();
part.localMaxMemory = in.readInt();
			part.totalMaxMemory = in.readLong();
partitionList.add(part);
}
}
public void toData(DataOutput out) throws IOException
{
out.writeLong(versionId);
out.writeUTF(regionPath);
out.writeInt(redundantCopies);
out.writeInt(totalNumBuckets);
int size = partitionList.size();
out.writeInt(size);
for (int i = 0; i < size; i++) {
			Partition part = partitionList.get(i);
out.writeUTF(part.memberName);
out.writeInt(part.localMaxMemory);
			out.writeLong(part.totalMaxMemory);
}
}
public static class Partition
{
public Partition() {}
private String memberName;
private int localMaxMemory ;
		private long totalMaxMemory;
public String getMemberName()
{
return memberName;
}
public int getLocalMaxMemory()
{
return localMaxMemory;
}
		public long getTotalMaxMemory()
		{
			return totalMaxMemory;
		}
}
}
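A round-trip sketch for the wire format above (a hand-built stream standing in for GemFire's DataSerializer plumbing; the field values are illustrative):
// Hypothetical test code: write the fields in toData/fromData order, then read them back.
static PartitionAttributeInfo sample() throws java.io.IOException, ClassNotFoundException {
    java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
    java.io.DataOutputStream out = new java.io.DataOutputStream(bos);
    out.writeLong(1L);       // versionId
    out.writeUTF("/region"); // regionPath
    out.writeInt(1);         // redundantCopies
    out.writeInt(113);       // totalNumBuckets
    out.writeInt(0);         // number of partitions
    PartitionAttributeInfo info = new PartitionAttributeInfo();
    info.fromData(new java.io.DataInputStream(
            new java.io.ByteArrayInputStream(bos.toByteArray())));
    return info; // info.getRegionPath() now returns "/region"
}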
|
apache-2.0
|
jprante/elasticsearch
|
core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java
|
14087
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
import java.util.Objects;
/**
* A Query that does fuzzy matching for a specific value.
*/
public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> implements MultiTermQueryBuilder {
public static final String NAME = "fuzzy";
/** Default maximum edit distance. Defaults to AUTO. */
public static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO;
/** Default number of initial characters which will not be “fuzzified”. Defaults to 0. */
public static final int DEFAULT_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength;
/** Default maximum number of terms that the fuzzy query will expand to. Defaults to 50. */
public static final int DEFAULT_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions;
/** Default as to whether transpositions should be treated as a primitive edit operation,
* instead of classic Levenshtein algorithm. Defaults to false. */
public static final boolean DEFAULT_TRANSPOSITIONS = false;
private static final ParseField TERM_FIELD = new ParseField("term");
private static final ParseField VALUE_FIELD = new ParseField("value");
private static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
private static final ParseField MAX_EXPANSIONS_FIELD = new ParseField("max_expansions");
private static final ParseField TRANSPOSITIONS_FIELD = new ParseField("transpositions");
private static final ParseField REWRITE_FIELD = new ParseField("rewrite");
private final String fieldName;
private final Object value;
private Fuzziness fuzziness = DEFAULT_FUZZINESS;
private int prefixLength = DEFAULT_PREFIX_LENGTH;
private int maxExpansions = DEFAULT_MAX_EXPANSIONS;
//LUCENE 4 UPGRADE we need a testcase for this + documentation
private boolean transpositions = DEFAULT_TRANSPOSITIONS;
private String rewrite;
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, String value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, int value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, long value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, float value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, double value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String fieldName, boolean value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public FuzzyQueryBuilder(String fieldName, Object value) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name cannot be null or empty");
}
if (value == null) {
throw new IllegalArgumentException("query value cannot be null");
}
this.fieldName = fieldName;
this.value = convertToBytesRefIfString(value);
}
/**
* Read from a stream.
*/
public FuzzyQueryBuilder(StreamInput in) throws IOException {
super(in);
fieldName = in.readString();
value = in.readGenericValue();
fuzziness = new Fuzziness(in);
prefixLength = in.readVInt();
maxExpansions = in.readVInt();
transpositions = in.readBoolean();
rewrite = in.readOptionalString();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(this.fieldName);
out.writeGenericValue(this.value);
this.fuzziness.writeTo(out);
out.writeVInt(this.prefixLength);
out.writeVInt(this.maxExpansions);
out.writeBoolean(this.transpositions);
out.writeOptionalString(this.rewrite);
}
public String fieldName() {
return this.fieldName;
}
public Object value() {
return convertToStringIfBytesRef(this.value);
}
public FuzzyQueryBuilder fuzziness(Fuzziness fuzziness) {
this.fuzziness = (fuzziness == null) ? DEFAULT_FUZZINESS : fuzziness;
return this;
}
public Fuzziness fuzziness() {
return this.fuzziness;
}
public FuzzyQueryBuilder prefixLength(int prefixLength) {
this.prefixLength = prefixLength;
return this;
}
public int prefixLength() {
return this.prefixLength;
}
public FuzzyQueryBuilder maxExpansions(int maxExpansions) {
this.maxExpansions = maxExpansions;
return this;
}
public int maxExpansions() {
return this.maxExpansions;
}
public FuzzyQueryBuilder transpositions(boolean transpositions) {
this.transpositions = transpositions;
return this;
}
public boolean transpositions() {
return this.transpositions;
}
public FuzzyQueryBuilder rewrite(String rewrite) {
this.rewrite = rewrite;
return this;
}
public String rewrite() {
return this.rewrite;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
builder.field(VALUE_FIELD.getPreferredName(), convertToStringIfBytesRef(this.value));
fuzziness.toXContent(builder, params);
builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength);
builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions);
builder.field(TRANSPOSITIONS_FIELD.getPreferredName(), transpositions);
if (rewrite != null) {
builder.field(REWRITE_FIELD.getPreferredName(), rewrite);
}
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
}
public static FuzzyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null;
Object value = null;
Fuzziness fuzziness = FuzzyQueryBuilder.DEFAULT_FUZZINESS;
int prefixLength = FuzzyQueryBuilder.DEFAULT_PREFIX_LENGTH;
int maxExpansions = FuzzyQueryBuilder.DEFAULT_MAX_EXPANSIONS;
boolean transpositions = FuzzyQueryBuilder.DEFAULT_TRANSPOSITIONS;
String rewrite = null;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName);
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (TERM_FIELD.match(currentFieldName)) {
value = parser.objectBytes();
} else if (VALUE_FIELD.match(currentFieldName)) {
value = parser.objectBytes();
} else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) {
boost = parser.floatValue();
} else if (Fuzziness.FIELD.match(currentFieldName)) {
fuzziness = Fuzziness.parse(parser);
} else if (PREFIX_LENGTH_FIELD.match(currentFieldName)) {
prefixLength = parser.intValue();
} else if (MAX_EXPANSIONS_FIELD.match(currentFieldName)) {
maxExpansions = parser.intValue();
} else if (TRANSPOSITIONS_FIELD.match(currentFieldName)) {
transpositions = parser.booleanValue();
} else if (REWRITE_FIELD.match(currentFieldName)) {
rewrite = parser.textOrNull();
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[fuzzy] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] unexpected token [" + token + "] after [" + currentFieldName + "]");
}
}
} else {
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName());
fieldName = parser.currentName();
value = parser.objectBytes();
}
}
return new FuzzyQueryBuilder(fieldName, value)
.fuzziness(fuzziness)
.prefixLength(prefixLength)
.maxExpansions(maxExpansions)
.transpositions(transpositions)
.rewrite(rewrite)
.boost(boost)
.queryName(queryName);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query query = null;
String rewrite = this.rewrite;
if (rewrite == null && context.isFilter()) {
rewrite = QueryParsers.CONSTANT_SCORE.getPreferredName();
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
if (query == null) {
int maxEdits = fuzziness.asDistance(BytesRefs.toString(value));
query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions);
}
if (query instanceof MultiTermQuery) {
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(rewrite, null);
QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod);
}
return query;
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, value, fuzziness, prefixLength, maxExpansions, transpositions, rewrite);
}
@Override
protected boolean doEquals(FuzzyQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(value, other.value) &&
Objects.equals(fuzziness, other.fuzziness) &&
Objects.equals(prefixLength, other.prefixLength) &&
Objects.equals(maxExpansions, other.maxExpansions) &&
Objects.equals(transpositions, other.transpositions) &&
Objects.equals(rewrite, other.rewrite);
}
}
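A minimal construction sketch (field name and values are illustrative; Fuzziness.ONE is one of the predefined org.elasticsearch.common.unit.Fuzziness constants):
// Hypothetical caller code.
FuzzyQueryBuilder query = new FuzzyQueryBuilder("user.name", "kimchy")
        .fuzziness(Fuzziness.ONE) // maximum edit distance of one
        .prefixLength(2)          // the first two characters must match exactly
        .maxExpansions(100)
        .transpositions(true);    // count a swap of adjacent characters as one edit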
|
apache-2.0
|
Kreolwolf1/Elastic
|
src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java
|
3508
|
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.io.stream;
import org.elasticsearch.common.Bytes;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.BytesStream;
import java.io.IOException;
import java.util.Arrays;
/**
*
*/
public class BytesStreamOutput extends StreamOutput implements BytesStream {
/**
* The buffer where data is stored.
*/
protected byte buf[];
/**
* The number of valid bytes in the buffer.
*/
protected int count;
public BytesStreamOutput() {
this(1024);
}
public BytesStreamOutput(int size) {
this.buf = new byte[size];
}
@Override
public boolean seekPositionSupported() {
return true;
}
@Override
public long position() throws IOException {
return count;
}
@Override
public void seek(long position) throws IOException {
if (position > Integer.MAX_VALUE) {
throw new UnsupportedOperationException();
}
count = (int) position;
}
@Override
public void writeByte(byte b) throws IOException {
int newcount = count + 1;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Bytes.oversize(newcount, 1));
}
buf[count] = b;
count = newcount;
}
public void skip(int length) {
int newcount = count + length;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Bytes.oversize(newcount, 1));
}
count = newcount;
}
@Override
public void writeBytes(byte[] b, int offset, int length) throws IOException {
if (length == 0) {
return;
}
int newcount = count + length;
if (newcount > buf.length) {
buf = Arrays.copyOf(buf, Bytes.oversize(newcount, 1));
}
System.arraycopy(b, offset, buf, count, length);
count = newcount;
}
public void seek(int seekTo) {
count = seekTo;
}
public void reset() {
count = 0;
}
@Override
public void flush() throws IOException {
// nothing to do there
}
@Override
public void close() throws IOException {
// nothing to do here
}
@Override
public BytesReference bytes() {
return new BytesArray(buf, 0, count);
}
/**
* Returns the current size of the buffer.
*
* @return the value of the <code>count</code> field, which is the number
* of valid bytes in this output stream.
* @see java.io.ByteArrayOutputStream#count
*/
public int size() {
return count;
}
}
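A minimal usage sketch (values are illustrative; IOException declarations are elided; growth past the initial size is handled by the oversize logic above):
// Hypothetical caller code.
BytesStreamOutput out = new BytesStreamOutput(16);
out.writeByte((byte) 1);
out.writeBytes(new byte[]{2, 3, 4}, 0, 3);
BytesReference ref = out.bytes(); // a view over the first out.size() bytes
out.seek(0);                      // rewind and overwrite in place
out.writeByte((byte) 9);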
|
apache-2.0
|
vdr007/ThriftyPaxos
|
src/applications/h2/src/test/org/h2/test/unit/TestPattern.java
|
3885
|
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.test.unit;
import java.text.Collator;
import org.h2.expression.CompareLike;
import org.h2.test.TestBase;
import org.h2.value.CompareMode;
/**
* Tests LIKE pattern matching.
*/
public class TestPattern extends TestBase {
/**
* Run just this test.
*
* @param a ignored
*/
public static void main(String... a) throws Exception {
TestBase.createCaller().init().test();
}
@Override
public void test() {
testCompareModeReuse();
testPattern();
}
private void testCompareModeReuse() {
CompareMode mode1, mode2;
mode1 = CompareMode.getInstance(null, 0);
mode2 = CompareMode.getInstance(null, 0);
assertTrue(mode1 == mode2);
mode1 = CompareMode.getInstance("DE", Collator.SECONDARY);
assertFalse(mode1 == mode2);
mode2 = CompareMode.getInstance("DE", Collator.SECONDARY);
assertTrue(mode1 == mode2);
}
private void testPattern() {
CompareMode mode = CompareMode.getInstance(null, 0);
CompareLike comp = new CompareLike(mode, "\\", null, null, null, false);
test(comp, "B", "%_");
test(comp, "A", "A%");
test(comp, "A", "A%%");
test(comp, "A_A", "%\\_%");
for (int i = 0; i < 10000; i++) {
String pattern = getRandomPattern();
String value = getRandomValue();
test(comp, value, pattern);
}
}
private void test(CompareLike comp, String value, String pattern) {
String regexp = initPatternRegexp(pattern, '\\');
boolean resultRegexp = value.matches(regexp);
boolean result = comp.test(pattern, value, '\\');
if (result != resultRegexp) {
fail("Error: >" + value + "< LIKE >" + pattern + "< result=" +
result + " resultReg=" + resultRegexp);
}
}
private static String getRandomValue() {
StringBuilder buff = new StringBuilder();
int len = (int) (Math.random() * 10);
String s = "AB_%\\";
for (int i = 0; i < len; i++) {
buff.append(s.charAt((int) (Math.random() * s.length())));
}
return buff.toString();
}
private static String getRandomPattern() {
StringBuilder buff = new StringBuilder();
int len = (int) (Math.random() * 4);
String s = "A%_\\";
for (int i = 0; i < len; i++) {
char c = s.charAt((int) (Math.random() * s.length()));
if ((c == '_' || c == '%') && Math.random() > 0.5) {
buff.append('\\');
} else if (c == '\\') {
buff.append(c);
}
buff.append(c);
}
return buff.toString();
}
private String initPatternRegexp(String pattern, char escape) {
int len = pattern.length();
StringBuilder buff = new StringBuilder();
for (int i = 0; i < len; i++) {
char c = pattern.charAt(i);
if (escape == c) {
                if (i + 1 >= len) {
fail("escape can't be last char");
}
c = pattern.charAt(++i);
buff.append('\\');
buff.append(c);
} else if (c == '%') {
buff.append(".*");
} else if (c == '_') {
buff.append('.');
} else if (c == '\\') {
buff.append("\\\\");
} else {
buff.append(c);
}
// TODO regexp: there are other chars that need escaping
}
String regexp = buff.toString();
// System.out.println("regexp = " + regexp);
return regexp;
}
}
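A small illustration of the LIKE semantics the test verifies (the construction mirrors testPattern above; the expected results in the comments are what the regex oracle would also produce):
// Hypothetical caller code, assuming org.h2.value.CompareMode and
// org.h2.expression.CompareLike are on the classpath.
CompareMode mode = CompareMode.getInstance(null, 0);
CompareLike like = new CompareLike(mode, "\\", null, null, null, false);
boolean a = like.test("%\\_%", "A_A", '\\'); // true: an escaped '_' matches literally
boolean b = like.test("A%", "AXYZ", '\\');   // true: '%' matches any run of characters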
|
apache-2.0
|
stoksey69/googleads-java-lib
|
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201502/AudienceSegmentAudienceSegmentType.java
|
1665
|
package com.google.api.ads.dfp.jaxws.v201502;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for AudienceSegment.AudienceSegmentType.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="AudienceSegment.AudienceSegmentType">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="FIRST_PARTY"/>
* <enumeration value="SHARED"/>
* <enumeration value="THIRD_PARTY"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "AudienceSegment.AudienceSegmentType")
@XmlEnum
public enum AudienceSegmentAudienceSegmentType {
/**
*
* First party segments created and owned by the publisher.
*
*
*/
FIRST_PARTY,
/**
*
* First party segments shared by other clients.
*
*
*/
SHARED,
/**
*
* Third party segments licensed by the publisher from data providers. This doesn't include
* Google-provided licensed segments.
*
*
*/
THIRD_PARTY,
/**
*
* The value returned if the actual value is not exposed by the requested API version.
*
*
*/
UNKNOWN;
public String value() {
return name();
}
public static AudienceSegmentAudienceSegmentType fromValue(String v) {
return valueOf(v);
}
}
|
apache-2.0
|
forplay/forplay
|
core/gwtbox2d/org/jbox2d/pooling/PoolingStackAABB.java
|
1597
|
package org.jbox2d.pooling;
import org.jbox2d.collision.AABB;
public class PoolingStackAABB extends PoolingStack<AABB>{
private final AABB[] pool;
private int index;
private final int size;
private final PoolContainer<AABB> container;
public PoolingStackAABB(int size) {
this.size = size;
this.pool = new AABB[size];
this.index = 0;
this.container = new PoolContainer<AABB>();
for (int i = 0; i < size; i++) {
pool[i] = new AABB();
}
}
@Override
public AABB pop() {
assert(index < size) : "End of stack reached, there is probably a leak somewhere";
return pool[index++];
}
@Override @SuppressWarnings("fallthrough")
public org.jbox2d.pooling.PoolingStack.PoolContainer<AABB> pop(int argNum) {
		assert(index + argNum <= size) : "End of stack reached, there is probably a leak somewhere";
switch (argNum) {
case 9:
container.p8 = pool[index++];
case 8:
container.p7 = pool[index++];
case 7:
container.p6 = pool[index++];
case 6:
container.p5 = pool[index++];
case 5:
container.p4 = pool[index++];
case 4:
container.p3 = pool[index++];
case 3:
container.p2 = pool[index++];
case 2:
container.p1 = pool[index++];
case 1:
container.p0 = pool[index++];
break;
default:
assert(false);
}
return container;
}
@Override
public void push(int argNum) {
index -= argNum;
assert (index >= 0) : "Beginning of stack reached, push/pops are unmatched";
}
}
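A minimal usage sketch for the pool above (the size is illustrative; that PoolContainer's p0..p8 fields are readable is an assumption based on their use in pop(int)):
// Hypothetical caller code.
PoolingStackAABB stack = new PoolingStackAABB(64);
AABB single = stack.pop();                     // borrow one AABB
PoolingStack.PoolContainer<AABB> three = stack.pop(3);
AABB a = three.p0, b = three.p1, c = three.p2; // the three borrowed instances
stack.push(3);                                 // return the batch
stack.push(1);                                 // return the single AABB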
|
apache-2.0
|
masonmei/pinpoint
|
collector/src/main/java/com/navercorp/pinpoint/collector/cluster/route/RouteStatus.java
|
1983
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.collector.cluster.route;
/**
* @author koo.taejin
*/
public enum RouteStatus {
OK(0, "OK"),
BAD_REQUEST(400, "Bad Request"),
    NOT_FOUND(404, "Target Route Agent Not Found."),
NOT_ACCEPTABLE(406, "Target Route Agent Not Acceptable."),
NOT_ACCEPTABLE_UNKNOWN(450, "Target Route Agent Not Acceptable."),
NOT_ACCEPTABLE_COMMAND(451, "Target Route Agent Not Acceptable command."),
    NOT_ACCEPTABLE_AGENT_TYPE(452, "Target Route Agent Not Acceptable agent type."),
AGENT_TIMEOUT(504, "Target Route Agent Timeout"),
CLOSED(606, "Target Route Agent Closed.");
private final int value;
private final String reasonPhrase;
private RouteStatus(int value, String reasonPhrase) {
this.value = value;
this.reasonPhrase = reasonPhrase;
}
public int getValue() {
return value;
}
public String getReasonPhrase() {
return reasonPhrase;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(this.getClass().getSimpleName());
sb.append("{");
sb.append("code=").append(getValue()).append(",");
sb.append("message=").append(getReasonPhrase());
sb.append('}');
return sb.toString();
}
}
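A small lookup helper sketch (hypothetical, not part of the original enum; throwing for unknown codes is an assumption):
// Hypothetical helper code.
static RouteStatus fromCode(int code) {
    for (RouteStatus status : RouteStatus.values()) {
        if (status.getValue() == code) {
            return status;
        }
    }
    throw new IllegalArgumentException("unknown route status code: " + code);
}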
|
apache-2.0
|
mogoweb/365browser
|
app/src/main/java/org/chromium/chrome/browser/compositor/scene_layer/SceneLayer.java
|
1487
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.compositor.scene_layer;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
/**
* Java representation of a scene layer.
*/
@JNINamespace("android")
public class SceneLayer {
private long mNativePtr;
/**
* Builds an instance of a {@link SceneLayer}.
*/
public SceneLayer() {
initializeNative();
}
/**
* Initializes the native component of a {@link SceneLayer}. Must be
* overridden to have a custom native component.
*/
protected void initializeNative() {
if (mNativePtr == 0) {
mNativePtr = nativeInit();
}
assert mNativePtr != 0;
}
/**
* Destroys this object and the corresponding native component.
*/
public void destroy() {
assert mNativePtr != 0;
nativeDestroy(mNativePtr);
assert mNativePtr == 0;
}
@CalledByNative
private void setNativePtr(long nativeSceneLayerPtr) {
assert mNativePtr == 0 || nativeSceneLayerPtr == 0;
mNativePtr = nativeSceneLayerPtr;
}
@CalledByNative
private long getNativePtr() {
return mNativePtr;
}
private native long nativeInit();
private native void nativeDestroy(long nativeSceneLayer);
}
|
apache-2.0
|
smanvi-pivotal/geode
|
geode-core/src/test/java/org/apache/geode/sequence/HydraLineMapper.java
|
4126
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.sequence;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
*/
public class HydraLineMapper implements LineMapper {
private static final Pattern VM_NAME_PATTERN = Pattern.compile("(vm_\\d+).*_(\\d+)(_end)?\\.log");
private static final Pattern DISK_DIR_PATTERN = Pattern.compile("vm_(\\d+).*_disk_1");
private final Map<String, String> processIdToVMName = new HashMap<String, String>();
private final DefaultLineMapper defaultMapper = new DefaultLineMapper();
public HydraLineMapper(File[] graphFiles) {
File firstFile = graphFiles[0];
File directory = firstFile.getParentFile();
if (directory == null || !new File(directory, "latest.prop").exists()) {
directory = new File(".");
}
String[] files = directory.list();
for (String file : files) {
Matcher matcher = VM_NAME_PATTERN.matcher(file);
if (matcher.matches()) {
processIdToVMName.put(matcher.group(2), matcher.group(1));
}
}
for (String file : files) {
Matcher matcher = DISK_DIR_PATTERN.matcher(file);
if (matcher.matches()) {
String storeId = getDiskStoreId(file);
if (storeId != null) {
processIdToVMName.put(storeId, "disk_" + matcher.group(1));
}
}
}
}
private String getDiskStoreId(String diskStoreDir) {
File dir = new File(diskStoreDir);
String[] files = dir.list();
for (String fileName : files) {
if (fileName.endsWith(".if")) {
try {
return getDiskStoreIdFromInitFile(dir, fileName);
} catch (Exception e) {
return null;
}
}
}
return null;
}
private String getDiskStoreIdFromInitFile(File dir, String fileName)
throws FileNotFoundException, IOException {
FileInputStream fis = new FileInputStream(new File(dir, fileName));
try {
byte[] bytes = new byte[1 + 8 + 8];
fis.read(bytes);
ByteBuffer buffer = ByteBuffer.wrap(bytes);
// Skip the record type.
buffer.get();
long least = buffer.getLong();
long most = buffer.getLong();
UUID id = new UUID(most, least);
return id.toString();
} finally {
fis.close();
}
}
public String getShortNameForLine(String lineName) {
String name = defaultMapper.getShortNameForLine(lineName);
if (processIdToVMName.containsKey(name)) {
return processIdToVMName.get(name);
} else {
return name;
}
}
public static boolean isInHydraRun(File[] graphFiles) {
if (graphFiles.length == 0) {
return false;
}
File firstFile = graphFiles[0];
File parentFile = firstFile.getParentFile();
for (File file : graphFiles) {
if (parentFile == null && file.getParentFile() == null) {
return true;
}
if (parentFile == null || file.getParentFile() == null
|| !file.getParentFile().equals(parentFile)) {
return false;
}
}
return new File(parentFile, "latest.prop").exists() || new File("latest.prop").exists();
}
}
|
apache-2.0
|
Thopap/camel
|
components/camel-salesforce/camel-salesforce-maven-plugin/src/test/java/org/apache/camel/maven/CamelSalesforceMojoIntegrationTest.java
|
5164
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.maven;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
import com.fasterxml.jackson.module.jsonSchema.types.ObjectSchema;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.SalesforceLoginConfig;
import org.apache.camel.component.salesforce.api.utils.JsonUtils;
import org.apache.maven.plugin.logging.SystemStreamLog;
import org.junit.Assert;
import org.junit.Test;
public class CamelSalesforceMojoIntegrationTest {
private static final String TEST_LOGIN_PROPERTIES = "../test-salesforce-login.properties";
@Test
public void testExecute() throws Exception {
CamelSalesforceMojo mojo = createMojo();
// generate code
mojo.execute();
// validate generated code
// check that it was generated
Assert.assertTrue("Output directory was not created", mojo.outputDirectory.exists());
// TODO check that the generated code compiles
}
@Test
public void testExecuteJsonSchema() throws Exception {
CamelSalesforceMojo mojo = createMojo();
mojo.jsonSchema = true;
mojo.jsonSchemaFilename = "test-schema.json";
mojo.jsonSchemaId = JsonUtils.DEFAULT_ID_PREFIX;
// generate code
mojo.execute();
// validate generated schema
File schemaFile = mojo.outputDirectory.toPath().resolve("test-schema.json").toFile();
Assert.assertTrue("Output file was not created",
schemaFile.exists());
ObjectMapper objectMapper = JsonUtils.createObjectMapper();
JsonSchema jsonSchema = objectMapper.readValue(schemaFile, JsonSchema.class);
Assert.assertTrue("Expected root JSON schema with oneOf element",
jsonSchema.isObjectSchema() && !((ObjectSchema)jsonSchema).getOneOf().isEmpty());
}
protected CamelSalesforceMojo createMojo() throws IOException {
CamelSalesforceMojo mojo = new CamelSalesforceMojo();
mojo.setLog(new SystemStreamLog());
// set login properties
setLoginProperties(mojo);
// set defaults
mojo.version = System.getProperty("apiVersion", SalesforceEndpointConfig.DEFAULT_VERSION);
mojo.loginUrl = System.getProperty("loginUrl", SalesforceLoginConfig.DEFAULT_LOGIN_URL);
mojo.outputDirectory = new File("target/generated-sources/camel-salesforce");
mojo.packageName = "org.apache.camel.salesforce.dto";
// set code generation properties
mojo.includePattern = "(.*__c)|(PushTopic)|(Document)|(Account)";
// remove generated code directory
if (mojo.outputDirectory.exists()) {
// remove old files
for (File file : mojo.outputDirectory.listFiles()) {
file.delete();
}
mojo.outputDirectory.delete();
}
return mojo;
}
private void setLoginProperties(CamelSalesforceMojo mojo) throws IOException {
// load test-salesforce-login properties
Properties properties = new Properties();
InputStream stream = null;
try {
stream = new FileInputStream(TEST_LOGIN_PROPERTIES);
properties.load(stream);
mojo.clientId = properties.getProperty("salesforce.client.id");
mojo.clientSecret = properties.getProperty("salesforce.client.secret");
mojo.userName = properties.getProperty("salesforce.username");
mojo.password = properties.getProperty("salesforce.password");
} catch (FileNotFoundException e) {
throw new FileNotFoundException("Create a properties file named "
+ TEST_LOGIN_PROPERTIES + " with clientId, clientSecret, userName, password"
+ " for a Salesforce account with Merchandise and Invoice objects from Salesforce Guides.");
} finally {
if (stream != null) {
try {
stream.close();
} catch (IOException ignore) {
// noop
}
}
}
}
}
|
apache-2.0
|
rodsol/relex
|
src/java/relex/logic/Loader.java
|
4466
|
/*
* Copyright 2013 OpenCog Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Alex van der Peet <alex.van.der.peet@gmail.com>
*/
package relex.logic;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
// Description: Class that takes a ReLex2Logic rule file and parses these into a Rule & Criterium structure that can be applied to the
// root FeatureNode of a dependency graph.
// File format example:
/*
[SVO] {2} <SV, SVP> _subj($y, $x) & _obj($y, $z) => (SVO-rule $x (get_instance_name $x word_index sentence_index) $y (get_instance_name $y word_index sentence_index) $z (get_instance_name $z word_index sentence_index))
[AMOD] {3} <> _amod($N, $A) => (amod-rule $N (get_instance_name $N word_index sentence_index) $A (get_instance_name $A word_index sentence_index))
[ADVMOD] {4} <> _advmod($V, $ADV) => (advmod-rule $V (get_instance_name $V word_index sentence_index) $ADV (get_instance_name $ADV word_index sentence_index))
[TENSEPOS] {5} <> tense($W, $Tense) & pos($W, verb) => (tense-rule $W (get_instance_name $W word_index sentence_index) $Tense)
[DET] {6} <> _det($W, those) => (those-rule $W (get_instance_name $W word_index sentence_index) choose_var_name)
[NEGFLAG] {7} <> NEGATIVE-FLAG($V, T) => (negative-rule $V (get_instance_name $V word_index sentence_index))
[POSS1A] {8} <POSS1B, POSS2> _poss($N, $W) & pos($W, adj) => (possesive-rule $N (get_instance_name $N word_index sentence_index) $W (get_instance_name $W word_index sentence_index))
[POSS1B] {8} <POSS1A, POSS2> _poss($N, $W) & pos($W, noun) & person-FLAG($W, T) => (possesive-rule $N (get_instance_name $N word_index sentence_index) $W (get_instance_name $W word_index sentence_index))
[POSS2] {8} <POSS1A, POSS1B> _poss($N, $W) & pos($W, noun) => (possesive-rule $N (get_instance_name $V word_index sentence_index) $W (get_instance_name $W word_index sentence_index))
*/
/** Loads a text file with ReLex2Logic rules into a RuleSet class with Rule objects.
* @author Alex van der Peet <alex.van.der.peet@gmail.com>
* @version 1.0 (current version number of program)
* @since 2013-11-08 (the version of the package this class was first added to)
*/
public class Loader
{
/**
* The rules once they are loaded
*/
private RuleSet _relex2SchemeRuleSet = new RuleSet();
/**
* Processes a rule file and loads them into _relex2SchemeRuleSet
* @param ruleFile The full path to the rule file.
	 * @return Boolean indicating whether the rules were loaded successfully.
*/
public Boolean loadRules(String ruleFile)
{
		Boolean loadSuccessful = false;
File file = new File(ruleFile);
Scanner input = null;
try {
input = new Scanner(file);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (input != null)
{
// Parse file
int iRules = 0;
while (input.hasNext())
{
// Get line
String nextLine = input.nextLine();
if (null == nextLine) break;
if (!nextLine.equals(""))
{
if (!nextLine.substring(0, 1).equals("#"))
{
Rule newRule = new Rule(nextLine);
_relex2SchemeRuleSet.addRule(newRule);
iRules++;
}
}
// Parse line
// (SVO-rule $x (get_instance_name $x word_index sentence_index) $y (get_instance_name $y word_index sentence_index) $z (get_instance_name $z word_index sentence_index))
}
input.close();
System.out.println("Loaded " + iRules + " ReLex2Logic rule(s) succesfully.");
loadSuccesful = true;
}
return loadSuccesful;
}
/**
* Get an ‘unused’ set of the rules, could be used later for batch processing
* @return A RuleSet object with a fresh set of rules.
*/
public RuleSet getFreshRuleSet()
{
RuleSet freshRuleSet = new RuleSet();
for (Rule rule: _relex2SchemeRuleSet.getRules()) {
freshRuleSet.addRule(new Rule(rule.getRuleString()));
}
return freshRuleSet;
}
}
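A minimal usage sketch (the rule-file path is illustrative):
// Hypothetical caller code.
Loader loader = new Loader();
if (loader.loadRules("/opt/relex/relex2logic.txt")) {
    RuleSet rules = loader.getFreshRuleSet(); // an independent copy for this batch
}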
|
apache-2.0
|
goodwinnk/intellij-community
|
java/testFramework/src/com/intellij/ide/util/frameworkSupport/FrameworkSupportProviderTestCase.java
|
6135
|
package com.intellij.ide.util.frameworkSupport;
import com.intellij.facet.Facet;
import com.intellij.facet.FacetManager;
import com.intellij.facet.FacetTypeId;
import com.intellij.facet.ui.FacetBasedFrameworkSupportProvider;
import com.intellij.framework.FrameworkType;
import com.intellij.framework.addSupport.FrameworkSupportInModuleConfigurable;
import com.intellij.framework.addSupport.FrameworkSupportInModuleProvider;
import com.intellij.ide.util.newProjectWizard.FrameworkSupportNode;
import com.intellij.ide.util.newProjectWizard.OldFrameworkSupportProviderWrapper;
import com.intellij.ide.util.newProjectWizard.impl.FrameworkSupportCommunicator;
import com.intellij.ide.util.newProjectWizard.impl.FrameworkSupportModelBase;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.IdeaModifiableModelsProvider;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainerFactory;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.IdeaTestCase;
import com.intellij.testFramework.PsiTestUtil;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.util.*;
/**
* @author nik
*/
public abstract class FrameworkSupportProviderTestCase extends IdeaTestCase {
private FrameworkSupportModelBase myFrameworkSupportModel;
private Map<FrameworkType, FrameworkSupportInModuleConfigurable> myConfigurables;
private Map<FrameworkType, FrameworkSupportNode> myNodes;
@Override
protected void setUp() throws Exception {
super.setUp();
final Project project = getProject();
myFrameworkSupportModel = new FrameworkSupportModelImpl(project, "", LibrariesContainerFactory.createContainer(project));
myNodes = new LinkedHashMap<>();
final List<FrameworkSupportInModuleProvider> providers = FrameworkSupportUtil.getAllProviders();
Collections.sort(providers, FrameworkSupportUtil.getFrameworkSupportProvidersComparator(providers));
for (FrameworkSupportInModuleProvider provider : providers) {
final FrameworkSupportNode node = new FrameworkSupportNode(provider, null, myFrameworkSupportModel, getTestRootDisposable());
myNodes.put(provider.getFrameworkType(), node);
myFrameworkSupportModel.registerComponent(provider, node);
}
myConfigurables = new HashMap<>();
}
protected void addSupport() {
try {
WriteCommandAction.writeCommandAction(getProject()).run(() -> {
final VirtualFile root = getVirtualFile(createTempDir("contentRoot"));
PsiTestUtil.addContentRoot(myModule, root);
final ModifiableRootModel model = ModuleRootManager.getInstance(myModule).getModifiableModel();
try {
List<FrameworkSupportConfigurable> selectedConfigurables = new ArrayList<>();
final IdeaModifiableModelsProvider modelsProvider = new IdeaModifiableModelsProvider();
for (FrameworkSupportNode node : myNodes.values()) {
if (node.isChecked()) {
final FrameworkSupportInModuleConfigurable configurable = getOrCreateConfigurable(node.getUserObject());
configurable.addSupport(myModule, model, modelsProvider);
if (configurable instanceof OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper) {
selectedConfigurables
.add(((OldFrameworkSupportProviderWrapper.FrameworkSupportConfigurableWrapper)configurable).getConfigurable());
}
}
}
for (FrameworkSupportCommunicator communicator : FrameworkSupportCommunicator.EP_NAME.getExtensions()) {
communicator.onFrameworkSupportAdded(myModule, model, selectedConfigurables, myFrameworkSupportModel);
}
}
finally {
model.commit();
}
for (FrameworkSupportInModuleConfigurable configurable : myConfigurables.values()) {
Disposer.dispose(configurable);
}
});
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
protected FrameworkSupportInModuleConfigurable selectFramework(@NotNull FacetTypeId<?> id) {
return selectFramework(FacetBasedFrameworkSupportProvider.getProviderId(id));
}
protected FrameworkSupportInModuleConfigurable selectFramework(@NotNull String id) {
final FrameworkSupportInModuleProvider provider = FrameworkSupportUtil.findProvider(id, FrameworkSupportUtil.getAllProviders());
if (provider != null) {
return selectFramework(provider);
}
fail("Framework provider with id='" + id + "' not found");
return null;
}
protected FrameworkSupportInModuleConfigurable selectFramework(@NotNull FrameworkSupportInModuleProvider provider) {
final FrameworkSupportInModuleConfigurable configurable = getOrCreateConfigurable(provider);
myNodes.get(provider.getFrameworkType()).setChecked(true);
configurable.onFrameworkSelectionChanged(true);
return configurable;
}
private FrameworkSupportInModuleConfigurable getOrCreateConfigurable(FrameworkSupportInModuleProvider provider) {
FrameworkSupportInModuleConfigurable configurable = myConfigurables.get(provider.getFrameworkType());
if (configurable == null) {
configurable = provider.createConfigurable(myFrameworkSupportModel);
myConfigurables.put(provider.getFrameworkType(), configurable);
}
return configurable;
}
protected void selectVersion(FrameworkType frameworkType, com.intellij.framework.FrameworkVersion version) {
myFrameworkSupportModel.setSelectedVersion(frameworkType.getId(), version);
}
@NotNull
protected <F extends Facet> F getFacet(FacetTypeId<F> id) {
final F facet = FacetManager.getInstance(myModule).getFacetByType(id);
assertNotNull(id + " facet not found", facet);
return facet;
}
protected VirtualFile getContentRoot() {
return ModuleRootManager.getInstance(myModule).getContentRoots()[0];
}
}
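A minimal sketch of a concrete test built on this base class; the provider id
"my-framework" is a hypothetical placeholder, not a real registered provider:

class MyFrameworkSupportTest extends FrameworkSupportProviderTestCase {
    public void testAddMyFrameworkSupport() {
        selectFramework("my-framework"); // hypothetical provider id
        addSupport();
        assertNotNull(getContentRoot());
    }
}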
|
apache-2.0
|
Distrotech/fop
|
src/java/org/apache/fop/afp/ptoca/PtocaConstants.java
|
2014
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.afp.ptoca;
/**
* A collection of PTOCA constants.
*/
public interface PtocaConstants {
/**
* "Escape" sequence for normal PTOCA command sequences.
*/
byte[] ESCAPE = new byte[] {0x2B, (byte)0xD3};
/** Bit to set for chained control sequences */
byte CHAIN_BIT = 1;
/** Set Intercharacter Adjustment */
byte SIA = (byte)0xC2;
/** Set Variable Space Character Increment */
byte SVI = (byte)0xC4;
/** Absolute Move Inline */
byte AMI = (byte)0xC6;
/** Relative Move Inline */
byte RMI = (byte)0xC8;
/** Absolute Move Baseline */
byte AMB = (byte)0xD2;
/** Transparent Data */
byte TRN = (byte)0xDA;
/** Draw I-axis Rule */
byte DIR = (byte)0xE4;
/** Draw B-axis Rule */
byte DBR = (byte)0xE6;
/** Set Extended Text Color */
byte SEC = (byte)0x80;
/** Set Coded Font Local */
byte SCFL = (byte)0xF0;
/** Set Text Orientation */
byte STO = (byte)0xF6;
/** No Operation */
byte NOP = (byte)0xF8;
/** Maximum size of transparent data chunks */
    int TRANSPARENT_DATA_MAX_SIZE = 253; // max length 255 minus the 2-byte control sequence prefix (length byte + function byte)
}
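A rough sketch (not part of FOP) of how these constants combine into a single
chained Transparent Data (TRN) control sequence; the payload handling is an
illustrative assumption, and data must not exceed TRANSPARENT_DATA_MAX_SIZE:

class PtocaTrnSketch {
    static byte[] chainedTrn(byte[] data) {
        // data.length must be <= TRANSPARENT_DATA_MAX_SIZE for one sequence
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        out.write(PtocaConstants.ESCAPE[0]);
        out.write(PtocaConstants.ESCAPE[1]);
        out.write(2 + data.length);                                // length byte counts itself plus the function byte
        out.write(PtocaConstants.TRN | PtocaConstants.CHAIN_BIT);  // odd function byte marks a chained sequence
        out.write(data, 0, data.length);
        return out.toByteArray();
    }
}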
|
apache-2.0
|
subhrajyotim/camunda-bpm-platform
|
engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/util/container/TomcatServerBootstrap.java
|
3731
|
package org.camunda.bpm.engine.rest.util.container;
import java.io.File;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.startup.Tomcat;
import org.camunda.bpm.engine.rest.spi.ProcessEngineProvider;
import org.camunda.bpm.engine.rest.spi.impl.MockedProcessEngineProvider;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.ClassLoaderAsset;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.PomEquippedResolveStage;
import org.jboss.shrinkwrap.resolver.api.maven.ScopeType;
import org.jboss.shrinkwrap.resolver.api.maven.coordinate.MavenDependencies;
public abstract class TomcatServerBootstrap extends EmbeddedServerBootstrap {
private Tomcat tomcat;
private String workingDir;
private String webXmlPath;
public TomcatServerBootstrap(String webXmlPath) {
this.webXmlPath = webXmlPath;
}
public void start() {
Properties serverProperties = readProperties();
int port = Integer.parseInt(serverProperties.getProperty(PORT_PROPERTY));
tomcat = new Tomcat();
tomcat.setPort(port);
tomcat.setBaseDir(getWorkingDir());
tomcat.getHost().setAppBase(getWorkingDir());
tomcat.getHost().setAutoDeploy(true);
tomcat.getHost().setDeployOnStartup(true);
String contextPath = "/" + getContextPath();
PomEquippedResolveStage resolver = Maven.configureResolver()
.useLegacyLocalRepo(true).workOffline().loadPomFromFile("pom.xml");
WebArchive wa = ShrinkWrap.create(WebArchive.class, "rest-test.war").setWebXML(webXmlPath)
.addAsLibraries(resolver.resolve("org.codehaus.jackson:jackson-jaxrs:1.6.5").withTransitivity().asFile())
.addAsLibraries(resolver.addDependencies(
MavenDependencies.createDependency("org.mockito:mockito-core", ScopeType.TEST, false,
MavenDependencies.createExclusion("org.hamcrest:hamcrest-core"))).resolve()
.withTransitivity().asFile())
.addAsServiceProvider(ProcessEngineProvider.class, MockedProcessEngineProvider.class)
.add(new ClassLoaderAsset("runtime/tomcat/context.xml"), "META-INF/context.xml")
.addPackages(true, "org.camunda.bpm.engine.rest");
addRuntimeSpecificLibraries(wa, resolver);
wa.setWebXML(webXmlPath);
String webAppPath = getWorkingDir() + "/" + getContextPath() + ".war";
wa.as(ZipExporter.class).exportTo(new File(webAppPath), true);
tomcat.addWebapp(tomcat.getHost(), contextPath, webAppPath);
try {
tomcat.start();
} catch (LifecycleException e) {
throw new RuntimeException(e);
}
}
protected abstract void addRuntimeSpecificLibraries(WebArchive wa, PomEquippedResolveStage resolver);
private String getContextPath() {
return "rest-test";
}
public void stop() {
try {
try {
tomcat.stop();
} catch (Exception e) {
Logger.getLogger(getClass().getName()).log(Level.WARNING, "Failed to stop tomcat instance", e);
}
try {
tomcat.destroy();
} catch (Exception e) {
Logger.getLogger(getClass().getName()).log(Level.WARNING, "Failed to destroy instance", e);
}
tomcat = null;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public String getWorkingDir() {
if (workingDir == null) {
workingDir = System.getProperty("java.io.tmpdir");
}
return workingDir;
}
public void setWorkingDir(String workingDir) {
this.workingDir = workingDir;
}
}
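A minimal sketch of a concrete subclass wiring in runtime-specific libraries;
the Maven coordinate below is an illustrative assumption, not a dependency this
project necessarily declares:

class ResteasyTomcatServerBootstrap extends TomcatServerBootstrap {
    ResteasyTomcatServerBootstrap(String webXmlPath) {
        super(webXmlPath);
    }
    @Override
    protected void addRuntimeSpecificLibraries(WebArchive wa, PomEquippedResolveStage resolver) {
        // version is taken from the project pom via the pom-equipped resolver
        wa.addAsLibraries(resolver.resolve("org.jboss.resteasy:resteasy-jaxrs").withTransitivity().asFile());
    }
}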
|
apache-2.0
|
hugs/selenium
|
selenium/test/java/com/thoughtworks/selenium/corebased/TestOpen.java
|
1567
|
package com.thoughtworks.selenium.corebased;
import com.thoughtworks.selenium.*;
import org.testng.annotations.*;
import static org.testng.Assert.*;
import java.util.regex.Pattern;
public class TestOpen extends SeleneseTestNgHelper {
@Test public void testOpen() throws Exception {
selenium.open("../tests/html/test_open.html");
verifyTrue(selenium.getLocation().matches("^[\\s\\S]*/tests/html/test_open\\.html$"));
// Should really split these verifications into their own test file.
verifyTrue(Pattern.compile(".*/tests/html/[Tt]est_open.html").matcher(selenium.getLocation()).find());
verifyFalse(selenium.getLocation().matches("^[\\s\\S]*/foo\\.html$"));
verifyTrue(selenium.isTextPresent("glob:This is a test of the open command."));
verifyTrue(selenium.isTextPresent("This is a test of the open command."));
verifyTrue(selenium.isTextPresent("exact:This is a test of"));
verifyTrue(selenium.isTextPresent("regexp:This is a test of"));
verifyTrue(selenium.isTextPresent("regexp:T*his is a test of"));
verifyFalse(selenium.isTextPresent("exact:XXXXThis is a test of"));
verifyFalse(selenium.isTextPresent("regexp:ThXXXXXXXXXis is a test of"));
selenium.open("../tests/html/test_page.slow.html");
verifyTrue(selenium.getLocation().matches("^[\\s\\S]*/tests/html/test_page\\.slow\\.html$"));
verifyEquals(selenium.getTitle(), "Slow Loading Page");
selenium.setTimeout("5000");
selenium.open("../tests/html/test_open.html");
selenium.open("../tests/html/test_open.html");
selenium.open("../tests/html/test_open.html");
}
}
|
apache-2.0
|
yuri0x7c1/ofbiz-explorer
|
src/test/resources/apache-ofbiz-17.12.04/framework/entity/src/main/java/org/apache/ofbiz/entity/GenericResultSetClosedException.java
|
1415
|
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.entity;
/**
* GenericResultSetClosedException
*
*/
@SuppressWarnings("serial")
public class GenericResultSetClosedException extends GenericEntityException {
public GenericResultSetClosedException() {
super();
}
public GenericResultSetClosedException(String str) {
super(str);
}
public GenericResultSetClosedException(String str, Throwable nested) {
super(str, nested);
}
}
|
apache-2.0
|
hasithaa/wso2-ode
|
bpel-runtime/src/test/java/org/apache/ode/bpel/elang/URIResolverTest.java
|
3600
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.elang;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.matchers.JUnitMatchers.containsString;
import java.io.File;
import java.net.URI;
import javax.xml.transform.Source;
import org.apache.ode.bpel.elang.xpath10.o.OXPath10Expression;
import org.apache.ode.utils.DOMUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.w3c.dom.Document;
public class URIResolverTest {
@Test
public void testResolveExistingFile() throws Exception {
OXPath10Expression expr = new OXPath10Expression(null, null, null, null);
URI baseResourceURI = getClass().getResource("/xpath20/").toURI();
XslRuntimeUriResolver resolver = new XslRuntimeUriResolver(expr, baseResourceURI);
Source source = resolver.resolve("variables.xml", null);
Document doc = DOMUtils.sourceToDOM(source);
assertThat(DOMUtils.domToString(doc), containsString("<variables>"));
}
@Test
public void testResolveNonExistingFile() throws Exception {
OXPath10Expression expr = new OXPath10Expression(null, null, null, null);
URI baseResourceURI = getClass().getResource("/xpath20/").toURI();
XslRuntimeUriResolver resolver = new XslRuntimeUriResolver(expr, baseResourceURI);
assertNull(resolver.resolve("variablesa.xml", null));
}
@Test
public void testEncoding() throws Exception {
Document original = DOMUtils.parse(getClass().getResourceAsStream("/xslt/test.xml"));
OXPath10Expression expr = new OXPath10Expression(null, null, null, null);
URI baseResourceURI = getClass().getResource("/xslt/").toURI();
XslRuntimeUriResolver resolver = new XslRuntimeUriResolver(expr, baseResourceURI);
Document doc = DOMUtils.sourceToDOM(resolver.resolve("test.xml", null));
assertArrayEquals(original.getDocumentElement().getTextContent().trim().getBytes(), doc.getDocumentElement().getTextContent().trim().getBytes());
}
@Test
public void testResolveURL() throws Exception {
OXPath10Expression expr = new OXPath10Expression(null, null, null, null);
URI baseResourceURI = getClass().getResource("/xpath20/").toURI();
XslRuntimeUriResolver resolver = new XslRuntimeUriResolver(expr, baseResourceURI);
// the local XSD file (in bpel-schemas module) is located using the current directory
File file = new File("../bpel-schemas/src/main/xsd/pmapi.xsd");
Source source = resolver.resolve(file.toURI().toString(), null);
Document doc = DOMUtils.sourceToDOM(source);
assertThat(DOMUtils.domToString(doc), containsString("activity-info"));
}
}
|
apache-2.0
|
tieusangaka/Paralloid
|
paralloidexample/src/main/java/uk/co/chrisjenx/paralloidexample/ParallaxViewDownFragment.java
|
1360
|
package uk.co.chrisjenx.paralloidexample;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ScrollView;
import uk.co.chrisjenx.paralloid.Parallaxor;
import uk.co.chrisjenx.paralloid.transform.InvertTransformer;
/**
 * A dummy fragment representing a section of the app that simply
 * displays dummy text.
*/
public class ParallaxViewDownFragment extends Fragment {
/**
* The fragment argument representing the section number for this
* fragment.
*/
public static final String ARG_SECTION_NUMBER = "section_number";
public ParallaxViewDownFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_invert_transformer, container, false);
ImageView imageView = (ImageView) rootView.findViewById(R.id.image_view);
ScrollView scrollView = (ScrollView) rootView.findViewById(R.id.scroll_view);
if (scrollView instanceof Parallaxor) {
((Parallaxor) scrollView).parallaxViewBy(imageView, new InvertTransformer(), 0.35f);
}
return rootView;
}
}
|
apache-2.0
|
jmluy/elasticsearch
|
client/rest-high-level/src/test/java/org/elasticsearch/client/IngestRequestConvertersTests.java
|
5986
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentType;
import org.junit.Assert;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;
public class IngestRequestConvertersTests extends ESTestCase {
public void testPutPipeline() throws IOException {
String pipelineId = "some_pipeline_id";
PutPipelineRequest request = new PutPipelineRequest(
"some_pipeline_id",
new BytesArray("{}".getBytes(StandardCharsets.UTF_8)),
XContentType.JSON
);
Map<String, String> expectedParams = new HashMap<>();
RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
Request expectedRequest = IngestRequestConverters.putPipeline(request);
StringJoiner endpoint = new StringJoiner("/", "/", "");
endpoint.add("_ingest/pipeline");
endpoint.add(pipelineId);
Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
Assert.assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod());
Assert.assertEquals(expectedParams, expectedRequest.getParameters());
}
public void testGetPipeline() {
String pipelineId = "some_pipeline_id";
Map<String, String> expectedParams = new HashMap<>();
        GetPipelineRequest request = new GetPipelineRequest(pipelineId);
RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
Request expectedRequest = IngestRequestConverters.getPipeline(request);
StringJoiner endpoint = new StringJoiner("/", "/", "");
endpoint.add("_ingest/pipeline");
endpoint.add(pipelineId);
Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
Assert.assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod());
Assert.assertEquals(expectedParams, expectedRequest.getParameters());
}
public void testDeletePipeline() {
String pipelineId = "some_pipeline_id";
Map<String, String> expectedParams = new HashMap<>();
DeletePipelineRequest request = new DeletePipelineRequest(pipelineId);
RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
Request expectedRequest = IngestRequestConverters.deletePipeline(request);
StringJoiner endpoint = new StringJoiner("/", "/", "");
endpoint.add("_ingest/pipeline");
endpoint.add(pipelineId);
Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
Assert.assertEquals(HttpDelete.METHOD_NAME, expectedRequest.getMethod());
Assert.assertEquals(expectedParams, expectedRequest.getParameters());
}
public void testSimulatePipeline() throws IOException {
String pipelineId = ESTestCase.randomBoolean() ? "some_pipeline_id" : null;
boolean verbose = ESTestCase.randomBoolean();
String json = "{"
+ " \"pipeline\": {"
+ " \"description\": \"_description\","
+ " \"processors\": ["
+ " {"
+ " \"set\": {"
+ " \"field\": \"field2\","
+ " \"value\": \"_value\""
+ " }"
+ " }"
+ " ]"
+ " },"
+ " \"docs\": ["
+ " {"
+ " \"_index\": \"index\","
+ " \"_type\": \"_doc\","
+ " \"_id\": \"id\","
+ " \"_source\": {"
+ " \"foo\": \"rab\""
+ " }"
+ " }"
+ " ]"
+ "}";
SimulatePipelineRequest request = new SimulatePipelineRequest(
new BytesArray(json.getBytes(StandardCharsets.UTF_8)),
XContentType.JSON
);
request.setId(pipelineId);
request.setVerbose(verbose);
Map<String, String> expectedParams = new HashMap<>();
expectedParams.put("verbose", Boolean.toString(verbose));
Request expectedRequest = IngestRequestConverters.simulatePipeline(request);
StringJoiner endpoint = new StringJoiner("/", "/", "");
endpoint.add("_ingest/pipeline");
if (pipelineId != null && pipelineId.isEmpty() == false) {
endpoint.add(pipelineId);
}
endpoint.add("_simulate");
Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
Assert.assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod());
Assert.assertEquals(expectedParams, expectedRequest.getParameters());
RequestConvertersTests.assertToXContentBody(request, expectedRequest.getEntity());
}
}
|
apache-2.0
|
googleapis/google-api-java-client-services
|
clients/google-api-services-cloudfunctions/v1/1.31.0/com/google/api/services/cloudfunctions/v1/model/ListOperationsResponse.java
|
2919
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudfunctions.v1.model;
/**
* The response message for Operations.ListOperations.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Functions API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ListOperationsResponse extends com.google.api.client.json.GenericJson {
/**
* The standard List next-page token.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String nextPageToken;
/**
* A list of operations that matches the specified filter in the request.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Operation> operations;
/**
* The standard List next-page token.
* @return value or {@code null} for none
*/
public java.lang.String getNextPageToken() {
return nextPageToken;
}
/**
* The standard List next-page token.
* @param nextPageToken nextPageToken or {@code null} for none
*/
public ListOperationsResponse setNextPageToken(java.lang.String nextPageToken) {
this.nextPageToken = nextPageToken;
return this;
}
/**
* A list of operations that matches the specified filter in the request.
* @return value or {@code null} for none
*/
public java.util.List<Operation> getOperations() {
return operations;
}
/**
* A list of operations that matches the specified filter in the request.
* @param operations operations or {@code null} for none
*/
public ListOperationsResponse setOperations(java.util.List<Operation> operations) {
this.operations = operations;
return this;
}
@Override
public ListOperationsResponse set(String fieldName, Object value) {
return (ListOperationsResponse) super.set(fieldName, value);
}
@Override
public ListOperationsResponse clone() {
return (ListOperationsResponse) super.clone();
}
}
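A pagination sketch (not generated code) showing how a caller might drain all
pages of this response; fetchPage is a hypothetical helper standing in for the
actual HTTP call:

class ListOperationsPager {
    void drainAllPages() throws java.io.IOException {
        String token = null;
        do {
            ListOperationsResponse page = fetchPage(token); // hypothetical request helper
            if (page.getOperations() != null) {
                for (Operation op : page.getOperations()) {
                    // process each operation here
                }
            }
            token = page.getNextPageToken();
        } while (token != null && !token.isEmpty());
    }

    private ListOperationsResponse fetchPage(String pageToken) throws java.io.IOException {
        throw new UnsupportedOperationException("illustrative only");
    }
}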
|
apache-2.0
|
pellcorp/sql-parser
|
src/test/java/io/crate/sql/parser/TreeAssertions.java
|
3256
|
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.sql.parser;
import io.crate.sql.tree.*;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import javax.annotation.Nullable;
import java.util.List;
import static io.crate.sql.SqlFormatter.formatSql;
import static io.crate.sql.parser.SqlParser.createStatement;
import static java.lang.String.format;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
public final class TreeAssertions
{
private TreeAssertions() {}
public static void assertFormattedSql(Node expected)
{
// TODO: support formatting all statement types
if (!(expected instanceof Query || expected instanceof CreateTable)) {
return;
}
String formatted = formatSql(expected);
// verify round-trip of formatting already-formatted SQL
assertEquals(formatSql(createStatement(formatted)), formatted);
// compare parsed tree with parsed tree of formatted SQL
Statement actual = createStatement(formatted);
if (!actual.equals(expected)) {
// simplify finding the non-equal part of the tree
assertListEquals(linearizeTree(actual), linearizeTree(expected));
}
assertEquals(actual, expected);
}
private static List<Node> linearizeTree(Node tree)
{
final ImmutableList.Builder<Node> nodes = ImmutableList.builder();
new DefaultTraversalVisitor<Node, Void>()
{
@Override
public Node process(Node node, @Nullable Void context)
{
Node result = super.process(node, context);
nodes.add(node);
return result;
}
}.process(tree, null);
return nodes.build();
}
private static <T> void assertListEquals(List<T> actual, List<T> expected)
{
if (actual.size() != expected.size()) {
Joiner joiner = Joiner.on("\n ");
fail(format("Lists not equal%nActual [%s]:%n %s%nExpected [%s]:%n %s",
actual.size(), joiner.join(actual),
expected.size(), joiner.join(expected)));
}
assertEquals(actual, expected);
}
}
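A short usage sketch: parse a statement and assert it survives a format/parse
round trip (the SQL text is illustrative):

class TreeAssertionsExample {
    static void example() {
        TreeAssertions.assertFormattedSql(
                SqlParser.createStatement("select a, b from t where a > 1"));
    }
}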
|
apache-2.0
|
bmwshop/brooklyn
|
usage/camp/src/main/java/io/brooklyn/camp/brooklyn/spi/lookup/PlatformComponentBrooklynLookup.java
|
1470
|
package io.brooklyn.camp.brooklyn.spi.lookup;
import io.brooklyn.camp.spi.PlatformComponent;
import io.brooklyn.camp.spi.PlatformComponent.Builder;
import io.brooklyn.camp.spi.PlatformRootSummary;
import io.brooklyn.camp.spi.collection.ResolvableLink;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import brooklyn.entity.Entity;
import brooklyn.management.ManagementContext;
public class PlatformComponentBrooklynLookup extends AbstractBrooklynResourceLookup<PlatformComponent> {
public PlatformComponentBrooklynLookup(PlatformRootSummary root, ManagementContext bmc) {
super(root, bmc);
}
@Override
public PlatformComponent get(String id) {
Entity entity = bmc.getEntityManager().getEntity(id);
Builder<? extends PlatformComponent> builder = PlatformComponent.builder()
.created(new Date(entity.getCreationTime()))
.id(entity.getId())
.name(entity.getDisplayName())
.externalManagementUri(BrooklynUrlLookup.getUrl(bmc, entity));
        for (Entity child : entity.getChildren()) {
            // FIXME this walks the whole damn tree!
            builder.add(get(child.getId()));
        }
return builder.build();
}
// platform components are not listed at the top level -- you have to walk the assemblies
@Override
public List<ResolvableLink<PlatformComponent>> links() {
return Collections.emptyList();
}
}
|
apache-2.0
|
shakamunyi/hadoop-20
|
src/test/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
|
19841
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Tests {@link DirectoryScanner} handling of differences
* between blocks on the disk and block in memory.
*/
public class TestDirectoryScanner {
private static final Log LOG = LogFactory.getLog(TestDirectoryScanner.class);
private static final Configuration CONF = new Configuration();
private static final int DEFAULT_GEN_STAMP = 9999;
private MiniDFSCluster cluster;
private Integer nsid;
private FSDataset fds = null;
private DirectoryScanner scanner = null;
private Random rand = new Random();
private Random r = new Random();
/**
 * This mock performs synchronous file deletion instead of the asynchronous one
*/
static class FSDatasetAsyncDiscServiceMock extends
FSDatasetAsyncDiskService {
FSDatasetAsyncDiscServiceMock(File[] volumes, Configuration conf) {
super(volumes, conf);
}
@Override
void deleteAsyncFile(FSVolume volume, File file) {
DataNode.LOG.info("Scheduling file " + file.toString() + " for deletion");
new FileDeleteTask(volume, file).run();
}
}
/**
* This class is made to simplify test reading;
* instead of writing
* <code>scanAndAssert(1, 2, 3, 4, 5, 6)</code> - a method with a
* 6 argumetns so it's hard to remember their order,
* it uses the builder pattern to make things more clear
* <code>
* checker()
* .setTotalBlocks(1)
* .setDiffSize(2)
* .setMissingMetaFile(3)
* .setMissingBlockFile(4)
* .setMissingMemoryBlocks(5)
* .setMismatchBlocks(6)
* .scanAndAssert();
* </code>
*/
static class ScanChecker {
long totalBlocks;
int diffSize;
long missingMetaFile;
long missingBlockFile;
long missingMemoryBlocks;
long mismatchBlocks;
DirectoryScanner scanner;
int nsid;
ScanChecker(DirectoryScanner scanner, int nsid) {
this.scanner = scanner;
this.nsid = nsid;
}
public ScanChecker setTotalBlocks(long totalBlocks) {
this.totalBlocks = totalBlocks;
return this;
}
public ScanChecker setDiffSize(int diffSize) {
this.diffSize = diffSize;
return this;
}
public ScanChecker setMissingMetaFile(long missingMetaFile) {
this.missingMetaFile = missingMetaFile;
return this;
}
public ScanChecker setMissingBlockFile(long missingBlockFile) {
this.missingBlockFile = missingBlockFile;
return this;
}
public ScanChecker setMissingMemoryBlocks(long missingMemoryBlocks) {
this.missingMemoryBlocks = missingMemoryBlocks;
return this;
}
public ScanChecker setMismatchBlocks(long mismatchBlocks) {
this.mismatchBlocks = mismatchBlocks;
return this;
}
public ScanChecker setZeroDiff() {
return this.setDiffSize(0)
.setMissingMetaFile(0)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(0);
}
/**
* Runs scanner and asserts its results with the predefined values
*/
public void scanAndAssert() {
assertTrue(scanner.getRunStatus());
scanner.run();
assertTrue(scanner.diffsPerNamespace.containsKey(nsid));
LinkedList<DirectoryScanner.ScanDifference> diff = scanner.diffsPerNamespace.get(nsid);
assertTrue(scanner.statsPerNamespace.containsKey(nsid));
DirectoryScanner.Stats stats = scanner.statsPerNamespace.get(nsid);
assertEquals(diffSize, diff.size());
assertEquals(totalBlocks, stats.totalBlocks);
assertEquals(missingMetaFile, stats.missingMetaFile);
assertEquals(missingBlockFile, stats.missingBlockFile);
assertEquals(missingMemoryBlocks, stats.missingMemoryBlocks);
assertEquals(mismatchBlocks, stats.mismatchBlocks);
}
}
public ScanChecker checker() {
// closure to the current instance
return new ScanChecker(scanner, nsid);
}
  /** Create a file with a length of <code>fileLen</code> */
private void createFile(String fileName, long fileLen) throws IOException {
FileSystem fs = cluster.getFileSystem();
Path filePath = new Path(fileName);
DFSTestUtil.createFile(fs, filePath, fileLen, (short) 1, r.nextLong());
}
/** Truncate a block file */
private long truncateBlockFile() throws IOException {
synchronized (fds) {
for (DatanodeBlockInfo b : TestDirectoryScannerDelta.getBlockInfos(fds, nsid)) {
File f = b.getBlockDataFile().getFile();
File mf = BlockWithChecksumFileWriter.getMetaFile(f, b.getBlock());
// Truncate a block file that has a corresponding metadata file
if (f.exists() && f.length() != 0 && mf.exists()) {
FileOutputStream s = new FileOutputStream(f);
FileChannel channel = s.getChannel();
channel.truncate(0);
LOG.info("Truncated block file " + f.getAbsolutePath());
long blockId = b.getBlock().getBlockId();
s.close();
return blockId;
}
}
}
return 0;
}
/** Delete a block file */
private long deleteBlockFile() {
synchronized(fds) {
for (DatanodeBlockInfo b : TestDirectoryScannerDelta.getBlockInfos(fds, nsid)) {
File f = b.getBlockDataFile().getFile();
File mf = BlockWithChecksumFileWriter.getMetaFile(f, b.getBlock());
// Delete a block file that has corresponding metadata file
if (f.exists() && mf.exists() && f.delete()) {
LOG.info("Deleting block file " + f.getAbsolutePath());
return b.getBlock().getBlockId();
}
}
} // sync
throw new IllegalStateException("Cannot complete a block file deletion");
}
/** Delete block meta file */
private long deleteMetaFile() {
synchronized(fds) {
for (DatanodeBlockInfo b : TestDirectoryScannerDelta.getBlockInfos(fds, nsid)) {
File file = BlockWithChecksumFileWriter.getMetaFile(b
.getBlockDataFile().getFile(), b.getBlock());
// Delete a metadata file
if (file.exists() && file.delete()) {
LOG.info("Deleting metadata file " + file.getAbsolutePath());
return b.getBlock().getBlockId();
}
}
} // sync
throw new IllegalStateException("cannot complete a metafile deletion");
}
/** Get a random blockId that is not used already */
private long getFreeBlockId() {
    long id;
    do {
      id = rand.nextLong();
    } while (fds.volumeMap.get(
        nsid, new Block(id, 0, GenerationStamp.WILDCARD_STAMP)) != null);
return id;
}
private String getBlockFile(long id) {
return Block.BLOCK_FILE_PREFIX + id;
}
private String getMetaFile(long id) {
return Block.BLOCK_FILE_PREFIX + id + "_" + DEFAULT_GEN_STAMP
+ Block.METADATA_EXTENSION;
}
  /** Create a block file in a random volume */
private long createBlockFile() throws IOException {
FSVolume[] volumes = fds.volumes.getVolumes();
int index = rand.nextInt(volumes.length - 1);
long id = getFreeBlockId();
File finalizedDir = volumes[index].getNamespaceSlice(nsid).getCurrentDir();
File file = new File(finalizedDir, getBlockFile(id));
if (file.createNewFile()) {
LOG.info("Created block file " + file.getName());
}
return id;
}
  /** Create a metafile in a random volume */
private long createMetaFile() throws IOException {
FSVolume[] volumes = fds.volumes.getVolumes();
int index = rand.nextInt(volumes.length - 1);
long id = getFreeBlockId();
File finalizedDir = volumes[index].getNamespaceSlice(nsid).getCurrentDir();
File file = new File(finalizedDir, getMetaFile(id));
if (file.createNewFile()) {
LOG.info("Created metafile " + file.getName());
}
return id;
}
  /** Create a block file and corresponding metafile in a random volume */
private long createBlockMetaFile() throws IOException {
FSVolume[] volumes = fds.volumes.getVolumes();
int index = rand.nextInt(volumes.length - 1);
long id = getFreeBlockId();
File finalizedDir = volumes[index].getNamespaceSlice(nsid).getCurrentDir();
File file = new File(finalizedDir, getBlockFile(id));
if (file.createNewFile()) {
LOG.info("Created block file " + file.getName());
      // Create files with the same prefix as the block file but with extension
      // names such that, during sorting, they appear around the meta file,
      // to test how DirectoryScanner handles extraneous files
String name1 = file.getAbsolutePath() + ".l";
String name2 = file.getAbsolutePath() + ".n";
file = new File(name1);
if (file.createNewFile()) {
LOG.info("Created extraneous file " + name1);
}
file = new File(name2);
if (file.createNewFile()) {
LOG.info("Created extraneous file " + name2);
}
file = new File(finalizedDir, getMetaFile(id));
if (file.createNewFile()) {
LOG.info("Created metafile " + file.getName());
}
}
return id;
}
@Test
public void testDirectoryScanner() throws Exception {
// Run the test with and without parallel scanning
for (int parallelism = 1; parallelism < 3; parallelism++) {
runTest(parallelism);
}
}
public void runTest(int parallelism) throws Exception {
CONF.setLong("dfs.block.size", 100);
CONF.setInt("io.bytes.per.checksum", 1);
CONF.setLong("dfs.heartbeat.interval", 1L);
CONF.setInt("dfs.datanode.directoryscan.interval", 1000);
CONF.setBoolean("dfs.use.inline.checksum", false);
try {
cluster = new MiniDFSCluster(CONF, 1, true, null);
cluster.waitActive();
nsid = cluster.getNameNode().getNamesystem().getNamespaceId();
fds = (FSDataset) cluster.getDataNodes().get(0).getFSDataset();
CONF.setInt(DirectoryScanner.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY,
parallelism);
// setting up mock that removes files immediately
List<File> volumes = new ArrayList<File>();
for (FSVolume vol : fds.volumes.getVolumes()) {
volumes.add(vol.getDir());
}
fds.asyncDiskService = new FSDatasetAsyncDiscServiceMock(
volumes.toArray(new File[volumes.size()]), CONF);
DataNode dn = cluster.getDataNodes().get(0);
scanner = dn.directoryScanner;
// Add file with 100 blocks
long totalBlocks = 100;
createFile("/tmp/t1", CONF.getLong("dfs.block.size", 100) * totalBlocks);
// Test1: No difference between in-memory and disk
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test2: block metafile is missing
long blockId = deleteMetaFile();
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(1)
.setMissingMetaFile(1)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(1)
.scanAndAssert();
verifyGenStamp(blockId, Block.GRANDFATHER_GENERATION_STAMP);
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test3: block file is missing
blockId = deleteBlockFile();
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(1)
.setMissingMetaFile(0)
.setMissingBlockFile(1)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(0)
.scanAndAssert();
totalBlocks--;
verifyDeletion(blockId);
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test4: A block file exists for which there is no metafile and
// a block in memory
blockId = createBlockFile();
totalBlocks++;
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(1)
.setMissingMetaFile(1)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(1)
.setMismatchBlocks(0)
.scanAndAssert();
verifyAddition(blockId, Block.GRANDFATHER_GENERATION_STAMP, 0);
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test5: A metafile exists for which there is no block file and
// a block in memory
blockId = createMetaFile();
checker()
.setTotalBlocks(totalBlocks+1)
.setDiffSize(1)
.setMissingMetaFile(0)
.setMissingBlockFile(1)
.setMissingMemoryBlocks(1)
.setMismatchBlocks(0)
.scanAndAssert();
File metafile = new File(getMetaFile(blockId));
assertTrue(!metafile.exists());
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test6: A block file and metafile exists for which there is no block in
// memory
blockId = createBlockMetaFile();
totalBlocks++;
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(1)
.setMissingMetaFile(0)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(1)
.setMismatchBlocks(0)
.scanAndAssert();
verifyAddition(blockId, DEFAULT_GEN_STAMP, 0);
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test7: Delete bunch of metafiles
for (int i = 0; i < 10; i++) {
blockId = deleteMetaFile();
}
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(10)
.setMissingMetaFile(10)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(10)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test8: Delete bunch of block files
for (int i = 0; i < 10; i++) {
blockId = deleteBlockFile();
}
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(10)
.setMissingMetaFile(0)
.setMissingBlockFile(10)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(0)
.scanAndAssert();
totalBlocks -= 10;
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test9: create a bunch of blocks files
for (int i = 0; i < 10 ; i++) {
blockId = createBlockFile();
}
totalBlocks += 10;
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(10)
.setMissingMetaFile(10)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(10)
.setMismatchBlocks(0)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test10: create a bunch of metafiles
for (int i = 0; i < 10 ; i++) {
blockId = createMetaFile();
}
checker()
.setTotalBlocks(totalBlocks+10)
.setDiffSize(10)
.setMissingMetaFile(0)
.setMissingBlockFile(10)
.setMissingMemoryBlocks(10)
.setMismatchBlocks(0)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test11: create a bunch block files and meta files
for (int i = 0; i < 10 ; i++) {
blockId = createBlockMetaFile();
}
totalBlocks += 10;
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(10)
.setMissingMetaFile(0)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(10)
.setMismatchBlocks(0)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test12: truncate block files to test block length mismatch
for (int i = 0; i < 10 ; i++) {
truncateBlockFile();
}
checker()
.setTotalBlocks(totalBlocks)
.setDiffSize(10)
.setMissingMetaFile(0)
.setMissingBlockFile(0)
.setMissingMemoryBlocks(0)
.setMismatchBlocks(10)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks)
.setZeroDiff()
.scanAndAssert();
// Test13: all the conditions combined
createMetaFile();
createBlockFile();
createBlockMetaFile();
deleteMetaFile();
deleteBlockFile();
truncateBlockFile();
checker()
.setTotalBlocks(totalBlocks+3)
.setDiffSize(6)
.setMissingMetaFile(2)
.setMissingBlockFile(2)
.setMissingMemoryBlocks(3)
.setMismatchBlocks(2)
.scanAndAssert();
checker()
.setTotalBlocks(totalBlocks+1)
.setZeroDiff()
.scanAndAssert();
totalBlocks = 1;
// Test14: validate clean shutdown of DirectoryScanner
scanner.shutdown();
assertFalse(scanner.getRunStatus());
} finally {
scanner.shutdown();
cluster.shutdown();
}
}
private void verifyAddition(long blockId, long genStamp, long size) throws IOException{
final DatanodeBlockInfo replicainfo;
replicainfo = fds.volumeMap.get(nsid, new Block(blockId, 0, GenerationStamp.WILDCARD_STAMP));
assertNotNull(replicainfo);
// Added block has the same file as the one created by the test
File file = new File(getBlockFile(blockId));
assertEquals(file.getName(), fds.getBlockFile(nsid, new Block(blockId)).getName());
// Generation stamp is same as that of created file
LOG.info("------------------: " + genStamp + " : " +
replicainfo.getBlock().getGenerationStamp());
assertEquals(genStamp, replicainfo.getBlock().getGenerationStamp());
// File size matches
assertEquals(size, replicainfo.getBlock().getNumBytes());
}
private void verifyDeletion(long blockId) {
// Ensure block does not exist in memory
assertNull(fds.volumeMap.get(nsid, new Block(blockId, 0, GenerationStamp.WILDCARD_STAMP)));
}
private void verifyGenStamp(long blockId, long genStamp) {
final DatanodeBlockInfo memBlock;
memBlock = fds.volumeMap.get(nsid, new Block(blockId, 0, GenerationStamp.WILDCARD_STAMP));
assertNotNull(memBlock);
assertEquals(genStamp, memBlock.getBlock().getGenerationStamp());
}
}
|
apache-2.0
|
onders86/camel
|
components/camel-aws/src/main/java/org/apache/camel/component/aws/s3/S3Producer.java
|
23309
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws.s3;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.amazonaws.HttpMethod;
import com.amazonaws.services.cloudfront.model.InvalidArgumentException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.CompleteMultipartUploadResult;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyObjectResult;
import com.amazonaws.services.s3.model.DeleteBucketRequest;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.SSEAwsKeyManagementParams;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.UploadPartRequest;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.WrappedFile;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.FileUtil;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.component.aws.common.AwsExchangeUtil.getMessageForResponse;
/**
* A Producer which sends messages to the Amazon Web Service Simple Storage
* Service <a href="http://aws.amazon.com/s3/">AWS S3</a>
*/
public class S3Producer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(S3Producer.class);
private transient String s3ProducerToString;
public S3Producer(final Endpoint endpoint) {
super(endpoint);
}
@Override
public void process(final Exchange exchange) throws Exception {
S3Operations operation = determineOperation(exchange);
if (ObjectHelper.isEmpty(operation)) {
if (getConfiguration().isMultiPartUpload()) {
processMultiPart(exchange);
} else {
processSingleOp(exchange);
}
} else {
switch (operation) {
case copyObject:
copyObject(getEndpoint().getS3Client(), exchange);
break;
case deleteObject:
deleteObject(getEndpoint().getS3Client(), exchange);
break;
case listBuckets:
listBuckets(getEndpoint().getS3Client(), exchange);
break;
case deleteBucket:
deleteBucket(getEndpoint().getS3Client(), exchange);
break;
case downloadLink:
createDownloadLink(getEndpoint().getS3Client(), exchange);
break;
case listObjects:
listObjects(getEndpoint().getS3Client(), exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation");
}
}
}
public void processMultiPart(final Exchange exchange) throws Exception {
File filePayload = null;
Object obj = exchange.getIn().getMandatoryBody();
// Need to check if the message body is WrappedFile
if (obj instanceof WrappedFile) {
obj = ((WrappedFile<?>)obj).getFile();
}
if (obj instanceof File) {
filePayload = (File)obj;
} else {
throw new InvalidArgumentException("aws-s3: MultiPart upload requires a File input.");
}
ObjectMetadata objectMetadata = determineMetadata(exchange);
if (objectMetadata.getContentLength() == 0) {
objectMetadata.setContentLength(filePayload.length());
}
final String keyName = determineKey(exchange);
final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(getConfiguration().getBucketName(), keyName, objectMetadata);
String storageClass = determineStorageClass(exchange);
if (storageClass != null) {
initRequest.setStorageClass(StorageClass.fromValue(storageClass));
}
String cannedAcl = exchange.getIn().getHeader(S3Constants.CANNED_ACL, String.class);
if (cannedAcl != null) {
CannedAccessControlList objectAcl = CannedAccessControlList.valueOf(cannedAcl);
initRequest.setCannedACL(objectAcl);
}
AccessControlList acl = exchange.getIn().getHeader(S3Constants.ACL, AccessControlList.class);
if (acl != null) {
            // Note: if cannedAcl and acl are both specified, the one set last wins;
            // refer to PutObjectRequest#setAccessControlList for more details
initRequest.setAccessControlList(acl);
}
if (getConfiguration().isUseAwsKMS()) {
SSEAwsKeyManagementParams keyManagementParams;
if (ObjectHelper.isNotEmpty(getConfiguration().getAwsKMSKeyId())) {
keyManagementParams = new SSEAwsKeyManagementParams(getConfiguration().getAwsKMSKeyId());
} else {
keyManagementParams = new SSEAwsKeyManagementParams();
}
initRequest.setSSEAwsKeyManagementParams(keyManagementParams);
}
LOG.trace("Initiating multipart upload [{}] from exchange [{}]...", initRequest, exchange);
final InitiateMultipartUploadResult initResponse = getEndpoint().getS3Client().initiateMultipartUpload(initRequest);
final long contentLength = objectMetadata.getContentLength();
final List<PartETag> partETags = new ArrayList<>();
long partSize = getConfiguration().getPartSize();
CompleteMultipartUploadResult uploadResult = null;
long filePosition = 0;
try {
for (int part = 1; filePosition < contentLength; part++) {
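                // the final part may be smaller than the configured part size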
partSize = Math.min(partSize, contentLength - filePosition);
UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(getConfiguration().getBucketName()).withKey(keyName)
.withUploadId(initResponse.getUploadId()).withPartNumber(part).withFileOffset(filePosition).withFile(filePayload).withPartSize(partSize);
LOG.trace("Uploading part [{}] for {}", part, keyName);
partETags.add(getEndpoint().getS3Client().uploadPart(uploadRequest).getPartETag());
filePosition += partSize;
}
CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(getConfiguration().getBucketName(), keyName, initResponse.getUploadId(), partETags);
uploadResult = getEndpoint().getS3Client().completeMultipartUpload(compRequest);
} catch (Exception e) {
getEndpoint().getS3Client().abortMultipartUpload(new AbortMultipartUploadRequest(getConfiguration().getBucketName(), keyName, initResponse.getUploadId()));
throw e;
}
Message message = getMessageForResponse(exchange);
message.setHeader(S3Constants.E_TAG, uploadResult.getETag());
if (uploadResult.getVersionId() != null) {
message.setHeader(S3Constants.VERSION_ID, uploadResult.getVersionId());
}
if (getConfiguration().isDeleteAfterWrite() && filePayload != null) {
FileUtil.deleteFile(filePayload);
}
}
public void processSingleOp(final Exchange exchange) throws Exception {
ObjectMetadata objectMetadata = determineMetadata(exchange);
File filePayload = null;
InputStream is = null;
ByteArrayOutputStream baos = null;
Object obj = exchange.getIn().getMandatoryBody();
PutObjectRequest putObjectRequest = null;
// Need to check if the message body is WrappedFile
if (obj instanceof WrappedFile) {
obj = ((WrappedFile<?>)obj).getFile();
}
if (obj instanceof File) {
filePayload = (File)obj;
is = new FileInputStream(filePayload);
} else {
is = exchange.getIn().getMandatoryBody(InputStream.class);
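            // S3 needs the content length up front, so buffer the stream to measure it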
baos = determineLengthInputStream(is);
objectMetadata.setContentLength(baos.size());
is = new ByteArrayInputStream(baos.toByteArray());
}
String bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (bucketName == null) {
LOG.trace("Bucket name is not in header, using default one [{}]...", getConfiguration().getBucketName());
bucketName = getConfiguration().getBucketName();
}
putObjectRequest = new PutObjectRequest(bucketName, determineKey(exchange), is, objectMetadata);
String storageClass = determineStorageClass(exchange);
if (storageClass != null) {
putObjectRequest.setStorageClass(storageClass);
}
String cannedAcl = exchange.getIn().getHeader(S3Constants.CANNED_ACL, String.class);
if (cannedAcl != null) {
CannedAccessControlList objectAcl = CannedAccessControlList.valueOf(cannedAcl);
putObjectRequest.setCannedAcl(objectAcl);
}
AccessControlList acl = exchange.getIn().getHeader(S3Constants.ACL, AccessControlList.class);
if (acl != null) {
            // Note: if cannedAcl and acl are both specified, the one set last wins;
            // refer to PutObjectRequest#setAccessControlList for more details
putObjectRequest.setAccessControlList(acl);
}
if (getConfiguration().isUseAwsKMS()) {
SSEAwsKeyManagementParams keyManagementParams;
if (ObjectHelper.isNotEmpty(getConfiguration().getAwsKMSKeyId())) {
keyManagementParams = new SSEAwsKeyManagementParams(getConfiguration().getAwsKMSKeyId());
} else {
keyManagementParams = new SSEAwsKeyManagementParams();
}
putObjectRequest.setSSEAwsKeyManagementParams(keyManagementParams);
}
LOG.trace("Put object [{}] from exchange [{}]...", putObjectRequest, exchange);
PutObjectResult putObjectResult = getEndpoint().getS3Client().putObject(putObjectRequest);
LOG.trace("Received result [{}]", putObjectResult);
Message message = getMessageForResponse(exchange);
message.setHeader(S3Constants.E_TAG, putObjectResult.getETag());
if (putObjectResult.getVersionId() != null) {
message.setHeader(S3Constants.VERSION_ID, putObjectResult.getVersionId());
}
// close streams
IOHelper.close(putObjectRequest.getInputStream());
IOHelper.close(is);
if (getConfiguration().isDeleteAfterWrite() && filePayload != null) {
FileUtil.deleteFile(filePayload);
}
}
private void copyObject(AmazonS3 s3Client, Exchange exchange) {
String bucketNameDestination;
String destinationKey;
String sourceKey;
String bucketName;
String versionId;
bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
bucketName = getConfiguration().getBucketName();
}
sourceKey = exchange.getIn().getHeader(S3Constants.KEY, String.class);
destinationKey = exchange.getIn().getHeader(S3Constants.DESTINATION_KEY, String.class);
bucketNameDestination = exchange.getIn().getHeader(S3Constants.BUCKET_DESTINATION_NAME, String.class);
versionId = exchange.getIn().getHeader(S3Constants.VERSION_ID, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
throw new IllegalArgumentException("Bucket Name must be specified for copyObject Operation");
}
if (ObjectHelper.isEmpty(bucketNameDestination)) {
throw new IllegalArgumentException("Bucket Name Destination must be specified for copyObject Operation");
}
if (ObjectHelper.isEmpty(sourceKey)) {
throw new IllegalArgumentException("Source Key must be specified for copyObject Operation");
}
if (ObjectHelper.isEmpty(destinationKey)) {
throw new IllegalArgumentException("Destination Key must be specified for copyObject Operation");
}
CopyObjectRequest copyObjectRequest;
if (ObjectHelper.isEmpty(versionId)) {
copyObjectRequest = new CopyObjectRequest(bucketName, sourceKey, bucketNameDestination, destinationKey);
} else {
copyObjectRequest = new CopyObjectRequest(bucketName, sourceKey, versionId, bucketNameDestination, destinationKey);
}
if (getConfiguration().isUseAwsKMS()) {
SSEAwsKeyManagementParams keyManagementParams;
if (ObjectHelper.isNotEmpty(getConfiguration().getAwsKMSKeyId())) {
keyManagementParams = new SSEAwsKeyManagementParams(getConfiguration().getAwsKMSKeyId());
} else {
keyManagementParams = new SSEAwsKeyManagementParams();
}
copyObjectRequest.setSSEAwsKeyManagementParams(keyManagementParams);
}
CopyObjectResult copyObjectResult = s3Client.copyObject(copyObjectRequest);
Message message = getMessageForResponse(exchange);
message.setHeader(S3Constants.E_TAG, copyObjectResult.getETag());
if (copyObjectResult.getVersionId() != null) {
message.setHeader(S3Constants.VERSION_ID, copyObjectResult.getVersionId());
}
}
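    // A hedged usage sketch for the copy operation (these are the header constants the
    // method above reads; the bucket and key values are illustrative):
    //
    //   exchange.getIn().setHeader(S3Constants.BUCKET_NAME, "source-bucket");
    //   exchange.getIn().setHeader(S3Constants.KEY, "source/key.txt");
    //   exchange.getIn().setHeader(S3Constants.BUCKET_DESTINATION_NAME, "dest-bucket");
    //   exchange.getIn().setHeader(S3Constants.DESTINATION_KEY, "dest/key.txt");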
private void deleteObject(AmazonS3 s3Client, Exchange exchange) {
String sourceKey;
String bucketName;
bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
bucketName = getConfiguration().getBucketName();
}
sourceKey = exchange.getIn().getHeader(S3Constants.KEY, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
throw new IllegalArgumentException("Bucket Name must be specified for deleteObject Operation");
}
if (ObjectHelper.isEmpty(sourceKey)) {
throw new IllegalArgumentException("Source Key must be specified for deleteObject Operation");
}
DeleteObjectRequest deleteObjectRequest;
deleteObjectRequest = new DeleteObjectRequest(bucketName, sourceKey);
s3Client.deleteObject(deleteObjectRequest);
Message message = getMessageForResponse(exchange);
message.setBody(true);
}
private void listBuckets(AmazonS3 s3Client, Exchange exchange) {
List<Bucket> bucketsList = s3Client.listBuckets();
Message message = getMessageForResponse(exchange);
message.setBody(bucketsList);
}
private void deleteBucket(AmazonS3 s3Client, Exchange exchange) {
String bucketName;
bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
bucketName = getConfiguration().getBucketName();
}
DeleteBucketRequest deleteBucketRequest = new DeleteBucketRequest(bucketName);
s3Client.deleteBucket(deleteBucketRequest);
}
private void listObjects(AmazonS3 s3Client, Exchange exchange) {
String bucketName;
bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
bucketName = getConfiguration().getBucketName();
}
ObjectListing objectList = s3Client.listObjects(bucketName);
Message message = getMessageForResponse(exchange);
message.setBody(objectList);
}
private S3Operations determineOperation(Exchange exchange) {
S3Operations operation = exchange.getIn().getHeader(S3Constants.S3_OPERATION, S3Operations.class);
if (operation == null) {
operation = getConfiguration().getOperation();
}
return operation;
}
private ObjectMetadata determineMetadata(final Exchange exchange) {
ObjectMetadata objectMetadata = new ObjectMetadata();
Long contentLength = exchange.getIn().getHeader(S3Constants.CONTENT_LENGTH, Long.class);
if (contentLength != null) {
objectMetadata.setContentLength(contentLength);
}
String contentType = exchange.getIn().getHeader(S3Constants.CONTENT_TYPE, String.class);
if (contentType != null) {
objectMetadata.setContentType(contentType);
}
String cacheControl = exchange.getIn().getHeader(S3Constants.CACHE_CONTROL, String.class);
if (cacheControl != null) {
objectMetadata.setCacheControl(cacheControl);
}
String contentDisposition = exchange.getIn().getHeader(S3Constants.CONTENT_DISPOSITION, String.class);
if (contentDisposition != null) {
objectMetadata.setContentDisposition(contentDisposition);
}
String contentEncoding = exchange.getIn().getHeader(S3Constants.CONTENT_ENCODING, String.class);
if (contentEncoding != null) {
objectMetadata.setContentEncoding(contentEncoding);
}
String contentMD5 = exchange.getIn().getHeader(S3Constants.CONTENT_MD5, String.class);
if (contentMD5 != null) {
objectMetadata.setContentMD5(contentMD5);
}
Date lastModified = exchange.getIn().getHeader(S3Constants.LAST_MODIFIED, Date.class);
if (lastModified != null) {
objectMetadata.setLastModified(lastModified);
}
Map<String, String> userMetadata = CastUtils.cast(exchange.getIn().getHeader(S3Constants.USER_METADATA, Map.class));
if (userMetadata != null) {
objectMetadata.setUserMetadata(userMetadata);
}
Map<String, String> s3Headers = CastUtils.cast(exchange.getIn().getHeader(S3Constants.S3_HEADERS, Map.class));
if (s3Headers != null) {
for (Map.Entry<String, String> entry : s3Headers.entrySet()) {
objectMetadata.setHeader(entry.getKey(), entry.getValue());
}
}
String encryption = exchange.getIn().getHeader(S3Constants.SERVER_SIDE_ENCRYPTION, getConfiguration().getServerSideEncryption(), String.class);
if (encryption != null) {
objectMetadata.setSSEAlgorithm(encryption);
}
return objectMetadata;
}
private String determineKey(final Exchange exchange) {
String key = exchange.getIn().getHeader(S3Constants.KEY, String.class);
if (key == null) {
            throw new IllegalArgumentException("AWS S3 Key header is missing.");
}
return key;
}
private String determineStorageClass(final Exchange exchange) {
String storageClass = exchange.getIn().getHeader(S3Constants.STORAGE_CLASS, String.class);
if (storageClass == null) {
storageClass = getConfiguration().getStorageClass();
}
return storageClass;
}
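    // Note on the helper below: it buffers the whole stream in memory so that an exact
    // Content-Length can be set on the object metadata before upload; for large payloads
    // this trades heap usage for avoiding unsized uploads.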
private ByteArrayOutputStream determineLengthInputStream(InputStream is) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] bytes = new byte[1024];
int count;
while ((count = is.read(bytes)) > 0) {
out.write(bytes, 0, count);
}
return out;
}
private void createDownloadLink(AmazonS3 s3Client, Exchange exchange) {
String bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
if (ObjectHelper.isEmpty(bucketName)) {
bucketName = getConfiguration().getBucketName();
}
if (bucketName == null) {
throw new IllegalArgumentException("AWS S3 Bucket name header is missing.");
}
String key = exchange.getIn().getHeader(S3Constants.KEY, String.class);
if (key == null) {
throw new IllegalArgumentException("AWS S3 Key header is missing.");
}
Date expiration = new Date();
long milliSeconds = expiration.getTime();
Long expirationMillis = exchange.getIn().getHeader(S3Constants.DOWNLOAD_LINK_EXPIRATION, Long.class);
if (expirationMillis != null) {
milliSeconds += expirationMillis;
} else {
milliSeconds += 1000 * 60 * 60; // Default: Add 1 hour.
}
expiration.setTime(milliSeconds);
GeneratePresignedUrlRequest generatePresignedUrlRequest = new GeneratePresignedUrlRequest(bucketName, key);
generatePresignedUrlRequest.setMethod(HttpMethod.GET);
generatePresignedUrlRequest.setExpiration(expiration);
URL url = s3Client.generatePresignedUrl(generatePresignedUrlRequest);
Message message = getMessageForResponse(exchange);
message.setHeader(S3Constants.DOWNLOAD_LINK, url.toString());
}
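    // A hedged usage sketch for download links (real header constants from this class,
    // illustrative values):
    //
    //   exchange.getIn().setHeader(S3Constants.KEY, "reports/daily.txt");
    //   exchange.getIn().setHeader(S3Constants.DOWNLOAD_LINK_EXPIRATION, 15 * 60 * 1000L);
    //
    // After the operation runs, the presigned URL is available from the response message:
    //
    //   String link = message.getHeader(S3Constants.DOWNLOAD_LINK, String.class);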
protected S3Configuration getConfiguration() {
return getEndpoint().getConfiguration();
}
@Override
public String toString() {
if (s3ProducerToString == null) {
s3ProducerToString = "S3Producer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
}
return s3ProducerToString;
}
@Override
public S3Endpoint getEndpoint() {
return (S3Endpoint)super.getEndpoint();
}
}
|
apache-2.0
|
andreasnef/fcrepo
|
fcrepo-server/src/main/java/org/fcrepo/server/resourceIndex/TripleGenerator.java
|
890
|
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.server.resourceIndex;
import java.util.Set;
import org.jrdf.graph.Triple;
import org.fcrepo.server.errors.ResourceIndexException;
import org.fcrepo.server.storage.DOReader;
/**
* Generates RDF triples for Fedora objects.
*
* @author Chris Wilper
*/
public interface TripleGenerator {
/**
* Get triples implied by the given object.
*
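     * <p>A minimal caller sketch; {@code generator} is an implementation of this
     * interface and {@code writer} is an assumed consumer of the triples:
     * <pre>
     * Set&lt;Triple&gt; triples = generator.getTriplesForObject(reader);
     * for (Triple triple : triples) {
     *     writer.add(triple);
     * }
     * </pre>
     *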
* @param reader
* Current object from which to determine triples
     * @return Set of triples implied by the object's contents.
* @throws ResourceIndexException
*/
public Set<Triple> getTriplesForObject(DOReader reader)
throws ResourceIndexException;
}
|
apache-2.0
|
EdwardLee03/guava
|
guava-tests/test/com/google/common/util/concurrent/UninterruptiblesTest.java
|
27455
|
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.util.concurrent;
import static com.google.common.util.concurrent.InterruptionUtil.repeatedlyInterruptTestThread;
import static com.google.common.util.concurrent.Uninterruptibles.awaitUninterruptibly;
import static com.google.common.util.concurrent.Uninterruptibles.joinUninterruptibly;
import static com.google.common.util.concurrent.Uninterruptibles.putUninterruptibly;
import static com.google.common.util.concurrent.Uninterruptibles.takeUninterruptibly;
import static com.google.common.util.concurrent.Uninterruptibles.tryAcquireUninterruptibly;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.TearDown;
import com.google.common.testing.TearDownStack;
import java.util.Date;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import junit.framework.TestCase;
/**
* Tests for {@link Uninterruptibles}.
*
* @author Anthony Zana
*/
public class UninterruptiblesTest extends TestCase {
private static final String EXPECTED_TAKE = "expectedTake";
/** Timeout to use when we don't expect the timeout to expire. */
private static final long LONG_DELAY_MS = 2500;
private static final long SLEEP_SLACK = 2;
private final TearDownStack tearDownStack = new TearDownStack();
// NOTE: All durations in these tests are expressed in milliseconds
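  // General test shape: schedule one or more interrupts of this thread, invoke the
  // uninterruptible wrapper under test, then assert both that the call completed in the
  // expected window and that the thread's interrupt status was restored afterwards
  // (assertInterrupted) or was never set (assertNotInterrupted).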
@Override
protected void setUp() {
// Clear any previous interrupt before running the test.
if (Thread.currentThread().isInterrupted()) {
throw new AssertionError(
"Thread interrupted on test entry. "
+ "Some test probably didn't clear the interrupt state");
}
tearDownStack.addTearDown(
new TearDown() {
@Override
public void tearDown() {
Thread.interrupted();
}
});
}
@Override
protected void tearDown() {
tearDownStack.runTearDown();
}
public void testNull() throws Exception {
new NullPointerTester()
.setDefault(CountDownLatch.class, new CountDownLatch(0))
.setDefault(Semaphore.class, new Semaphore(999))
.testAllPublicStaticMethods(Uninterruptibles.class);
}
// IncrementableCountDownLatch.await() tests
// CountDownLatch.await() tests
// Condition.await() tests
public void testConditionAwaitTimeoutExceeded() {
Stopwatch stopwatch = Stopwatch.createStarted();
Condition condition = TestCondition.create();
boolean signaledBeforeTimeout = awaitUninterruptibly(condition, 500, MILLISECONDS);
assertFalse(signaledBeforeTimeout);
assertAtLeastTimePassed(stopwatch, 500);
assertNotInterrupted();
}
public void testConditionAwaitTimeoutNotExceeded() {
Stopwatch stopwatch = Stopwatch.createStarted();
Condition condition = TestCondition.createAndSignalAfter(500, MILLISECONDS);
boolean signaledBeforeTimeout = awaitUninterruptibly(condition, 1500, MILLISECONDS);
assertTrue(signaledBeforeTimeout);
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
assertNotInterrupted();
}
public void testConditionAwaitInterruptedTimeoutExceeded() {
Stopwatch stopwatch = Stopwatch.createStarted();
Condition condition = TestCondition.create();
requestInterruptIn(500);
boolean signaledBeforeTimeout = awaitUninterruptibly(condition, 1000, MILLISECONDS);
assertFalse(signaledBeforeTimeout);
assertAtLeastTimePassed(stopwatch, 1000);
assertInterrupted();
}
public void testConditionAwaitInterruptedTimeoutNotExceeded() {
Stopwatch stopwatch = Stopwatch.createStarted();
Condition condition = TestCondition.createAndSignalAfter(1000, MILLISECONDS);
requestInterruptIn(500);
boolean signaledBeforeTimeout = awaitUninterruptibly(condition, 1500, MILLISECONDS);
assertTrue(signaledBeforeTimeout);
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
assertInterrupted();
}
// BlockingQueue.put() tests
public void testPutWithNoWait() {
Stopwatch stopwatch = Stopwatch.createStarted();
BlockingQueue<String> queue = new ArrayBlockingQueue<>(999);
putUninterruptibly(queue, "");
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
assertEquals("", queue.peek());
}
public void testPutNoInterrupt() {
TimedPutQueue queue = TimedPutQueue.createWithDelay(20);
queue.putSuccessfully();
assertNotInterrupted();
}
public void testPutSingleInterrupt() {
TimedPutQueue queue = TimedPutQueue.createWithDelay(50);
requestInterruptIn(10);
queue.putSuccessfully();
assertInterrupted();
}
public void testPutMultiInterrupt() {
TimedPutQueue queue = TimedPutQueue.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
queue.putSuccessfully();
assertInterrupted();
}
// BlockingQueue.take() tests
public void testTakeWithNoWait() {
Stopwatch stopwatch = Stopwatch.createStarted();
BlockingQueue<String> queue = new ArrayBlockingQueue<>(1);
assertTrue(queue.offer(""));
assertEquals("", takeUninterruptibly(queue));
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
}
public void testTakeNoInterrupt() {
TimedTakeQueue queue = TimedTakeQueue.createWithDelay(20);
queue.takeSuccessfully();
assertNotInterrupted();
}
public void testTakeSingleInterrupt() {
TimedTakeQueue queue = TimedTakeQueue.createWithDelay(50);
requestInterruptIn(10);
queue.takeSuccessfully();
assertInterrupted();
}
public void testTakeMultiInterrupt() {
TimedTakeQueue queue = TimedTakeQueue.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
queue.takeSuccessfully();
assertInterrupted();
}
// join() tests
public void testJoinWithNoWait() throws InterruptedException {
Stopwatch stopwatch = Stopwatch.createStarted();
Thread thread = new Thread(new JoinTarget(15));
thread.start();
thread.join();
assertFalse(thread.isAlive());
joinUninterruptibly(thread);
joinUninterruptibly(thread, 0, MILLISECONDS);
joinUninterruptibly(thread, -42, MILLISECONDS);
joinUninterruptibly(thread, LONG_DELAY_MS, MILLISECONDS);
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
}
public void testJoinNoInterrupt() {
TimedThread thread = TimedThread.createWithDelay(20);
thread.joinSuccessfully();
assertNotInterrupted();
}
public void testJoinTimeoutNoInterruptNotExpired() {
TimedThread thread = TimedThread.createWithDelay(20);
thread.joinSuccessfully(LONG_DELAY_MS);
assertNotInterrupted();
}
public void testJoinTimeoutNoInterruptExpired() {
TimedThread thread = TimedThread.createWithDelay(LONG_DELAY_MS);
thread.joinUnsuccessfully(30);
assertNotInterrupted();
}
public void testJoinSingleInterrupt() {
TimedThread thread = TimedThread.createWithDelay(50);
requestInterruptIn(10);
thread.joinSuccessfully();
assertInterrupted();
}
public void testJoinTimeoutSingleInterruptNoExpire() {
TimedThread thread = TimedThread.createWithDelay(50);
requestInterruptIn(10);
thread.joinSuccessfully(LONG_DELAY_MS);
assertInterrupted();
}
public void testJoinTimeoutSingleInterruptExpired() {
TimedThread thread = TimedThread.createWithDelay(LONG_DELAY_MS);
requestInterruptIn(10);
thread.joinUnsuccessfully(50);
assertInterrupted();
}
public void testJoinMultiInterrupt() {
TimedThread thread = TimedThread.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
thread.joinSuccessfully();
assertInterrupted();
}
public void testJoinTimeoutMultiInterruptNoExpire() {
TimedThread thread = TimedThread.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
thread.joinSuccessfully(LONG_DELAY_MS);
assertInterrupted();
}
public void testJoinTimeoutMultiInterruptExpired() {
/*
* We don't "need" to schedule a thread completion at all here, but by doing
* so, we come the closest we can to testing that the wait time is
* appropriately decreased on each progressive join() call.
*/
TimedThread thread = TimedThread.createWithDelay(LONG_DELAY_MS);
repeatedlyInterruptTestThread(20, tearDownStack);
thread.joinUnsuccessfully(70);
assertInterrupted();
}
// sleep() Tests
public void testSleepNoInterrupt() {
sleepSuccessfully(10);
}
public void testSleepSingleInterrupt() {
requestInterruptIn(10);
sleepSuccessfully(50);
assertInterrupted();
}
public void testSleepMultiInterrupt() {
repeatedlyInterruptTestThread(10, tearDownStack);
sleepSuccessfully(100);
assertInterrupted();
}
// Semaphore.tryAcquire() tests
public void testTryAcquireWithNoWait() {
Stopwatch stopwatch = Stopwatch.createStarted();
Semaphore semaphore = new Semaphore(99);
assertTrue(tryAcquireUninterruptibly(semaphore, 0, MILLISECONDS));
assertTrue(tryAcquireUninterruptibly(semaphore, -42, MILLISECONDS));
assertTrue(tryAcquireUninterruptibly(semaphore, LONG_DELAY_MS, MILLISECONDS));
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
}
public void testTryAcquireTimeoutNoInterruptNotExpired() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(20);
semaphore.tryAcquireSuccessfully(LONG_DELAY_MS);
assertNotInterrupted();
}
public void testTryAcquireTimeoutNoInterruptExpired() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
semaphore.tryAcquireUnsuccessfully(30);
assertNotInterrupted();
}
public void testTryAcquireTimeoutSingleInterruptNoExpire() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(50);
requestInterruptIn(10);
semaphore.tryAcquireSuccessfully(LONG_DELAY_MS);
assertInterrupted();
}
public void testTryAcquireTimeoutSingleInterruptExpired() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
requestInterruptIn(10);
semaphore.tryAcquireUnsuccessfully(50);
assertInterrupted();
}
public void testTryAcquireTimeoutMultiInterruptNoExpire() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
semaphore.tryAcquireSuccessfully(LONG_DELAY_MS);
assertInterrupted();
}
public void testTryAcquireTimeoutMultiInterruptExpired() {
/*
* We don't "need" to schedule a release() call at all here, but by doing
* so, we come the closest we can to testing that the wait time is
* appropriately decreased on each progressive tryAcquire() call.
*/
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
repeatedlyInterruptTestThread(20, tearDownStack);
semaphore.tryAcquireUnsuccessfully(70);
assertInterrupted();
}
public void testTryAcquireWithNoWaitMultiPermit() {
Stopwatch stopwatch = Stopwatch.createStarted();
Semaphore semaphore = new Semaphore(99);
assertTrue(tryAcquireUninterruptibly(semaphore, 10, 0, MILLISECONDS));
assertTrue(tryAcquireUninterruptibly(semaphore, 10, -42, MILLISECONDS));
assertTrue(tryAcquireUninterruptibly(semaphore, 10, LONG_DELAY_MS, MILLISECONDS));
assertTimeNotPassed(stopwatch, LONG_DELAY_MS);
}
public void testTryAcquireTimeoutNoInterruptNotExpiredMultiPermit() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(20);
semaphore.tryAcquireSuccessfully(10, LONG_DELAY_MS);
assertNotInterrupted();
}
public void testTryAcquireTimeoutNoInterruptExpiredMultiPermit() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
semaphore.tryAcquireUnsuccessfully(10, 30);
assertNotInterrupted();
}
public void testTryAcquireTimeoutSingleInterruptNoExpireMultiPermit() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(50);
requestInterruptIn(10);
semaphore.tryAcquireSuccessfully(10, LONG_DELAY_MS);
assertInterrupted();
}
public void testTryAcquireTimeoutSingleInterruptExpiredMultiPermit() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
requestInterruptIn(10);
semaphore.tryAcquireUnsuccessfully(10, 50);
assertInterrupted();
}
public void testTryAcquireTimeoutMultiInterruptNoExpireMultiPermit() {
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(100);
repeatedlyInterruptTestThread(20, tearDownStack);
semaphore.tryAcquireSuccessfully(10, LONG_DELAY_MS);
assertInterrupted();
}
public void testTryAcquireTimeoutMultiInterruptExpiredMultiPermit() {
/*
* We don't "need" to schedule a release() call at all here, but by doing
* so, we come the closest we can to testing that the wait time is
* appropriately decreased on each progressive tryAcquire() call.
*/
TimedSemaphore semaphore = TimedSemaphore.createWithDelay(LONG_DELAY_MS);
repeatedlyInterruptTestThread(20, tearDownStack);
semaphore.tryAcquireUnsuccessfully(10, 70);
assertInterrupted();
}
/**
* Wrapper around {@link Stopwatch} which also contains an "expected completion time." Creating a
* {@code Completion} starts the underlying stopwatch.
*/
private static final class Completion {
final Stopwatch stopwatch;
final long expectedCompletionWaitMillis;
Completion(long expectedCompletionWaitMillis) {
this.expectedCompletionWaitMillis = expectedCompletionWaitMillis;
stopwatch = Stopwatch.createStarted();
}
/**
* Asserts that the expected completion time has passed (and not "too much" time beyond that).
*/
void assertCompletionExpected() {
assertAtLeastTimePassed(stopwatch, expectedCompletionWaitMillis);
assertTimeNotPassed(stopwatch, expectedCompletionWaitMillis + LONG_DELAY_MS);
}
/**
* Asserts that at least {@code timeout} has passed but the expected completion time has not.
*/
void assertCompletionNotExpected(long timeout) {
Preconditions.checkArgument(timeout < expectedCompletionWaitMillis);
assertAtLeastTimePassed(stopwatch, timeout);
assertTimeNotPassed(stopwatch, expectedCompletionWaitMillis);
}
}
private static void assertAtLeastTimePassed(Stopwatch stopwatch, long expectedMillis) {
long elapsedMillis = stopwatch.elapsed(MILLISECONDS);
/*
* The "+ 5" below is to permit, say, sleep(10) to sleep only 9 milliseconds. We see such
* behavior sometimes when running these tests publicly as part of Guava. "+ 5" is probably more
* generous than it needs to be.
*/
assertTrue(
"Expected elapsed millis to be >= " + expectedMillis + " but was " + elapsedMillis,
elapsedMillis + 5 >= expectedMillis);
}
// TODO(cpovirk): Split this into separate CountDownLatch and IncrementableCountDownLatch classes.
/** Manages a {@link BlockingQueue} and associated timings for a {@code put} call. */
private static final class TimedPutQueue {
final BlockingQueue<String> queue;
final Completion completed;
/**
     * Creates a queue whose scheduled {@link EnableWrites} task opens up a spot for a
     * {@code put} to succeed in {@code countdownInMillis}.
*/
static TimedPutQueue createWithDelay(long countdownInMillis) {
return new TimedPutQueue(countdownInMillis);
}
private TimedPutQueue(long countdownInMillis) {
this.queue = new ArrayBlockingQueue<>(1);
assertTrue(queue.offer("blocksPutCallsUntilRemoved"));
this.completed = new Completion(countdownInMillis);
scheduleEnableWrites(this.queue, countdownInMillis);
}
    /** Performs a {@code put} and asserts that the operation completed in the expected timeframe. */
void putSuccessfully() {
putUninterruptibly(queue, "");
completed.assertCompletionExpected();
assertEquals("", queue.peek());
}
private static void scheduleEnableWrites(BlockingQueue<String> queue, long countdownInMillis) {
Runnable toRun = new EnableWrites(queue, countdownInMillis);
// TODO(cpovirk): automatically fail the test if this thread throws
Thread enablerThread = new Thread(toRun);
enablerThread.start();
}
}
/** Manages a {@link BlockingQueue} and associated timings for a {@code take} call. */
private static final class TimedTakeQueue {
final BlockingQueue<String> queue;
final Completion completed;
/**
     * Creates a queue whose scheduled {@link EnableReads} task inserts an element for a
     * {@code take} to receive in {@code countdownInMillis}.
*/
static TimedTakeQueue createWithDelay(long countdownInMillis) {
return new TimedTakeQueue(countdownInMillis);
}
private TimedTakeQueue(long countdownInMillis) {
this.queue = new ArrayBlockingQueue<>(1);
this.completed = new Completion(countdownInMillis);
scheduleEnableReads(this.queue, countdownInMillis);
}
    /** Performs a {@code take} and asserts that the operation completed in the expected timeframe. */
void takeSuccessfully() {
assertEquals(EXPECTED_TAKE, takeUninterruptibly(queue));
completed.assertCompletionExpected();
assertTrue(queue.isEmpty());
}
private static void scheduleEnableReads(BlockingQueue<String> queue, long countdownInMillis) {
Runnable toRun = new EnableReads(queue, countdownInMillis);
// TODO(cpovirk): automatically fail the test if this thread throws
Thread enablerThread = new Thread(toRun);
enablerThread.start();
}
}
/** Manages a {@link Semaphore} and associated timings. */
private static final class TimedSemaphore {
final Semaphore semaphore;
final Completion completed;
/**
     * Creates a semaphore whose scheduled {@link Release} task releases permits in {@code countdownInMillis}.
*/
static TimedSemaphore createWithDelay(long countdownInMillis) {
return new TimedSemaphore(countdownInMillis);
}
private TimedSemaphore(long countdownInMillis) {
this.semaphore = new Semaphore(0);
this.completed = new Completion(countdownInMillis);
scheduleRelease(countdownInMillis);
}
/**
     * Requests a permit from the semaphore with a timeout and asserts that the operation
     * completed in the expected timeframe.
*/
void tryAcquireSuccessfully(long timeoutMillis) {
assertTrue(tryAcquireUninterruptibly(semaphore, timeoutMillis, MILLISECONDS));
completed.assertCompletionExpected();
}
void tryAcquireSuccessfully(int permits, long timeoutMillis) {
assertTrue(tryAcquireUninterruptibly(semaphore, permits, timeoutMillis, MILLISECONDS));
completed.assertCompletionExpected();
}
/**
* Requests a permit from the semaphore with a timeout and asserts that the wait returned within
* the expected timeout.
*/
private void tryAcquireUnsuccessfully(long timeoutMillis) {
assertFalse(tryAcquireUninterruptibly(semaphore, timeoutMillis, MILLISECONDS));
completed.assertCompletionNotExpected(timeoutMillis);
}
private void tryAcquireUnsuccessfully(int permits, long timeoutMillis) {
assertFalse(tryAcquireUninterruptibly(semaphore, permits, timeoutMillis, MILLISECONDS));
completed.assertCompletionNotExpected(timeoutMillis);
}
private void scheduleRelease(long countdownInMillis) {
DelayedActionRunnable toRun = new Release(semaphore, countdownInMillis);
// TODO(cpovirk): automatically fail the test if this thread throws
Thread releaserThread = new Thread(toRun);
releaserThread.start();
}
}
private abstract static class DelayedActionRunnable implements Runnable {
private final long tMinus;
protected DelayedActionRunnable(long tMinus) {
this.tMinus = tMinus;
}
@Override
public final void run() {
try {
Thread.sleep(tMinus);
} catch (InterruptedException e) {
throw new AssertionError(e);
}
doAction();
}
protected abstract void doAction();
}
private static class CountDown extends DelayedActionRunnable {
private final CountDownLatch latch;
public CountDown(CountDownLatch latch, long tMinus) {
super(tMinus);
this.latch = latch;
}
@Override
protected void doAction() {
latch.countDown();
}
}
private static class EnableWrites extends DelayedActionRunnable {
private final BlockingQueue<String> queue;
public EnableWrites(BlockingQueue<String> queue, long tMinus) {
super(tMinus);
assertFalse(queue.isEmpty());
assertFalse(queue.offer("shouldBeRejected"));
this.queue = queue;
}
@Override
protected void doAction() {
assertNotNull(queue.remove());
}
}
private static class EnableReads extends DelayedActionRunnable {
private final BlockingQueue<String> queue;
public EnableReads(BlockingQueue<String> queue, long tMinus) {
super(tMinus);
assertTrue(queue.isEmpty());
this.queue = queue;
}
@Override
protected void doAction() {
assertTrue(queue.offer(EXPECTED_TAKE));
}
}
private static final class TimedThread {
private final Thread thread;
private final Completion completed;
static TimedThread createWithDelay(long countdownInMillis) {
return new TimedThread(countdownInMillis);
}
private TimedThread(long expectedCompletionWaitMillis) {
completed = new Completion(expectedCompletionWaitMillis);
thread = new Thread(new JoinTarget(expectedCompletionWaitMillis));
thread.start();
}
void joinSuccessfully() {
Uninterruptibles.joinUninterruptibly(thread);
completed.assertCompletionExpected();
assertEquals(Thread.State.TERMINATED, thread.getState());
}
void joinSuccessfully(long timeoutMillis) {
Uninterruptibles.joinUninterruptibly(thread, timeoutMillis, MILLISECONDS);
completed.assertCompletionExpected();
assertEquals(Thread.State.TERMINATED, thread.getState());
}
void joinUnsuccessfully(long timeoutMillis) {
Uninterruptibles.joinUninterruptibly(thread, timeoutMillis, MILLISECONDS);
completed.assertCompletionNotExpected(timeoutMillis);
assertFalse(Thread.State.TERMINATED.equals(thread.getState()));
}
}
private static class JoinTarget extends DelayedActionRunnable {
public JoinTarget(long tMinus) {
super(tMinus);
}
@Override
protected void doAction() {}
}
private static class Release extends DelayedActionRunnable {
private final Semaphore semaphore;
public Release(Semaphore semaphore, long tMinus) {
super(tMinus);
this.semaphore = semaphore;
}
@Override
protected void doAction() {
semaphore.release(10);
}
}
private static void sleepSuccessfully(long sleepMillis) {
Completion completed = new Completion(sleepMillis - SLEEP_SLACK);
Uninterruptibles.sleepUninterruptibly(sleepMillis, MILLISECONDS);
completed.assertCompletionExpected();
}
private static void assertTimeNotPassed(Stopwatch stopwatch, long timelimitMillis) {
long elapsedMillis = stopwatch.elapsed(MILLISECONDS);
assertTrue(elapsedMillis < timelimitMillis);
}
/**
* Await an interrupt, then clear the interrupt status. Similar to {@code
* assertTrue(Thread.interrupted())} except that this version tolerates late interrupts.
*/
private static void assertInterrupted() {
try {
/*
* The sleep() will end immediately if we've already been interrupted or
* wait patiently for the interrupt if not.
*/
Thread.sleep(LONG_DELAY_MS);
fail("Dude, where's my interrupt?");
} catch (InterruptedException expected) {
}
}
private static void assertNotInterrupted() {
assertFalse(Thread.interrupted());
}
private static void requestInterruptIn(long millis) {
InterruptionUtil.requestInterruptIn(millis, MILLISECONDS);
}
private static class TestCondition implements Condition {
private final Lock lock;
private final Condition condition;
private TestCondition(Lock lock, Condition condition) {
this.lock = lock;
this.condition = condition;
}
static TestCondition createAndSignalAfter(long delay, TimeUnit unit) {
final TestCondition testCondition = create();
ScheduledExecutorService scheduledPool = Executors.newScheduledThreadPool(1);
// If signal() fails somehow, we should see a failed test, even without looking at the Future.
Future<?> unused =
scheduledPool.schedule(
new Runnable() {
@Override
public void run() {
testCondition.signal();
}
},
delay,
unit);
return testCondition;
}
static TestCondition create() {
Lock lock = new ReentrantLock();
Condition condition = lock.newCondition();
return new TestCondition(lock, condition);
}
@Override
public void await() throws InterruptedException {
lock.lock();
try {
condition.await();
} finally {
lock.unlock();
}
}
@Override
public boolean await(long time, TimeUnit unit) throws InterruptedException {
lock.lock();
try {
return condition.await(time, unit);
} finally {
lock.unlock();
}
}
@Override
public void awaitUninterruptibly() {
lock.lock();
try {
condition.awaitUninterruptibly();
} finally {
lock.unlock();
}
}
@Override
public long awaitNanos(long nanosTimeout) throws InterruptedException {
lock.lock();
try {
return condition.awaitNanos(nanosTimeout);
} finally {
lock.unlock();
}
}
@Override
public boolean awaitUntil(Date deadline) throws InterruptedException {
lock.lock();
try {
return condition.awaitUntil(deadline);
} finally {
lock.unlock();
}
}
@Override
public void signal() {
lock.lock();
try {
condition.signal();
} finally {
lock.unlock();
}
}
@Override
public void signalAll() {
lock.lock();
try {
condition.signalAll();
} finally {
lock.unlock();
}
}
}
}
|
apache-2.0
|
salyh/javamailspec
|
geronimo-jacc_1.4_spec/src/test/java/javax/security/jacc/EJBMethodPermissionCollectionTest.java
|
18794
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//
// This source code implements specifications defined by the Java
// Community Process. In order to remain compliant with the specification
// DO NOT add / change / or delete method signatures!
//
package javax.security.jacc;
import junit.framework.TestCase;
import java.security.PermissionCollection;
import java.security.Permission;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
import java.util.Enumeration;
/**
* @version $Rev$ $Date$
*/
public class EJBMethodPermissionCollectionTest extends TestCase {
public void testWildCards() {
PermissionCollection collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ""));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ",,a,b,c"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", ",,a,b,c")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ",,"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", ",,")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ",Local"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", ",Local")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ",Local,a,b,c"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", ",Local,a,b,c")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ",Local,"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", ",Local,")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello,,a,b,c"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello,,a,b,c")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello,,"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello,,")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello,Local"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello,Local")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello,Local,a,b,c")));
collection = new EJBMethodPermission("HelloWorld", "").newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", "hello,Local,"));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", ",Local,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,a,b,c")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,,")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local")));
assertFalse(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,a,b,c")));
assertTrue(collection.implies(new EJBMethodPermission("HelloWorld", "hello,Local,")));
assertFalse(collection.implies(new EJBMethodPermission("GoodbyeWorld", "hello,Local,")));
}
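    // Summary of the semantics exercised above: each of methodName, interfaceName and
    // the parameter list acts as a wildcard when left unspecified and must match
    // exactly when given. So "" implies everything on the bean, ",,a,b,c" implies any
    // method on any interface taking (a,b,c), and "hello,Local," implies only the
    // no-arg hello() on the Local interface.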
public void testSerialization() throws Exception {
EJBMethodPermission p = new EJBMethodPermission("HelloWorld", "");
PermissionCollection collection = p.newPermissionCollection();
collection.add(new EJBMethodPermission("HelloWorld", ""));
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(collection);
oos.flush();
byte[] bytes = baos.toByteArray();
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
ObjectInputStream ois = new ObjectInputStream(bais);
PermissionCollection collection2 = (PermissionCollection) ois.readObject();
Enumeration <Permission> ps = collection2.elements();
Permission p2 = ps.nextElement();
assertEquals(p2, p);
assertFalse(ps.hasMoreElements());
assertTrue(collection2.implies(p));
}
}
|
apache-2.0
|
andyperlitch/incubator-apex-core
|
engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
|
125566
|
/**
* Copyright (C) 2015 DataTorrent, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datatorrent.stram;
import java.io.*;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.Nullable;
import com.datatorrent.netlet.util.DTThrowable;
import com.esotericsoftware.kryo.KryoException;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import net.engio.mbassy.bus.MBassador;
import net.engio.mbassy.bus.config.BusConfiguration;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.lang3.mutable.MutableLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.webapp.NotFoundException;
import com.datatorrent.api.*;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.Operator.InputPort;
import com.datatorrent.api.Operator.OutputPort;
import com.datatorrent.api.Stats.OperatorStats;
import com.datatorrent.api.annotation.Stateless;
import com.datatorrent.bufferserver.auth.AuthManager;
import com.datatorrent.bufferserver.util.Codec;
import com.datatorrent.common.experimental.AppData;
import com.datatorrent.common.util.AsyncFSStorageAgent;
import com.datatorrent.common.util.FSStorageAgent;
import com.datatorrent.common.util.NumberAggregate;
import com.datatorrent.common.util.Pair;
import com.datatorrent.stram.Journal.Recoverable;
import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest;
import com.datatorrent.stram.api.*;
import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.*;
import com.datatorrent.stram.engine.OperatorResponse;
import com.datatorrent.stram.engine.StreamingContainer;
import com.datatorrent.stram.engine.WindowGenerator;
import com.datatorrent.stram.plan.logical.LogicalOperatorStatus;
import com.datatorrent.stram.plan.logical.LogicalPlan;
import com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta;
import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
import com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta;
import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration;
import com.datatorrent.stram.plan.logical.Operators;
import com.datatorrent.stram.plan.logical.Operators.PortContextPair;
import com.datatorrent.stram.plan.logical.requests.LogicalPlanRequest;
import com.datatorrent.stram.plan.physical.*;
import com.datatorrent.stram.plan.physical.OperatorStatus.PortStatus;
import com.datatorrent.stram.plan.physical.PTOperator.PTInput;
import com.datatorrent.stram.plan.physical.PTOperator.PTOutput;
import com.datatorrent.stram.plan.physical.PTOperator.State;
import com.datatorrent.stram.plan.physical.PhysicalPlan.PlanContext;
import com.datatorrent.stram.util.ConfigUtils;
import com.datatorrent.stram.util.FSJsonLineFile;
import com.datatorrent.stram.util.MovingAverage.MovingAverageLong;
import com.datatorrent.stram.util.SharedPubSubWebSocketClient;
import com.datatorrent.stram.util.WebServicesClient;
import com.datatorrent.stram.webapp.*;
/**
 * Tracks topology provisioning/allocation to containers.
 * <p>
 * The tasks include:
 * <ul>
 * <li>provisioning operators one container at a time; each container gets assigned its operators, streams and context</li>
 * <li>monitoring runtime operations, including the heartbeat protocol and node status</li>
 * <li>operator recovery and restart</li>
 * </ul>
*
* @since 0.3.2
*/
public class StreamingContainerManager implements PlanContext
{
  private static final Logger LOG = LoggerFactory.getLogger(StreamingContainerManager.class);
  public static final String GATEWAY_LOGIN_URL_PATH = "/ws/v2/login";
  public static final String BUILTIN_APPDATA_URL = "builtin";
  public static final String CONTAINERS_INFO_FILENAME_FORMAT = "containers_%d.json";
  public static final String OPERATORS_INFO_FILENAME_FORMAT = "operators_%d.json";
  public static final String APP_META_FILENAME = "meta.json";
  public static final String APP_META_KEY_ATTRIBUTES = "attributes";
  public static final String APP_META_KEY_METRICS = "metrics";
  public static final long LATENCY_WARNING_THRESHOLD_MILLIS = 10 * 60 * 1000; // 10 minutes
  public static final Recoverable SET_OPERATOR_PROPERTY = new SetOperatorProperty();
  public static final Recoverable SET_PHYSICAL_OPERATOR_PROPERTY = new SetPhysicalOperatorProperty();
  public static final int METRIC_QUEUE_SIZE = 1000;
private final FinalVars vars;
private final PhysicalPlan plan;
private final Clock clock;
private SharedPubSubWebSocketClient wsClient;
private FSStatsRecorder statsRecorder;
private FSEventRecorder eventRecorder;
protected final Map<String, String> containerStopRequests = new ConcurrentHashMap<String, String>();
protected final ConcurrentLinkedQueue<ContainerStartRequest> containerStartRequests = new ConcurrentLinkedQueue<ContainerStartRequest>();
protected boolean forcedShutdown = false;
private final ConcurrentLinkedQueue<Runnable> eventQueue = new ConcurrentLinkedQueue<Runnable>();
private final AtomicBoolean eventQueueProcessing = new AtomicBoolean();
private final HashSet<PTContainer> pendingAllocation = Sets.newLinkedHashSet();
protected String shutdownDiagnosticsMessage = "";
private long lastResourceRequest = 0;
private final Map<String, StreamingContainerAgent> containers = new ConcurrentHashMap<String, StreamingContainerAgent>();
private final List<Pair<PTOperator, Long>> purgeCheckpoints = new ArrayList<Pair<PTOperator, Long>>();
private final Map<Long, Set<PTOperator>> shutdownOperators = new HashMap<>();
private CriticalPathInfo criticalPathInfo;
private final ConcurrentMap<PTOperator, PTOperator> reportStats = Maps.newConcurrentMap();
private final AtomicBoolean deployChangeInProgress = new AtomicBoolean();
private int deployChangeCnt;
private MBassador<StramEvent> eventBus; // event bus for publishing stram events
  private final Journal journal;
private RecoveryHandler recoveryHandler;
// window id to node id to end window stats
private final ConcurrentSkipListMap<Long, Map<Integer, EndWindowStats>> endWindowStatsOperatorMap = new ConcurrentSkipListMap<Long, Map<Integer, EndWindowStats>>();
private long committedWindowId;
// (operator id, port name) to timestamp
private final Map<Pair<Integer, String>, Long> operatorPortLastEndWindowTimestamps = Maps.newConcurrentMap();
private final Map<Integer, Long> operatorLastEndWindowTimestamps = Maps.newConcurrentMap();
private long lastStatsTimestamp = System.currentTimeMillis();
private long currentEndWindowStatsWindowId;
private long completeEndWindowStatsWindowId;
private final ConcurrentHashMap<String, MovingAverageLong> rpcLatencies = new ConcurrentHashMap<String, MovingAverageLong>();
private final AtomicLong nodeToStramRequestIds = new AtomicLong(1);
private long allocatedMemoryBytes = 0;
private List<AppDataSource> appDataSources = null;
private final Cache<Long, Object> commandResponse = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
private long lastLatencyWarningTime;
private transient ExecutorService poolExecutor;
private FileContext fileContext;
//logic operator name to a queue of logical metrics. this gets cleared periodically
private final Map<String, Queue<Pair<Long, Map<String, Object>>>> logicalMetrics = Maps.newConcurrentMap();
//logical operator name to latest logical metrics.
private final Map<String, Map<String, Object>> latestLogicalMetrics = Maps.newHashMap();
//logical operator name to latest counters. exists for backward compatibility.
private final Map<String, Object> latestLogicalCounters = Maps.newHashMap();
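  // recently completed containers (external id -> info), populated by removeContainerAgent();
  // entries older than the expiry window are purged whenever a new entry is inserted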
private final LinkedHashMap<String, ContainerInfo> completedContainers = new LinkedHashMap<String, ContainerInfo>()
{
private static final long serialVersionUID = 201405281500L;
@Override
protected boolean removeEldestEntry(Map.Entry<String, ContainerInfo> eldest)
{
      long expireTime = System.currentTimeMillis() - 30L * 60 * 60 * 1000; // retain completed container info for 30 hours
Iterator<Map.Entry<String, ContainerInfo>> iterator = entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, ContainerInfo> entry = iterator.next();
if (entry.getValue().finishedTime < expireTime) {
iterator.remove();
}
}
return false;
}
};
private FSJsonLineFile containerFile;
private FSJsonLineFile operatorFile;
private final long startTime = System.currentTimeMillis();
private static class EndWindowStats
{
long emitTimestamp = -1;
HashMap<String, Long> dequeueTimestamps = new HashMap<String, Long>(); // input port name to end window dequeue time
Object counters;
Map<String, Object> metrics;
}
public static class CriticalPathInfo
{
long latency;
LinkedList<Integer> path = new LinkedList<Integer>();
}
private static class SetOperatorProperty implements Recoverable
{
    private final String operatorName;
    private final String propertyName;
    private final String propertyValue;
private SetOperatorProperty()
{
this(null, null, null);
}
private SetOperatorProperty(String operatorName, String propertyName, String propertyValue)
{
this.operatorName = operatorName;
this.propertyName = propertyName;
this.propertyValue = propertyValue;
}
@Override
public void read(final Object object, final Input in) throws KryoException
{
final StreamingContainerManager scm = (StreamingContainerManager)object;
final String operatorName = in.readString();
final String propertyName = in.readString();
final String propertyValue = in.readString();
final OperatorMeta logicalOperator = scm.plan.getLogicalPlan().getOperatorMeta(operatorName);
if (logicalOperator == null) {
throw new IllegalArgumentException("Unknown operator " + operatorName);
}
scm.setOperatorProperty(logicalOperator, propertyName, propertyValue);
}
@Override
public void write(final Output out) throws KryoException
{
out.writeString(operatorName);
out.writeString(propertyName);
out.writeString(propertyValue);
}
}
private static class SetPhysicalOperatorProperty implements Recoverable
{
    private final int operatorId;
    private final String propertyName;
    private final String propertyValue;
private SetPhysicalOperatorProperty()
{
this(-1, null, null);
}
private SetPhysicalOperatorProperty(int operatorId, String propertyName, String propertyValue)
{
this.operatorId = operatorId;
this.propertyName = propertyName;
this.propertyValue = propertyValue;
}
@Override
public void read(final Object object, final Input in) throws KryoException
{
final StreamingContainerManager scm = (StreamingContainerManager)object;
final int operatorId = in.readInt();
final String propertyName = in.readString();
final String propertyValue = in.readString();
final PTOperator o = scm.plan.getAllOperators().get(operatorId);
if (o == null) {
throw new IllegalArgumentException("Unknown physical operator " + operatorId);
}
scm.setPhysicalOperatorProperty(o, propertyName, propertyValue);
}
@Override
public void write(final Output out) throws KryoException
{
out.writeInt(operatorId);
out.writeString(propertyName);
out.writeString(propertyValue);
}
}
public StreamingContainerManager(LogicalPlan dag, Clock clock)
{
this(dag, false, clock);
}
public StreamingContainerManager(LogicalPlan dag)
{
this(dag, false, new SystemClock());
}
public StreamingContainerManager(LogicalPlan dag, boolean enableEventRecording, Clock clock)
{
this.clock = clock;
this.vars = new FinalVars(dag, clock.getTime());
poolExecutor = Executors.newFixedThreadPool(4);
// setup prior to plan creation for event recording
if (enableEventRecording) {
this.eventBus = new MBassador<StramEvent>(BusConfiguration.Default(1, 1, 1));
}
this.plan = new PhysicalPlan(dag, this);
this.journal = new Journal(this);
init(enableEventRecording);
}
private StreamingContainerManager(CheckpointState checkpointedState, boolean enableEventRecording)
{
this.vars = checkpointedState.finals;
this.clock = new SystemClock();
poolExecutor = Executors.newFixedThreadPool(4);
this.plan = checkpointedState.physicalPlan;
this.eventBus = new MBassador<StramEvent>(BusConfiguration.Default(1, 1, 1));
this.journal = new Journal(this);
init(enableEventRecording);
}
private void init(boolean enableEventRecording)
{
setupWsClient();
setupRecording(enableEventRecording);
setupStringCodecs();
try {
Path file = new Path(this.vars.appPath);
URI uri = file.toUri();
Configuration config = new YarnConfiguration();
fileContext = uri.getScheme() == null ? FileContext.getFileContext(config) : FileContext.getFileContext(uri, config);
saveMetaInfo();
String fileName = String.format(CONTAINERS_INFO_FILENAME_FORMAT, plan.getLogicalPlan().getValue(LogicalPlan.APPLICATION_ATTEMPT_ID));
this.containerFile = new FSJsonLineFile(fileContext, new Path(this.vars.appPath, fileName), FsPermission.getDefault());
this.containerFile.append(getAppMasterContainerInfo());
fileName = String.format(OPERATORS_INFO_FILENAME_FORMAT, plan.getLogicalPlan().getValue(LogicalPlan.APPLICATION_ATTEMPT_ID));
this.operatorFile = new FSJsonLineFile(fileContext, new Path(this.vars.appPath, fileName), FsPermission.getDefault());
} catch (IOException ex) {
throw DTThrowable.wrapIfChecked(ex);
}
}
  public Journal getJournal()
  {
    return journal;
  }
public final ContainerInfo getAppMasterContainerInfo()
{
ContainerInfo ci = new ContainerInfo();
ci.id = System.getenv(ApplicationConstants.Environment.CONTAINER_ID.toString());
String nmHost = System.getenv(ApplicationConstants.Environment.NM_HOST.toString());
String nmPort = System.getenv(ApplicationConstants.Environment.NM_PORT.toString());
String nmHttpPort = System.getenv(ApplicationConstants.Environment.NM_HTTP_PORT.toString());
ci.state = "ACTIVE";
ci.jvmName = ManagementFactory.getRuntimeMXBean().getName();
ci.numOperators = 0;
YarnConfiguration conf = new YarnConfiguration();
if (nmHost != null) {
if (nmPort != null) {
ci.host = nmHost + ":" + nmPort;
}
if (nmHttpPort != null) {
String nodeHttpAddress = nmHost + ":" + nmHttpPort;
if (allocatedMemoryBytes == 0) {
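          // lazily discover this AM container's memory allocation via the node manager's REST API (one-time lookup)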
String url = ConfigUtils.getSchemePrefix(conf) + nodeHttpAddress + "/ws/v1/node/containers/" + ci.id;
WebServicesClient webServicesClient = new WebServicesClient();
try {
String content = webServicesClient.process(url, String.class, new WebServicesClient.GetWebServicesHandler<String>());
JSONObject json = new JSONObject(content);
int totalMemoryNeededMB = json.getJSONObject("container").getInt("totalMemoryNeededMB");
if (totalMemoryNeededMB > 0) {
                allocatedMemoryBytes = totalMemoryNeededMB * 1024L * 1024L; // long arithmetic to avoid int overflow above 2 GB
} else {
LOG.warn("Could not determine the memory allocated for the streaming application master. Node manager is reporting {} MB from {}", totalMemoryNeededMB, url);
}
}
catch (Exception ex) {
LOG.warn("Could not determine the memory allocated for the streaming application master", ex);
}
}
ci.containerLogsUrl = ConfigUtils.getSchemePrefix(conf) + nodeHttpAddress + "/node/containerlogs/" + ci.id + "/" + System.getenv(ApplicationConstants.Environment.USER.toString());
ci.rawContainerLogsUrl = ConfigUtils.getRawContainerLogsUrl(conf, nodeHttpAddress, plan.getLogicalPlan().getAttributes().get(LogicalPlan.APPLICATION_ID), ci.id);
}
}
ci.memoryMBAllocated = (int)(allocatedMemoryBytes / (1024 * 1024));
ci.memoryMBFree = ((int)(Runtime.getRuntime().freeMemory() / (1024 * 1024)));
ci.lastHeartbeat = -1;
ci.startedTime = startTime;
ci.finishedTime = -1;
return ci;
}
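  /**
   * Records an RPC round-trip latency sample for the given container. The moving
   * average is used to compensate end window timestamps when computing operator
   * latency across containers (see calculateLatency()).
   *
   * @param containerId external id of the reporting container
   * @param latency observed RPC latency in milliseconds
   */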
public void updateRPCLatency(String containerId, long latency)
{
if (vars.rpcLatencyCompensationSamples > 0) {
MovingAverageLong latencyMA = rpcLatencies.get(containerId);
if (latencyMA == null) {
final MovingAverageLong val = new MovingAverageLong(vars.rpcLatencyCompensationSamples);
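        // lock-free insert: if a concurrent RPC thread wins the race, use its instance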
latencyMA = rpcLatencies.putIfAbsent(containerId, val);
if (latencyMA == null) {
latencyMA = val;
}
}
latencyMA.add(latency);
}
}
private void setupRecording(boolean enableEventRecording)
{
if (this.vars.enableStatsRecording) {
statsRecorder = new FSStatsRecorder();
statsRecorder.setBasePath(this.vars.appPath + "/" + LogicalPlan.SUBDIR_STATS);
statsRecorder.setup();
}
if (enableEventRecording) {
eventRecorder = new FSEventRecorder(plan.getLogicalPlan().getValue(LogicalPlan.APPLICATION_ID));
eventRecorder.setBasePath(this.vars.appPath + "/" + LogicalPlan.SUBDIR_EVENTS);
eventRecorder.setWebSocketClient(wsClient);
eventRecorder.setup();
eventBus.subscribe(eventRecorder);
}
}
private void setupStringCodecs()
{
Map<Class<?>, Class<? extends StringCodec<?>>> codecs = this.plan.getLogicalPlan().getAttributes().get(Context.DAGContext.STRING_CODECS);
StringCodecs.loadConverters(codecs);
}
private void setupWsClient()
{
String gatewayAddress = plan.getLogicalPlan().getValue(LogicalPlan.GATEWAY_CONNECT_ADDRESS);
boolean gatewayUseSsl = plan.getLogicalPlan().getValue(LogicalPlan.GATEWAY_USE_SSL);
String gatewayUserName = plan.getLogicalPlan().getValue(LogicalPlan.GATEWAY_USER_NAME);
String gatewayPassword = plan.getLogicalPlan().getValue(LogicalPlan.GATEWAY_PASSWORD);
if (gatewayAddress != null) {
try {
wsClient = new SharedPubSubWebSocketClient((gatewayUseSsl ? "wss://" : "ws://") + gatewayAddress + "/pubsub", 500);
if (gatewayUserName != null && gatewayPassword != null) {
wsClient.setLoginUrl((gatewayUseSsl ? "https://" : "http://") + gatewayAddress + GATEWAY_LOGIN_URL_PATH);
wsClient.setUserName(gatewayUserName);
wsClient.setPassword(gatewayPassword);
}
wsClient.setup();
}
catch (Exception ex) {
LOG.warn("Cannot establish websocket connection to {}", gatewayAddress, ex);
}
}
}
public void teardown()
{
if (eventBus != null) {
eventBus.shutdown();
}
if (eventRecorder != null) {
eventRecorder.teardown();
}
if (statsRecorder != null) {
statsRecorder.teardown();
}
IOUtils.closeQuietly(containerFile);
IOUtils.closeQuietly(operatorFile);
    if (poolExecutor != null) {
      poolExecutor.shutdown();
    }
}
public void subscribeToEvents(Object listener)
{
if (eventBus != null) {
eventBus.subscribe(listener);
}
}
public PhysicalPlan getPhysicalPlan()
{
return plan;
}
public long getCommittedWindowId()
{
return committedWindowId;
}
public boolean isGatewayConnected()
{
return wsClient != null && wsClient.isConnectionOpen();
}
public SharedPubSubWebSocketClient getWsClient()
{
return wsClient;
}
private String convertAppDataUrl(String url)
{
if (BUILTIN_APPDATA_URL.equals(url)) {
return url;
}
    /*
    else if (url != null) {
      String messageProxyUrl = this.plan.getLogicalPlan().getAttributes().get(Context.DAGContext.APPLICATION_DATA_MESSAGE_PROXY_URL);
      if (messageProxyUrl != null) {
        StringBuilder convertedUrl = new StringBuilder(messageProxyUrl);
        convertedUrl.append("?url=");
        try {
          convertedUrl.append(URLEncoder.encode(url, "UTF-8"));
          return convertedUrl.toString();
        } catch (UnsupportedEncodingException ex) {
          LOG.warn("URL {} cannot be encoded", url);
        }
      }
    }
    */
LOG.warn("App Data URL {} cannot be converted for the client.", url);
return url;
}
private final Object appDataSourcesLock = new Object();
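  /**
   * Lazily discovers the app data sources by scanning the logical plan for
   * app data query/result port pairs; the result is cached after the first call.
   */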
public List<AppDataSource> getAppDataSources()
{
synchronized (appDataSourcesLock) {
if (appDataSources == null) {
appDataSources = new ArrayList<AppDataSource>();
operators:
for (LogicalPlan.OperatorMeta operatorMeta : plan.getLogicalPlan().getAllOperators()) {
Map<LogicalPlan.InputPortMeta, LogicalPlan.StreamMeta> inputStreams = operatorMeta.getInputStreams();
Map<LogicalPlan.OutputPortMeta, LogicalPlan.StreamMeta> outputStreams = operatorMeta.getOutputStreams();
String queryOperatorName = null;
String queryUrl = null;
String queryTopic = null;
LOG.warn("DEBUG: looking at operator {} {}", operatorMeta.getName(), Thread.currentThread().getId());
for (Map.Entry<LogicalPlan.InputPortMeta, LogicalPlan.StreamMeta> entry : inputStreams.entrySet()) {
LogicalPlan.InputPortMeta portMeta = entry.getKey();
if (portMeta.isAppDataQueryPort()) {
if (queryUrl == null) {
OperatorMeta queryOperatorMeta = entry.getValue().getSource().getOperatorMeta();
if (queryOperatorMeta.getOperator() instanceof AppData.ConnectionInfoProvider) {
AppData.ConnectionInfoProvider queryOperator = (AppData.ConnectionInfoProvider) queryOperatorMeta.getOperator();
queryOperatorName = queryOperatorMeta.getName();
queryUrl = queryOperator.getAppDataURL();
queryTopic = queryOperator.getTopic();
}
} else {
LOG.warn("Multiple query ports found in operator {}. Ignoring the App Data Source.", operatorMeta.getName());
continue operators;
}
}
}
for (Map.Entry<LogicalPlan.OutputPortMeta, LogicalPlan.StreamMeta> entry : outputStreams.entrySet()) {
LogicalPlan.OutputPortMeta portMeta = entry.getKey();
LOG.warn("DEBUG: looking at port {} {}", portMeta.getPortName(), Thread.currentThread().getId());
if (portMeta.isAppDataResultPort()) {
AppDataSource appDataSource = new AppDataSource();
appDataSource.setType(AppDataSource.Type.DAG);
appDataSource.setOperatorName(operatorMeta.getName());
appDataSource.setPortName(portMeta.getPortName());
if (queryOperatorName == null) {
LOG.warn("There is no query operator for the App Data Source {}.{}. Ignoring the App Data Source.", operatorMeta.getName(), portMeta.getPortName());
continue;
}
appDataSource.setQueryOperatorName(queryOperatorName);
appDataSource.setQueryTopic(queryTopic);
appDataSource.setQueryUrl(convertAppDataUrl(queryUrl));
List<LogicalPlan.InputPortMeta> sinks = entry.getValue().getSinks();
if (sinks.isEmpty()) {
LOG.warn("There is no result operator for the App Data Source {}.{}. Ignoring the App Data Source.", operatorMeta.getName(), portMeta.getPortName());
continue;
}
if (sinks.size() > 1) {
LOG.warn("There are multiple result operators for the App Data Source {}.{}. Ignoring the App Data Source.", operatorMeta.getName(), portMeta.getPortName());
continue;
}
OperatorMeta resultOperatorMeta = sinks.get(0).getOperatorWrapper();
if (resultOperatorMeta.getOperator() instanceof AppData.ConnectionInfoProvider) {
AppData.ConnectionInfoProvider resultOperator = (AppData.ConnectionInfoProvider) resultOperatorMeta.getOperator();
appDataSource.setResultOperatorName(resultOperatorMeta.getName());
appDataSource.setResultTopic(resultOperator.getTopic());
appDataSource.setResultUrl(convertAppDataUrl(resultOperator.getAppDataURL()));
AppData.AppendQueryIdToTopic queryIdAppended = resultOperator.getClass().getAnnotation(AppData.AppendQueryIdToTopic.class);
if (queryIdAppended != null && queryIdAppended.value()) {
appDataSource.setResultAppendQIDTopic(true);
}
} else {
LOG.warn("Result operator for the App Data Source {}.{} does not implement the right interface. Ignoring the App Data Source.", operatorMeta.getName(), portMeta.getPortName());
continue;
}
LOG.warn("DEBUG: Adding appDataSource {} {}", appDataSource.getName(), Thread.currentThread().getId());
appDataSources.add(appDataSource);
}
}
}
}
}
return appDataSources;
}
public Map<String, Map<String, Object>> getLatestLogicalMetrics()
{
return latestLogicalMetrics;
}
/**
* Check periodically that deployed containers phone home.
* Run from the master main loop (single threaded access).
*/
public void monitorHeartbeat()
{
long currentTms = clock.getTime();
    // look for resource allocation timeout
    if (!pendingAllocation.isEmpty()) {
if (lastResourceRequest + plan.getLogicalPlan().getValue(LogicalPlan.RESOURCE_ALLOCATION_TIMEOUT_MILLIS) < currentTms) {
String msg = String.format("Shutdown due to resource allocation timeout (%s ms) waiting for %s containers", currentTms - lastResourceRequest, pendingAllocation.size());
LOG.warn(msg);
for (PTContainer c : pendingAllocation) {
LOG.warn("Waiting for resource: {}m priority: {} {}", c.getRequiredMemoryMB(), c.getResourceRequestPriority(), c);
}
shutdownAllContainers(msg);
this.forcedShutdown = true;
}
else {
for (PTContainer c : pendingAllocation) {
LOG.debug("Waiting for resource: {}m {}", c.getRequiredMemoryMB(), c);
}
}
}
// monitor currently deployed containers
for (StreamingContainerAgent sca : containers.values()) {
PTContainer c = sca.container;
if (!pendingAllocation.contains(c) && c.getExternalId() != null) {
if (sca.lastHeartbeatMillis == 0) {
//LOG.debug("{} {} {}", c.getExternalId(), currentTms - sca.createdMillis, this.vars.heartbeatTimeoutMillis);
// container allocated but process was either not launched or is not able to phone home
if (currentTms - sca.createdMillis > 2 * this.vars.heartbeatTimeoutMillis) {
LOG.info("Container {}@{} startup timeout ({} ms).", c.getExternalId(), c.host, currentTms - sca.createdMillis);
containerStopRequests.put(c.getExternalId(), c.getExternalId());
}
}
else {
if (currentTms - sca.lastHeartbeatMillis > this.vars.heartbeatTimeoutMillis) {
if (!isApplicationIdle()) {
// request stop (kill) as process may still be hanging around (would have been detected by Yarn otherwise)
LOG.info("Container {}@{} heartbeat timeout ({} ms).", c.getExternalId(), c.host, currentTms - sca.lastHeartbeatMillis);
containerStopRequests.put(c.getExternalId(), c.getExternalId());
}
}
}
}
}
// events that may modify the plan
processEvents();
committedWindowId = updateCheckpoints(false);
calculateEndWindowStats();
if (this.vars.enableStatsRecording) {
recordStats(currentTms);
}
}
private void recordStats(long currentTms)
{
try {
statsRecorder.recordContainers(containers, currentTms);
statsRecorder.recordOperators(getOperatorInfoList(), currentTms);
}
catch (Exception ex) {
LOG.warn("Exception caught when recording stats", ex);
}
}
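  /**
   * Walks the per-window end window stats in ascending window id order. For each
   * window that has reports from all current operators, aggregates metrics and
   * computes per-operator latency and the critical (slowest) path through the DAG.
   * Windows with stale or incomplete reports are trimmed to bound memory usage.
   */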
private void calculateEndWindowStats()
{
if (!endWindowStatsOperatorMap.isEmpty()) {
Set<Integer> allCurrentOperators = plan.getAllOperators().keySet();
if (endWindowStatsOperatorMap.size() > this.vars.maxWindowsBehindForStats) {
LOG.warn("Some operators are behind for more than {} windows! Trimming the end window stats map", this.vars.maxWindowsBehindForStats);
while (endWindowStatsOperatorMap.size() > this.vars.maxWindowsBehindForStats) {
LOG.debug("Removing incomplete end window stats for window id {}. Collected operator set: {}. Complete set: {}",
endWindowStatsOperatorMap.firstKey(),
endWindowStatsOperatorMap.get(endWindowStatsOperatorMap.firstKey()).keySet(), allCurrentOperators);
endWindowStatsOperatorMap.remove(endWindowStatsOperatorMap.firstKey());
}
}
//logicalMetrics.clear();
int numOperators = allCurrentOperators.size();
Long windowId = endWindowStatsOperatorMap.firstKey();
while (windowId != null) {
Map<Integer, EndWindowStats> endWindowStatsMap = endWindowStatsOperatorMap.get(windowId);
Set<Integer> endWindowStatsOperators = endWindowStatsMap.keySet();
aggregateMetrics(windowId, endWindowStatsMap);
if (allCurrentOperators.containsAll(endWindowStatsOperators)) {
if (endWindowStatsMap.size() < numOperators) {
if (windowId < completeEndWindowStatsWindowId) {
LOG.debug("Disregarding stale end window stats for window {}", windowId);
endWindowStatsOperatorMap.remove(windowId);
}
else {
break;
}
}
else {
// collected data from all operators for this window id. start latency calculation
List<OperatorMeta> rootOperatorMetas = plan.getLogicalPlan().getRootOperators();
Set<PTOperator> endWindowStatsVisited = new HashSet<PTOperator>();
Set<PTOperator> leafOperators = new HashSet<PTOperator>();
for (OperatorMeta root : rootOperatorMetas) {
List<PTOperator> rootOperators = plan.getOperators(root);
for (PTOperator rootOperator : rootOperators) {
// DFS for visiting the operators for latency calculation
LOG.debug("Calculating latency starting from operator {}", rootOperator.getId());
calculateLatency(rootOperator, endWindowStatsMap, endWindowStatsVisited, leafOperators);
}
}
CriticalPathInfo cpi = new CriticalPathInfo();
//LOG.debug("Finding critical path...");
cpi.latency = findCriticalPath(endWindowStatsMap, leafOperators, cpi.path);
criticalPathInfo = cpi;
endWindowStatsOperatorMap.remove(windowId);
currentEndWindowStatsWindowId = windowId;
}
}
else {
// the old stats contains operators that do not exist any more
// this is probably right after a partition happens.
LOG.debug("Stats for non-existent operators detected. Disregarding end window stats for window {}", windowId);
endWindowStatsOperatorMap.remove(windowId);
}
windowId = endWindowStatsOperatorMap.higherKey(windowId);
}
}
}
private void aggregateMetrics(long windowId, Map<Integer, EndWindowStats> endWindowStatsMap)
{
Collection<OperatorMeta> logicalOperators = getLogicalPlan().getAllOperators();
//for backward compatibility
for (OperatorMeta operatorMeta : logicalOperators) {
Context.CountersAggregator aggregator = operatorMeta.getValue(OperatorContext.COUNTERS_AGGREGATOR);
if (aggregator == null) {
continue;
}
Collection<PTOperator> physicalOperators = plan.getAllOperators(operatorMeta);
List<Object> counters = Lists.newArrayList();
for (PTOperator operator : physicalOperators) {
EndWindowStats stats = endWindowStatsMap.get(operator.getId());
if (stats != null && stats.counters != null) {
counters.add(stats.counters);
}
}
if (counters.size() > 0) {
Object aggregate = aggregator.aggregate(counters);
latestLogicalCounters.put(operatorMeta.getName(), aggregate);
}
}
for (OperatorMeta operatorMeta : logicalOperators) {
AutoMetric.Aggregator aggregator = operatorMeta.getMetricAggregatorMeta() != null ?
operatorMeta.getMetricAggregatorMeta().getAggregator() : null;
if (aggregator == null) {
continue;
}
Collection<PTOperator> physicalOperators = plan.getAllOperators(operatorMeta);
List<AutoMetric.PhysicalMetricsContext> metricPool = Lists.newArrayList();
for (PTOperator operator : physicalOperators) {
EndWindowStats stats = endWindowStatsMap.get(operator.getId());
if (stats != null && stats.metrics != null) {
PhysicalMetricsContextImpl physicalMetrics = new PhysicalMetricsContextImpl(operator.getId(), stats.metrics);
metricPool.add(physicalMetrics);
}
}
Map<String, Object> lm = aggregator.aggregate(windowId, metricPool);
if (lm != null && lm.size() > 0) {
Queue<Pair<Long, Map<String, Object>>> windowMetrics = logicalMetrics.get(operatorMeta.getName());
if (windowMetrics == null) {
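          // bounded FIFO: once the queue is full, the override below evicts the oldest window's metrics on add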
windowMetrics = new LinkedBlockingQueue<Pair<Long, Map<String, Object>>>(METRIC_QUEUE_SIZE)
{
@Override
public boolean add(Pair<Long, Map<String, Object>> longMapPair)
{
if (remainingCapacity() <= 1) {
remove();
}
return super.add(longMapPair);
}
};
logicalMetrics.put(operatorMeta.getName(), windowMetrics);
}
LOG.debug("Adding to logical metrics for {}", operatorMeta.getName());
windowMetrics.add(new Pair<Long, Map<String, Object>>(windowId, lm));
Map<String, Object> oldValue = latestLogicalMetrics.put(operatorMeta.getName(), lm);
if (oldValue == null) {
try {
saveMetaInfo();
} catch (IOException ex) {
LOG.error("Cannot save application meta info to DFS. App data sources will not be available.", ex);
}
}
}
}
}
  /**
   * Saves meta information about this application to HDFS, i.e. the meta information
   * that generally does not change across application attempts.
   */
private void saveMetaInfo() throws IOException
{
Path file = new Path(this.vars.appPath, APP_META_FILENAME + "." + System.nanoTime());
    try (FSDataOutputStream os = fileContext.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent())) {
      JSONObject top = new JSONObject();
JSONObject attributes = new JSONObject();
for (Map.Entry<Attribute<?>, Object> entry : this.plan.getLogicalPlan().getAttributes().entrySet()) {
attributes.put(entry.getKey().getSimpleName(), entry.getValue());
}
JSONObject autoMetrics = new JSONObject();
for (Map.Entry<String, Map<String, Object>> entry : latestLogicalMetrics.entrySet()) {
autoMetrics.put(entry.getKey(), new JSONArray(entry.getValue().keySet()));
}
top.put(APP_META_KEY_ATTRIBUTES, attributes);
top.put(APP_META_KEY_METRICS, autoMetrics);
os.write(top.toString().getBytes());
} catch (JSONException ex) {
throw new RuntimeException(ex);
}
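    // atomically replace the previous meta file with the fully written temp file so readers never see a partial file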
Path origPath = new Path(this.vars.appPath, APP_META_FILENAME);
fileContext.rename(file, origPath, Options.Rename.OVERWRITE);
}
public Queue<Pair<Long, Map<String, Object>>> getWindowMetrics(String operatorName)
{
return logicalMetrics.get(operatorName);
}
private void calculateLatency(PTOperator oper, Map<Integer, EndWindowStats> endWindowStatsMap, Set<PTOperator> endWindowStatsVisited, Set<PTOperator> leafOperators)
{
endWindowStatsVisited.add(oper);
OperatorStatus operatorStatus = oper.stats;
EndWindowStats endWindowStats = endWindowStatsMap.get(oper.getId());
if (endWindowStats == null) {
LOG.info("End window stats is null for operator {}, probably a new operator after partitioning", oper);
return;
}
// find the maximum end window emit time from all input ports
long upstreamMaxEmitTimestamp = -1;
PTOperator upstreamMaxEmitTimestampOperator = null;
for (PTOperator.PTInput input : oper.getInputs()) {
if (null != input.source.source) {
PTOperator upstreamOp = input.source.source;
EndWindowStats upstreamEndWindowStats = endWindowStatsMap.get(upstreamOp.getId());
if (upstreamEndWindowStats == null) {
LOG.info("End window stats is null for operator {}", oper);
return;
}
long adjustedEndWindowEmitTimestamp = upstreamEndWindowStats.emitTimestamp;
MovingAverageLong rpcLatency = rpcLatencies.get(upstreamOp.getContainer().getExternalId());
if (rpcLatency != null) {
adjustedEndWindowEmitTimestamp += rpcLatency.getAvg();
}
if (adjustedEndWindowEmitTimestamp > upstreamMaxEmitTimestamp) {
upstreamMaxEmitTimestamp = adjustedEndWindowEmitTimestamp;
upstreamMaxEmitTimestampOperator = upstreamOp;
}
}
}
if (upstreamMaxEmitTimestamp > 0) {
long adjustedEndWindowEmitTimestamp = endWindowStats.emitTimestamp;
MovingAverageLong rpcLatency = rpcLatencies.get(oper.getContainer().getExternalId());
if (rpcLatency != null) {
adjustedEndWindowEmitTimestamp += rpcLatency.getAvg();
}
if (upstreamMaxEmitTimestamp <= adjustedEndWindowEmitTimestamp) {
LOG.debug("Adding {} to latency MA for {}", adjustedEndWindowEmitTimestamp - upstreamMaxEmitTimestamp, oper);
operatorStatus.latencyMA.add(adjustedEndWindowEmitTimestamp - upstreamMaxEmitTimestamp);
} else {
operatorStatus.latencyMA.add(0);
if (lastLatencyWarningTime < System.currentTimeMillis() - LATENCY_WARNING_THRESHOLD_MILLIS) {
LOG.warn("Latency calculation for this operator may not be correct because upstream end window timestamp is greater than this operator's end window timestamp: {} ({}) > {} ({}). Please verify that the system clocks are in sync in your cluster. You can also try tweaking the RPC_LATENCY_COMPENSATION_SAMPLES application attribute (currently set to {}).",
upstreamMaxEmitTimestamp, upstreamMaxEmitTimestampOperator, adjustedEndWindowEmitTimestamp, oper, this.vars.rpcLatencyCompensationSamples);
lastLatencyWarningTime = System.currentTimeMillis();
}
}
}
if (oper.getOutputs().isEmpty()) {
// it is a leaf operator
leafOperators.add(oper);
}
else {
for (PTOperator.PTOutput output : oper.getOutputs()) {
for (PTOperator.PTInput input : output.sinks) {
if (input.target != null) {
PTOperator downStreamOp = input.target;
if (!endWindowStatsVisited.contains(downStreamOp)) {
calculateLatency(downStreamOp, endWindowStatsMap, endWindowStatsVisited, leafOperators);
}
}
}
}
}
}
  /**
   * Recursively walks upstream from the given set of operators, at each step
   * following the operator with the latest end window emit timestamp, and returns
   * the cumulative latency along that path. Visited operator ids are prepended
   * to criticalPath.
   */
private long findCriticalPath(Map<Integer, EndWindowStats> endWindowStatsMap, Set<PTOperator> operators, LinkedList<Integer> criticalPath)
{
long maxEndWindowTimestamp = 0;
PTOperator maxOperator = null;
for (PTOperator operator : operators) {
EndWindowStats endWindowStats = endWindowStatsMap.get(operator.getId());
if (maxEndWindowTimestamp < endWindowStats.emitTimestamp) {
maxEndWindowTimestamp = endWindowStats.emitTimestamp;
maxOperator = operator;
}
}
if (maxOperator == null) {
return 0;
}
criticalPath.addFirst(maxOperator.getId());
OperatorStatus operatorStatus = maxOperator.stats;
operators.clear();
if (maxOperator.getInputs() == null || maxOperator.getInputs().isEmpty()) {
return operatorStatus.latencyMA.getAvg();
}
for (PTOperator.PTInput input : maxOperator.getInputs()) {
if (null != input.source.source) {
operators.add(input.source.source);
}
}
return operatorStatus.latencyMA.getAvg() + findCriticalPath(endWindowStatsMap, operators, criticalPath);
}
public int processEvents()
{
for (PTOperator o : reportStats.keySet()) {
List<OperatorStats> stats = o.stats.listenerStats.poll();
if (stats != null) {
// append into single list
List<OperatorStats> moreStats;
while ((moreStats = o.stats.listenerStats.poll()) != null) {
stats.addAll(moreStats);
}
}
o.stats.lastWindowedStats = stats;
if (o.stats.lastWindowedStats != null) {
// call listeners only with non empty window list
if (o.statsListeners != null) {
plan.onStatusUpdate(o);
}
}
reportStats.remove(o);
}
if (!this.shutdownOperators.isEmpty()) {
synchronized (this.shutdownOperators) {
Iterator<Map.Entry<Long, Set<PTOperator>>> it = shutdownOperators.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<Long, Set<PTOperator>> windowAndOpers = it.next();
if (windowAndOpers.getKey().longValue() > this.committedWindowId) {
// wait until window is committed
continue;
} else {
LOG.info("Removing inactive operators at window {} {}", Codec.getStringWindowId(windowAndOpers.getKey()), windowAndOpers.getValue());
for (PTOperator oper : windowAndOpers.getValue()) {
plan.removeTerminatedPartition(oper);
}
it.remove();
}
}
}
}
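    // defer plan-modifying events while any operator is still in a deploy transition;
    // executing them mid-transition could dispatch conflicting (un)deploy requests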
if (!eventQueue.isEmpty()) {
for (PTOperator oper : plan.getAllOperators().values()) {
if (oper.getState() != PTOperator.State.ACTIVE) {
LOG.debug("Skipping plan updates due to inactive operator {} {}", oper, oper.getState());
return 0;
}
}
}
int count = 0;
Runnable command;
while ((command = this.eventQueue.poll()) != null) {
eventQueueProcessing.set(true);
try {
command.run();
count++;
}
catch (Exception e) {
// TODO: handle error
LOG.error("Failed to execute {}", command, e);
}
eventQueueProcessing.set(false);
}
if (count > 0) {
try {
checkpoint();
}
catch (Exception e) {
throw new RuntimeException("Failed to checkpoint state.", e);
}
}
return count;
}
/**
* Schedule container restart. Called by Stram after a container was terminated
* and requires recovery (killed externally, or after heartbeat timeout). <br>
* Recovery will resolve affected operators (within the container and
* everything downstream with respective recovery checkpoint states).
* Dependent operators will be undeployed and buffer server connections reset prior to
* redeploy to recovery checkpoint.
*
* @param containerId
*/
public void scheduleContainerRestart(String containerId)
{
StreamingContainerAgent cs = this.getContainerAgent(containerId);
if (cs == null || cs.shutdownRequested) {
// the container is no longer used / was released by us
return;
}
LOG.info("Initiating recovery for {}@{}", containerId, cs.container.host);
cs.container.setState(PTContainer.State.KILLED);
cs.container.bufferServerAddress = null;
cs.container.setResourceRequestPriority(-1);
cs.container.setAllocatedMemoryMB(0);
cs.container.setAllocatedVCores(0);
// resolve dependencies
UpdateCheckpointsContext ctx = new UpdateCheckpointsContext(clock);
for (PTOperator oper : cs.container.getOperators()) {
updateRecoveryCheckpoints(oper, ctx);
}
includeLocalUpstreamOperators(ctx);
// redeploy cycle for all affected operators
LOG.info("Affected operators {}", ctx.visited);
deploy(Collections.<PTContainer>emptySet(), ctx.visited, Sets.newHashSet(cs.container), ctx.visited);
}
/**
* Transitively add operators that are container local to the dependency set.
* (All downstream operators were traversed during checkpoint update.)
*
* @param ctx
*/
private void includeLocalUpstreamOperators(UpdateCheckpointsContext ctx)
{
Set<PTOperator> newOperators = Sets.newHashSet();
// repeat until no more local upstream operators are found
do {
newOperators.clear();
for (PTOperator oper : ctx.visited) {
for (PTInput input : oper.getInputs()) {
if (input.source.source.getContainer() == oper.getContainer()) {
if (!ctx.visited.contains(input.source.source)) {
newOperators.add(input.source.source);
}
}
}
}
if (!newOperators.isEmpty()) {
for (PTOperator oper : newOperators) {
updateRecoveryCheckpoints(oper, ctx);
}
}
} while (!newOperators.isEmpty());
}
public void removeContainerAgent(String containerId)
{
LOG.debug("Removing container agent {}", containerId);
StreamingContainerAgent containerAgent = containers.remove(containerId);
if (containerAgent != null) {
// record operator stop for this container
for (PTOperator oper : containerAgent.container.getOperators()) {
StramEvent ev = new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), containerId);
recordEventAsync(ev);
}
containerAgent.container.setFinishedTime(System.currentTimeMillis());
containerAgent.container.setState(PTContainer.State.KILLED);
completedContainers.put(containerId, containerAgent.getContainerInfo());
}
}
public Collection<ContainerInfo> getCompletedContainerInfo()
{
return Collections.unmodifiableCollection(completedContainers.values());
}
public static class ContainerResource
{
public final String containerId;
public final String host;
public final int memoryMB;
public final int vCores;
public final int priority;
public final String nodeHttpAddress;
public ContainerResource(int priority, String containerId, String host, int memoryMB, int vCores, String nodeHttpAddress)
{
this.containerId = containerId;
this.host = host;
this.memoryMB = memoryMB;
this.vCores = vCores;
this.priority = priority;
this.nodeHttpAddress = nodeHttpAddress;
}
    /**
     * @return short string representation of this container resource
     */
@Override
public String toString()
{
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("containerId", this.containerId)
.append("host", this.host)
.append("memoryMB", this.memoryMB)
.toString();
}
}
  /**
   * Assign operators to an allocated container resource.
   *
   * @param resource the allocated container resource
   * @param bufferServerAddr address of the buffer server in the container, if already known
   * @return streaming container agent, or null if no pending container matches the resource
   */
public StreamingContainerAgent assignContainer(ContainerResource resource, InetSocketAddress bufferServerAddr)
{
PTContainer container = null;
// match container waiting for resource
for (PTContainer c : pendingAllocation) {
if (c.getState() == PTContainer.State.NEW || c.getState() == PTContainer.State.KILLED) {
if (c.getResourceRequestPriority() == resource.priority) {
container = c;
break;
}
}
}
if (container == null) {
LOG.debug("No container matching allocated resource {}", resource);
LOG.debug("Containers waiting for allocation {}", pendingAllocation);
return null;
}
pendingAllocation.remove(container);
container.setState(PTContainer.State.ALLOCATED);
if (container.getExternalId() != null) {
LOG.info("Removing container agent {}", container.getExternalId());
this.containers.remove(container.getExternalId());
}
container.setExternalId(resource.containerId);
container.host = resource.host;
container.bufferServerAddress = bufferServerAddr;
if (UserGroupInformation.isSecurityEnabled()) {
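      // secure mode: issue a token that clients must present to this container's buffer server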
byte[] token = AuthManager.generateToken();
container.setBufferServerToken(token);
}
container.nodeHttpAddress = resource.nodeHttpAddress;
container.setAllocatedMemoryMB(resource.memoryMB);
container.setAllocatedVCores(resource.vCores);
container.setStartedTime(-1);
container.setFinishedTime(-1);
writeJournal(container.getSetContainerState());
StreamingContainerAgent sca = new StreamingContainerAgent(container, newStreamingContainerContext(container), this);
containers.put(resource.containerId, sca);
LOG.debug("Assigned container {} priority {}", resource.containerId, resource.priority);
return sca;
}
private StreamingContainerContext newStreamingContainerContext(PTContainer container)
{
try {
int bufferServerMemory = 0;
Iterator<PTOperator> operatorIterator = container.getOperators().iterator();
while (operatorIterator.hasNext()) {
bufferServerMemory += operatorIterator.next().getBufferServerMemory();
}
LOG.debug("Buffer Server Memory {}", bufferServerMemory);
// the logical plan is not to be serialized via RPC, clone attributes only
StreamingContainerContext scc = new StreamingContainerContext(plan.getLogicalPlan().getAttributes().clone(), null);
scc.attributes.put(ContainerContext.IDENTIFIER, container.getExternalId());
scc.attributes.put(ContainerContext.BUFFER_SERVER_MB, bufferServerMemory);
scc.attributes.put(ContainerContext.BUFFER_SERVER_TOKEN, container.getBufferServerToken());
scc.startWindowMillis = this.vars.windowStartMillis;
return scc;
}
catch (CloneNotSupportedException ex) {
throw new RuntimeException("Cannot clone DAG attributes", ex);
}
}
public StreamingContainerAgent getContainerAgent(String containerId)
{
StreamingContainerAgent cs = containers.get(containerId);
if (cs == null) {
LOG.warn("Trying to get unknown container {}", containerId);
}
return cs;
}
public Collection<StreamingContainerAgent> getContainerAgents()
{
return this.containers.values();
}
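  /**
   * Reconciles the master's view of an operator's deploy state with the state
   * reported by the container (ohb is null when the container did not report the
   * operator). Queues deploy/undeploy requests on the container agent as needed.
   */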
private void processOperatorDeployStatus(final PTOperator oper, OperatorHeartbeat ohb, StreamingContainerAgent sca)
{
OperatorHeartbeat.DeployState ds = null;
if (ohb != null) {
ds = ohb.getState();
}
LOG.debug("heartbeat {} {}/{} {}", oper, oper.getState(), ds, oper.getContainer().getExternalId());
switch (oper.getState()) {
case ACTIVE:
        // The warning below is commented out because when an operator finishes its work
        // quickly and goes out of commission, it correctly reports SHUTDOWN, whereas this
        // code would incorrectly expect it to report ACTIVE.
        //LOG.warn("status out of sync {} expected {} remote {}", oper, oper.getState(), ds);
// operator expected active, check remote status
if (ds == null) {
sca.deployOpers.add(oper);
}
else {
switch (ds) {
case SHUTDOWN:
// schedule operator deactivation against the windowId
// will be processed once window is committed and all dependent operators completed processing
long windowId = oper.stats.currentWindowId.get();
if (ohb.windowStats != null && !ohb.windowStats.isEmpty()) {
windowId = ohb.windowStats.get(ohb.windowStats.size()-1).windowId;
}
LOG.debug("Operator {} deactivated at window {}", oper, windowId);
synchronized (this.shutdownOperators) {
Set<PTOperator> deactivatedOpers = this.shutdownOperators.get(windowId);
if (deactivatedOpers == null) {
this.shutdownOperators.put(windowId, deactivatedOpers = new HashSet<>());
}
deactivatedOpers.add(oper);
}
sca.undeployOpers.add(oper.getId());
// record operator stop event
recordEventAsync(new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), oper.getContainer().getExternalId()));
break;
case FAILED:
processOperatorFailure(oper);
sca.undeployOpers.add(oper.getId());
recordEventAsync(new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), oper.getContainer().getExternalId()));
break;
case ACTIVE:
break;
}
}
break;
case PENDING_UNDEPLOY:
if (ds == null) {
// operator no longer deployed in container
recordEventAsync(new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), oper.getContainer().getExternalId()));
oper.setState(State.PENDING_DEPLOY);
sca.deployOpers.add(oper);
}
else {
// operator is currently deployed, request undeploy
sca.undeployOpers.add(oper.getId());
}
break;
case PENDING_DEPLOY:
if (ds == null) {
// operator to be deployed
sca.deployOpers.add(oper);
}
else {
// operator was deployed in container
PTContainer container = oper.getContainer();
LOG.debug("{} marking deployed: {} remote status {}", container.getExternalId(), oper, ds);
oper.setState(PTOperator.State.ACTIVE);
oper.stats.lastHeartbeat = null; // reset on redeploy
oper.stats.lastWindowIdChangeTms = clock.getTime();
recordEventAsync(new StramEvent.StartOperatorEvent(oper.getName(), oper.getId(), container.getExternalId()));
}
break;
default:
//LOG.warn("Unhandled operator state {} {} remote {}", oper, oper.getState(), ds);
if (ds != null) {
// operator was removed and needs to be undeployed from container
sca.undeployOpers.add(oper.getId());
recordEventAsync(new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), oper.getContainer().getExternalId()));
}
}
}
private void processOperatorFailure(PTOperator oper)
{
// count failure transitions *->FAILED, applies to initialization as well as intermittent failures
if (oper.getState() == PTOperator.State.ACTIVE) {
oper.setState(PTOperator.State.INACTIVE);
oper.failureCount++;
oper.getOperatorMeta().getStatus().failureCount++;
LOG.warn("Operator failure: {} count: {}", oper, oper.failureCount);
Integer maxAttempts = oper.getOperatorMeta().getValue(OperatorContext.RECOVERY_ATTEMPTS);
if (maxAttempts == null || oper.failureCount <= maxAttempts) {
// restart entire container in attempt to recover operator
// in the future a more sophisticated recovery strategy could
// involve initial redeploy attempt(s) of affected operator in
// existing container or sandbox container for just the operator
LOG.error("Initiating container restart after operator failure {}", oper);
containerStopRequests.put(oper.getContainer().getExternalId(), oper.getContainer().getExternalId());
}
else {
String msg = String.format("Shutdown after reaching failure threshold for %s", oper);
LOG.warn(msg);
shutdownAllContainers(msg);
forcedShutdown = true;
}
}
else {
// should not get here
LOG.warn("Failed operator {} {} {} to be undeployed by container", oper, oper.getState());
}
}
  /**
   * Process the heartbeat from a container.
   * Called by the RPC thread for each container, i.e. concurrently for different containers.
   *
   * @param heartbeat the heartbeat reported by the container
   * @return heartbeat response
   */
@SuppressWarnings("StatementWithEmptyBody")
public ContainerHeartbeatResponse processHeartbeat(ContainerHeartbeat heartbeat)
{
long currentTimeMillis = clock.getTime();
final StreamingContainerAgent sca = this.containers.get(heartbeat.getContainerId());
if (sca == null || sca.container.getState() == PTContainer.State.KILLED) {
// could be orphaned container that was replaced and needs to terminate
LOG.error("Unknown container {}", heartbeat.getContainerId());
ContainerHeartbeatResponse response = new ContainerHeartbeatResponse();
response.shutdown = true;
return response;
}
//LOG.debug("{} {} {}", new Object[]{sca.container.containerId, sca.container.bufferServerAddress, sca.container.getState()});
if (sca.container.getState() == PTContainer.State.ALLOCATED) {
// capture dynamically assigned address from container
if (sca.container.bufferServerAddress == null && heartbeat.bufferServerHost != null) {
sca.container.bufferServerAddress = InetSocketAddress.createUnresolved(heartbeat.bufferServerHost, heartbeat.bufferServerPort);
LOG.info("Container {} buffer server: {}", sca.container.getExternalId(), sca.container.bufferServerAddress);
}
final long containerStartTime = System.currentTimeMillis();
sca.container.setState(PTContainer.State.ACTIVE);
sca.container.setStartedTime(containerStartTime);
sca.container.setFinishedTime(-1);
sca.jvmName = heartbeat.jvmName;
poolExecutor.submit(new Runnable()
{
@Override
public void run()
{
try {
containerFile.append(sca.getContainerInfo());
} catch (IOException ex) {
LOG.warn("Cannot write to container file");
}
for (PTOperator ptOp : sca.container.getOperators()) {
try {
JSONObject operatorInfo = new JSONObject();
operatorInfo.put("name", ptOp.getName());
operatorInfo.put("id", ptOp.getId());
operatorInfo.put("container", sca.container.getExternalId());
operatorInfo.put("startTime", containerStartTime);
operatorFile.append(operatorInfo);
} catch (IOException | JSONException ex) {
LOG.warn("Cannot write to operator file: ", ex);
}
}
}
});
}
if (heartbeat.restartRequested) {
LOG.error("Container {} restart request", sca.container.getExternalId());
containerStopRequests.put(sca.container.getExternalId(), sca.container.getExternalId());
}
sca.memoryMBFree = heartbeat.memoryMBFree;
sca.gcCollectionCount = heartbeat.gcCollectionCount;
sca.gcCollectionTime = heartbeat.gcCollectionTime;
sca.undeployOpers.clear();
sca.deployOpers.clear();
if (!this.deployChangeInProgress.get()) {
sca.deployCnt = this.deployChangeCnt;
}
Set<Integer> reportedOperators = Sets.newHashSetWithExpectedSize(sca.container.getOperators().size());
boolean containerIdle = true;
for (OperatorHeartbeat shb : heartbeat.getContainerStats().operators) {
long maxEndWindowTimestamp = 0;
reportedOperators.add(shb.nodeId);
PTOperator oper = this.plan.getAllOperators().get(shb.getNodeId());
if (oper == null) {
LOG.info("Heartbeat for unknown operator {} (container {})", shb.getNodeId(), heartbeat.getContainerId());
sca.undeployOpers.add(shb.nodeId);
continue;
}
if (shb.requestResponse != null) {
for (StatsListener.OperatorResponse obj : shb.requestResponse) {
if (obj instanceof OperatorResponse) { // This is to identify platform requests
commandResponse.put((Long) obj.getResponseId(), obj.getResponse());
LOG.debug(" Got back the response {} for the request {}", obj, obj.getResponseId());
}
else { // This is to identify user requests
if (oper.stats.operatorResponses == null) {
oper.stats.operatorResponses = new ArrayList<StatsListener.OperatorResponse>();
}
oper.stats.operatorResponses.add(obj);
}
}
}
//LOG.debug("heartbeat {} {}/{} {}", oper, oper.getState(), shb.getState(), oper.getContainer().getExternalId());
if (!(oper.getState() == PTOperator.State.ACTIVE && shb.getState() == OperatorHeartbeat.DeployState.ACTIVE)) {
// deploy state may require synchronization
processOperatorDeployStatus(oper, shb, sca);
}
oper.stats.lastHeartbeat = shb;
List<ContainerStats.OperatorStats> statsList = shb.getOperatorStatsContainer();
if (!oper.stats.isIdle()) {
containerIdle = false;
}
if (!statsList.isEmpty()) {
long tuplesProcessed = 0;
long tuplesEmitted = 0;
long totalCpuTimeUsed = 0;
int statCount = 0;
long maxDequeueTimestamp = -1;
oper.stats.recordingId = null;
final OperatorStatus status = oper.stats;
status.statsRevs.checkout();
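        // stats fields are mutated inside a revision (checkout/commit) so concurrent readers observe a consistent snapshot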
for (Map.Entry<String, PortStatus> entry : status.inputPortStatusList.entrySet()) {
entry.getValue().recordingId = null;
}
for (Map.Entry<String, PortStatus> entry : status.outputPortStatusList.entrySet()) {
entry.getValue().recordingId = null;
}
for (ContainerStats.OperatorStats stats : statsList) {
/* report checkpoint-ed WindowId status of the operator */
if (stats.checkpoint instanceof Checkpoint) {
if (oper.getRecentCheckpoint() == null || oper.getRecentCheckpoint().windowId < stats.checkpoint.getWindowId()) {
addCheckpoint(oper, (Checkpoint) stats.checkpoint);
if (stats.checkpointStats != null) {
status.checkpointStats = stats.checkpointStats;
status.checkpointTimeMA.add(stats.checkpointStats.checkpointTime);
}
oper.failureCount = 0;
}
}
oper.stats.recordingId = stats.recordingId;
/* report all the other stuff */
// calculate the stats related to end window
EndWindowStats endWindowStats = new EndWindowStats(); // end window stats for a particular window id for a particular node
Collection<ContainerStats.OperatorStats.PortStats> ports = stats.inputPorts;
if (ports != null) {
Set<String> currentInputPortSet = Sets.newHashSetWithExpectedSize(ports.size());
for (ContainerStats.OperatorStats.PortStats s : ports) {
currentInputPortSet.add(s.id);
PortStatus ps = status.inputPortStatusList.get(s.id);
if (ps == null) {
ps = status.new PortStatus();
ps.portName = s.id;
status.inputPortStatusList.put(s.id, ps);
}
ps.totalTuples += s.tupleCount;
ps.recordingId = s.recordingId;
tuplesProcessed += s.tupleCount;
endWindowStats.dequeueTimestamps.put(s.id, s.endWindowTimestamp);
Pair<Integer, String> operatorPortName = new Pair<Integer, String>(oper.getId(), s.id);
long lastEndWindowTimestamp = operatorPortLastEndWindowTimestamps.containsKey(operatorPortName) ? operatorPortLastEndWindowTimestamps.get(operatorPortName) : lastStatsTimestamp;
long portElapsedMillis = Math.max(s.endWindowTimestamp - lastEndWindowTimestamp, 0);
//LOG.debug("=== PROCESSED TUPLE COUNT for {}: {}, {}, {}, {}", operatorPortName, s.tupleCount, portElapsedMillis, operatorPortLastEndWindowTimestamps.get(operatorPortName), lastStatsTimestamp);
ps.tuplesPMSMA.add(s.tupleCount, portElapsedMillis);
ps.bufferServerBytesPMSMA.add(s.bufferServerBytes, portElapsedMillis);
ps.queueSizeMA.add(s.queueSize);
operatorPortLastEndWindowTimestamps.put(operatorPortName, s.endWindowTimestamp);
if (maxEndWindowTimestamp < s.endWindowTimestamp) {
maxEndWindowTimestamp = s.endWindowTimestamp;
}
if (s.endWindowTimestamp > maxDequeueTimestamp) {
maxDequeueTimestamp = s.endWindowTimestamp;
}
}
// need to remove dead ports, for unifiers
Iterator<Map.Entry<String, PortStatus>> it = status.inputPortStatusList.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, PortStatus> entry = it.next();
if (!currentInputPortSet.contains(entry.getKey())) {
it.remove();
}
}
}
ports = stats.outputPorts;
if (ports != null) {
Set<String> currentOutputPortSet = Sets.newHashSetWithExpectedSize(ports.size());
for (ContainerStats.OperatorStats.PortStats s : ports) {
currentOutputPortSet.add(s.id);
PortStatus ps = status.outputPortStatusList.get(s.id);
if (ps == null) {
ps = status.new PortStatus();
ps.portName = s.id;
status.outputPortStatusList.put(s.id, ps);
}
ps.totalTuples += s.tupleCount;
ps.recordingId = s.recordingId;
tuplesEmitted += s.tupleCount;
Pair<Integer, String> operatorPortName = new Pair<Integer, String>(oper.getId(), s.id);
long lastEndWindowTimestamp = operatorPortLastEndWindowTimestamps.containsKey(operatorPortName) ? operatorPortLastEndWindowTimestamps.get(operatorPortName) : lastStatsTimestamp;
long portElapsedMillis = Math.max(s.endWindowTimestamp - lastEndWindowTimestamp, 0);
//LOG.debug("=== EMITTED TUPLE COUNT for {}: {}, {}, {}, {}", operatorPortName, s.tupleCount, portElapsedMillis, operatorPortLastEndWindowTimestamps.get(operatorPortName), lastStatsTimestamp);
ps.tuplesPMSMA.add(s.tupleCount, portElapsedMillis);
ps.bufferServerBytesPMSMA.add(s.bufferServerBytes, portElapsedMillis);
operatorPortLastEndWindowTimestamps.put(operatorPortName, s.endWindowTimestamp);
if (maxEndWindowTimestamp < s.endWindowTimestamp) {
maxEndWindowTimestamp = s.endWindowTimestamp;
}
}
if (ports.size() > 0) {
endWindowStats.emitTimestamp = ports.iterator().next().endWindowTimestamp;
}
// need to remove dead ports, for unifiers
Iterator<Map.Entry<String, PortStatus>> it = status.outputPortStatusList.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, PortStatus> entry = it.next();
if (!currentOutputPortSet.contains(entry.getKey())) {
it.remove();
}
}
}
// for output operator, just take the maximum dequeue time for emit timestamp.
// (we don't know the latency for output operators because they don't emit tuples)
if (endWindowStats.emitTimestamp < 0) {
endWindowStats.emitTimestamp = maxDequeueTimestamp;
}
if (status.currentWindowId.get() != stats.windowId) {
status.lastWindowIdChangeTms = currentTimeMillis;
status.currentWindowId.set(stats.windowId);
}
totalCpuTimeUsed += stats.cpuTimeUsed;
statCount++;
if (oper.getOperatorMeta().getValue(OperatorContext.COUNTERS_AGGREGATOR) != null) {
endWindowStats.counters = stats.counters;
}
if (oper.getOperatorMeta().getMetricAggregatorMeta() != null &&
oper.getOperatorMeta().getMetricAggregatorMeta().getAggregator() != null) {
endWindowStats.metrics = stats.metrics;
}
if (stats.windowId > currentEndWindowStatsWindowId) {
Map<Integer, EndWindowStats> endWindowStatsMap = endWindowStatsOperatorMap.get(stats.windowId);
if (endWindowStatsMap == null) {
endWindowStatsOperatorMap.putIfAbsent(stats.windowId, new ConcurrentSkipListMap<Integer, EndWindowStats>());
endWindowStatsMap = endWindowStatsOperatorMap.get(stats.windowId);
}
endWindowStatsMap.put(shb.getNodeId(), endWindowStats);
Set<Integer> allCurrentOperators = plan.getAllOperators().keySet();
int numOperators = plan.getAllOperators().size();
if (allCurrentOperators.containsAll(endWindowStatsMap.keySet()) && endWindowStatsMap.size() == numOperators) {
completeEndWindowStatsWindowId = stats.windowId;
}
}
}
status.totalTuplesProcessed.add(tuplesProcessed);
status.totalTuplesEmitted.add(tuplesEmitted);
OperatorMeta logicalOperator = oper.getOperatorMeta();
LogicalOperatorStatus logicalStatus = logicalOperator.getStatus();
if (!oper.isUnifier()) {
logicalStatus.totalTuplesProcessed += tuplesProcessed;
logicalStatus.totalTuplesEmitted += tuplesEmitted;
}
long lastMaxEndWindowTimestamp = operatorLastEndWindowTimestamps.containsKey(oper.getId()) ? operatorLastEndWindowTimestamps.get(oper.getId()) : lastStatsTimestamp;
if (maxEndWindowTimestamp >= lastMaxEndWindowTimestamp) {
double tuplesProcessedPMSMA = 0.0;
double tuplesEmittedPMSMA = 0.0;
if (statCount != 0) {
//LOG.debug("CPU for {}: {} / {} - {}", oper.getId(), totalCpuTimeUsed, maxEndWindowTimestamp, lastMaxEndWindowTimestamp);
status.cpuNanosPMSMA.add(totalCpuTimeUsed, maxEndWindowTimestamp - lastMaxEndWindowTimestamp);
}
for (PortStatus ps : status.inputPortStatusList.values()) {
tuplesProcessedPMSMA += ps.tuplesPMSMA.getAvg();
}
for (PortStatus ps : status.outputPortStatusList.values()) {
tuplesEmittedPMSMA += ps.tuplesPMSMA.getAvg();
}
status.tuplesProcessedPSMA.set(Math.round(tuplesProcessedPMSMA * 1000));
status.tuplesEmittedPSMA.set(Math.round(tuplesEmittedPMSMA * 1000));
}
else {
//LOG.warn("This timestamp for {} is lower than the previous!! {} < {}", oper.getId(), maxEndWindowTimestamp, lastMaxEndWindowTimestamp);
}
operatorLastEndWindowTimestamps.put(oper.getId(), maxEndWindowTimestamp);
status.listenerStats.add(statsList);
this.reportStats.put(oper, oper);
status.statsRevs.commit();
}
if (lastStatsTimestamp < maxEndWindowTimestamp) {
lastStatsTimestamp = maxEndWindowTimestamp;
}
}
sca.lastHeartbeatMillis = currentTimeMillis;
for (PTOperator oper : sca.container.getOperators()) {
if (!reportedOperators.contains(oper.getId())) {
processOperatorDeployStatus(oper, null, sca);
}
}
ContainerHeartbeatResponse rsp = getHeartbeatResponse(sca);
if (containerIdle && isApplicationIdle()) {
LOG.info("requesting idle shutdown for container {}", heartbeat.getContainerId());
rsp.shutdown = true;
}
else {
if (sca.shutdownRequested) {
LOG.info("requesting shutdown for container {}", heartbeat.getContainerId());
rsp.shutdown = true;
}
}
List<StramToNodeRequest> requests = rsp.nodeRequests != null ? rsp.nodeRequests : new ArrayList<StramToNodeRequest>();
ConcurrentLinkedQueue<StramToNodeRequest> operatorRequests = sca.getOperatorRequests();
while (true) {
StramToNodeRequest r = operatorRequests.poll();
if (r == null) {
break;
}
requests.add(r);
}
rsp.nodeRequests = requests;
rsp.committedWindowId = committedWindowId;
return rsp;
}
private ContainerHeartbeatResponse getHeartbeatResponse(StreamingContainerAgent sca)
{
ContainerHeartbeatResponse rsp = new ContainerHeartbeatResponse();
if (this.deployChangeInProgress.get() || sca.deployCnt != this.deployChangeCnt) {
LOG.debug("{} deferred requests due to concurrent plan change.", sca.container.toIdStateString());
rsp.hasPendingRequests = true;
return rsp;
}
if (!sca.undeployOpers.isEmpty()) {
rsp.undeployRequest = Lists.newArrayList(sca.undeployOpers);
rsp.hasPendingRequests = (!sca.deployOpers.isEmpty());
return rsp;
}
Set<PTOperator> deployOperators = sca.deployOpers;
if (!deployOperators.isEmpty()) {
// deploy once all containers are running and no undeploy operations are pending.
for (PTContainer c : getPhysicalPlan().getContainers()) {
if (c.getState() != PTContainer.State.ACTIVE) {
LOG.debug("{} waiting for container activation {}", sca.container.toIdStateString(), c.toIdStateString());
rsp.hasPendingRequests = true;
return rsp;
}
for (PTOperator oper : c.getOperators()) {
if (oper.getState() == PTOperator.State.PENDING_UNDEPLOY) {
LOG.debug("{} waiting for undeploy {} {}", sca.container.toIdStateString(), c.toIdStateString(), oper);
rsp.hasPendingRequests = true;
return rsp;
}
}
}
LOG.debug("{} deployable operators: {}", sca.container.toIdStateString(), deployOperators);
List<OperatorDeployInfo> deployList = sca.getDeployInfoList(deployOperators);
if (deployList != null && !deployList.isEmpty()) {
rsp.deployRequest = deployList;
rsp.nodeRequests = Lists.newArrayList();
for (PTOperator o : deployOperators) {
rsp.nodeRequests.addAll(o.deployRequests);
}
}
rsp.hasPendingRequests = false;
return rsp;
}
return rsp;
}
private boolean isApplicationIdle()
{
if (eventQueueProcessing.get()) {
return false;
}
for (StreamingContainerAgent sca : this.containers.values()) {
if (sca.hasPendingWork()) {
// container may have no active operators but deploy request pending
return false;
}
for (PTOperator oper : sca.container.getOperators()) {
if (!oper.stats.isIdle()) {
return false;
}
}
}
return true;
}
@SuppressWarnings("StatementWithEmptyBody")
void addCheckpoint(PTOperator node, Checkpoint checkpoint)
{
synchronized (node.checkpoints) {
if (!node.checkpoints.isEmpty()) {
Checkpoint lastCheckpoint = node.checkpoints.getLast();
// skip unless checkpoint moves
if (lastCheckpoint.windowId != checkpoint.windowId) {
if (lastCheckpoint.windowId > checkpoint.windowId) {
// list needs to have max windowId last
LOG.warn("Out of sequence checkpoint {} last {} (operator {})", checkpoint, lastCheckpoint, node);
ListIterator<Checkpoint> li = node.checkpoints.listIterator();
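// advance to the first entry with windowId >= checkpoint.windowId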
while (li.hasNext() && li.next().windowId < checkpoint.windowId) {
//continue;
}
if (li.previous().windowId != checkpoint.windowId) {
li.add(checkpoint);
}
}
else {
node.checkpoints.add(checkpoint);
}
}
}
else {
node.checkpoints.add(checkpoint);
}
}
}
public static class UpdateCheckpointsContext
{
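// committedWindowId: minimum recovery checkpoint across all operators visited so far
// visited: operators already traversed, guards the downstream DFS in updateRecoveryCheckpoints
// blocked: operators that have not advanced a window within the processing timeout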
public final MutableLong committedWindowId = new MutableLong(Long.MAX_VALUE);
public final Set<PTOperator> visited = new LinkedHashSet<PTOperator>();
public final Set<PTOperator> blocked = new LinkedHashSet<PTOperator>();
public final long currentTms;
public final boolean recovery;
public UpdateCheckpointsContext(Clock clock)
{
this.currentTms = clock.getTime();
this.recovery = false;
}
public UpdateCheckpointsContext(Clock clock, boolean recovery)
{
this.currentTms = clock.getTime();
this.recovery = recovery;
}
}
/**
* Compute checkpoints required for a given operator instance to be recovered.
* This is done by looking at checkpoints available for downstream dependencies first,
* and then selecting the most recent available checkpoint that is smaller than downstream.
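* <p>Illustrative example with hypothetical window ids: if this operator holds
* checkpoints {10, 20, 30} and the smallest recovery checkpoint among its downstream
* sinks is 20, then 20 becomes this operator's recovery checkpoint and checkpoint 10
* is eligible for purging.</p>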
*
* @param operator Operator instance for which to find recovery checkpoint
* @param ctx Context into which to collect traversal info
*/
public void updateRecoveryCheckpoints(PTOperator operator, UpdateCheckpointsContext ctx)
{
if (operator.getRecoveryCheckpoint().windowId < ctx.committedWindowId.longValue()) {
ctx.committedWindowId.setValue(operator.getRecoveryCheckpoint().windowId);
}
if (operator.getState() == PTOperator.State.ACTIVE && (ctx.currentTms - operator.stats.lastWindowIdChangeTms) > operator.stats.windowProcessingTimeoutMillis) {
// if the checkpoint is ahead, then it is not blocked but waiting for activation (state-less recovery, at-most-once)
if (ctx.committedWindowId.longValue() >= operator.getRecoveryCheckpoint().windowId) {
ctx.blocked.add(operator);
}
}
long maxCheckpoint = operator.getRecentCheckpoint().windowId;
if (ctx.recovery && maxCheckpoint == Stateless.WINDOW_ID && operator.isOperatorStateLess()) {
long currentWindowId = WindowGenerator.getWindowId(ctx.currentTms, this.vars.windowStartMillis, this.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
maxCheckpoint = currentWindowId;
}
// DFS downstream operators
for (PTOperator.PTOutput out : operator.getOutputs()) {
for (PTOperator.PTInput sink : out.sinks) {
PTOperator sinkOperator = sink.target;
if (!ctx.visited.contains(sinkOperator)) {
// downstream traversal
updateRecoveryCheckpoints(sinkOperator, ctx);
}
// recovery window id cannot move backwards
// when dynamically adding new operators
if (sinkOperator.getRecoveryCheckpoint().windowId >= operator.getRecoveryCheckpoint().windowId) {
maxCheckpoint = Math.min(maxCheckpoint, sinkOperator.getRecoveryCheckpoint().windowId);
}
if (ctx.blocked.contains(sinkOperator)) {
if (sinkOperator.stats.getCurrentWindowId() == operator.stats.getCurrentWindowId()) {
// downstream operator is blocked by this operator
ctx.blocked.remove(sinkOperator);
}
}
}
}
// checkpoint frozen during deployment
if (ctx.recovery || operator.getState() != PTOperator.State.PENDING_DEPLOY) {
// remove previous checkpoints
Checkpoint c1 = Checkpoint.INITIAL_CHECKPOINT;
synchronized (operator.checkpoints) {
if (!operator.checkpoints.isEmpty() && (operator.checkpoints.getFirst()).windowId <= maxCheckpoint) {
c1 = operator.checkpoints.getFirst();
Checkpoint c2;
while (operator.checkpoints.size() > 1 && ((c2 = operator.checkpoints.get(1)).windowId) <= maxCheckpoint) {
operator.checkpoints.removeFirst();
//LOG.debug("Checkpoint to delete: operator={} windowId={}", operator.getName(), c1);
this.purgeCheckpoints.add(new Pair<PTOperator, Long>(operator, c1.windowId));
c1 = c2;
}
}
else {
if (ctx.recovery && operator.checkpoints.isEmpty() && operator.isOperatorStateLess()) {
LOG.debug("Adding checkpoint for stateless operator {} {}", operator, Codec.getStringWindowId(maxCheckpoint));
c1 = operator.addCheckpoint(maxCheckpoint, this.vars.windowStartMillis);
}
}
}
//LOG.debug("Operator {} checkpoints: commit {} recent {}", new Object[] {operator.getName(), c1, operator.checkpoints});
operator.setRecoveryCheckpoint(c1);
}
else {
LOG.debug("Skipping checkpoint update {} during {}", operator, operator.getState());
}
ctx.visited.add(operator);
}
public long windowIdToMillis(long windowId)
{
int widthMillis = plan.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS);
return WindowGenerator.getWindowMillis(windowId, this.vars.windowStartMillis, widthMillis);
}
public long getWindowStartMillis()
{
return this.vars.windowStartMillis;
}
/**
* Visit all operators to update current checkpoint based on updated downstream state.
* Purge older checkpoints that are no longer needed.
*/
private long updateCheckpoints(boolean recovery)
{
UpdateCheckpointsContext ctx = new UpdateCheckpointsContext(clock, recovery);
for (OperatorMeta logicalOperator : plan.getLogicalPlan().getRootOperators()) {
//LOG.debug("Updating checkpoints for operator {}", logicalOperator.getName());
List<PTOperator> operators = plan.getOperators(logicalOperator);
if (operators != null) {
for (PTOperator operator : operators) {
updateRecoveryCheckpoints(operator, ctx);
}
}
}
purgeCheckpoints();
for (PTOperator oper : ctx.blocked) {
String containerId = oper.getContainer().getExternalId();
if (containerId != null) {
LOG.info("Blocked operator {} container {} time {}ms", oper, oper.getContainer().toIdStateString(), ctx.currentTms - oper.stats.lastWindowIdChangeTms);
this.containerStopRequests.put(containerId, containerId);
}
}
return ctx.committedWindowId.longValue();
}
private BufferServerController getBufferServerClient(PTOperator operator)
{
BufferServerController bsc = new BufferServerController(operator.getLogicalId());
bsc.setToken(operator.getContainer().getBufferServerToken());
InetSocketAddress address = operator.getContainer().bufferServerAddress;
StreamingContainer.eventloop.connect(address.isUnresolved() ? new InetSocketAddress(address.getHostName(), address.getPort()) : address, bsc);
return bsc;
}
private void purgeCheckpoints()
{
for (Pair<PTOperator, Long> p : purgeCheckpoints) {
final PTOperator operator = p.getFirst();
if (!operator.isOperatorStateLess()) {
final long windowId = p.getSecond();
Runnable r = new Runnable()
{
@Override
public void run()
{
try {
operator.getOperatorMeta().getValue(OperatorContext.STORAGE_AGENT).delete(operator.getId(), windowId);
}
catch (IOException ex) {
LOG.error("Failed to purge checkpoint for operator {} for windowId {}", operator, windowId, ex);
}
}
};
poolExecutor.submit(r);
}
// delete stream state when using buffer server
for (PTOperator.PTOutput out : operator.getOutputs()) {
if (!out.isDownStreamInline()) {
if (operator.getContainer().bufferServerAddress == null) {
// address should be null only for a new container, in which case there should not be a purge request
// TODO: logging added to find out how we got here
LOG.warn("purge request w/o buffer server address source {} container {} checkpoints {}",
out, operator.getContainer(), operator.checkpoints);
continue;
}
for (InputPortMeta ipm : out.logicalStream.getSinks()) {
StreamCodec<?> streamCodecInfo = StreamingContainerAgent.getStreamCodec(ipm);
Integer codecId = plan.getStreamCodecIdentifier(streamCodecInfo);
// following needs to match the concat logic in StreamingContainer
String sourceIdentifier = Integer.toString(operator.getId()).concat(Component.CONCAT_SEPARATOR).concat(out.portName).concat(Component.CONCAT_SEPARATOR).concat(codecId.toString());
// delete everything from buffer server prior to new checkpoint
BufferServerController bsc = getBufferServerClient(operator);
try {
bsc.purge(null, sourceIdentifier, operator.checkpoints.getFirst().windowId - 1);
}
catch (RuntimeException re) {
LOG.warn("Failed to purge {} {}", bsc.addr, sourceIdentifier, re);
}
}
}
}
}
purgeCheckpoints.clear();
}
/**
* Mark all containers for shutdown, next container heartbeat response
* will propagate the shutdown request. This is controlled soft shutdown.
* If containers don't respond, the application can be forcefully terminated
* via yarn using forceKillApplication.
*
* @param message diagnostic message recorded and propagated with the shutdown request
*/
public void shutdownAllContainers(String message)
{
this.shutdownDiagnosticsMessage = message;
LOG.info("Initiating application shutdown: {}", message);
for (StreamingContainerAgent cs : this.containers.values()) {
cs.shutdownRequested = true;
}
}
private Map<PTContainer, List<PTOperator>> groupByContainer(Collection<PTOperator> operators)
{
Map<PTContainer, List<PTOperator>> m = new HashMap<PTContainer, List<PTOperator>>();
for (PTOperator node : operators) {
List<PTOperator> nodes = m.get(node.getContainer());
if (nodes == null) {
nodes = new ArrayList<PTOperator>();
m.put(node.getContainer(), nodes);
}
nodes.add(node);
}
return m;
}
private void requestContainer(PTContainer c)
{
ContainerStartRequest dr = new ContainerStartRequest(c);
containerStartRequests.add(dr);
pendingAllocation.add(dr.container);
lastResourceRequest = System.currentTimeMillis();
for (PTOperator operator : c.getOperators()) {
operator.setState(PTOperator.State.INACTIVE);
}
}
@Override
public void deploy(Set<PTContainer> releaseContainers, Collection<PTOperator> undeploy, Set<PTContainer> startContainers, Collection<PTOperator> deploy)
{
try {
this.deployChangeInProgress.set(true);
Map<PTContainer, List<PTOperator>> undeployGroups = groupByContainer(undeploy);
// stop affected operators (exclude new/failed containers)
// order does not matter, remove all operators in each container in one sweep
for (Map.Entry<PTContainer, List<PTOperator>> e : undeployGroups.entrySet()) {
// container may already be in failed or pending deploy state, notified by RM or timed out
PTContainer c = e.getKey();
if (!startContainers.contains(c) && !releaseContainers.contains(c) && c.getState() != PTContainer.State.KILLED) {
LOG.debug("scheduling undeploy {} {}", e.getKey().getExternalId(), e.getValue());
for (PTOperator oper : e.getValue()) {
oper.setState(PTOperator.State.PENDING_UNDEPLOY);
}
}
}
// start new containers
for (PTContainer c : startContainers) {
requestContainer(c);
}
// (re)deploy affected operators
// can happen in parallel after buffer server for recovered publishers is reset
Map<PTContainer, List<PTOperator>> deployGroups = groupByContainer(deploy);
for (Map.Entry<PTContainer, List<PTOperator>> e : deployGroups.entrySet()) {
if (!startContainers.contains(e.getKey())) {
// to reset publishers, clean buffer server past checkpoint so subscribers don't read stale data (including end of stream)
for (PTOperator operator : e.getValue()) {
for (PTOperator.PTOutput out : operator.getOutputs()) {
if (!out.isDownStreamInline()) {
for (InputPortMeta ipm : out.logicalStream.getSinks()) {
StreamCodec<?> streamCodecInfo = StreamingContainerAgent.getStreamCodec(ipm);
Integer codecId = plan.getStreamCodecIdentifier(streamCodecInfo);
// following needs to match the concat logic in StreamingContainer
String sourceIdentifier = Integer.toString(operator.getId()).concat(Component.CONCAT_SEPARATOR).concat(out.portName).concat(Component.CONCAT_SEPARATOR).concat(codecId.toString());
if (operator.getContainer().getState() == PTContainer.State.ACTIVE) {
// TODO: unit test - find way to mock this when testing rest of logic
if (operator.getContainer().bufferServerAddress.getPort() != 0) {
BufferServerController bsc = getBufferServerClient(operator);
// reset publisher (stale operator may still write data until disconnected)
// ensures new subscriber starting to read from checkpoint will wait until publisher redeploy cycle is complete
try {
bsc.reset(null, sourceIdentifier, 0);
}
catch (Exception ex) {
LOG.error("Failed to reset buffer server {} {}", sourceIdentifier, ex);
}
}
}
}
}
}
}
}
// add to operators that we expect to deploy
LOG.debug("scheduling deploy {} {}", e.getKey().getExternalId(), e.getValue());
for (PTOperator oper : e.getValue()) {
// operator will be deployed after it has been undeployed, if still referenced by the container
if (oper.getState() != PTOperator.State.PENDING_UNDEPLOY) {
oper.setState(PTOperator.State.PENDING_DEPLOY);
}
}
}
// stop containers that are no longer used
for (PTContainer c : releaseContainers) {
if (c.getExternalId() == null) {
continue;
}
StreamingContainerAgent sca = containers.get(c.getExternalId());
if (sca != null) {
LOG.debug("Container marked for shutdown: {}", c);
// container already removed from plan
// TODO: monitor soft shutdown
sca.shutdownRequested = true;
}
}
}
finally {
this.deployChangeCnt++;
this.deployChangeInProgress.set(false);
}
}
@Override
public void recordEventAsync(StramEvent ev)
{
if (eventBus != null) {
eventBus.publishAsync(ev);
}
}
@Override
public void dispatch(Runnable r)
{
this.eventQueue.add(r);
}
public OperatorInfo getOperatorInfo(int operatorId)
{
PTOperator o = this.plan.getAllOperators().get(operatorId);
return o == null ? null : fillPhysicalOperatorInfo(o);
}
public List<OperatorInfo> getOperatorInfoList()
{
List<OperatorInfo> infoList = new ArrayList<OperatorInfo>();
for (PTContainer container : this.plan.getContainers()) {
for (PTOperator operator : container.getOperators()) {
infoList.add(fillPhysicalOperatorInfo(operator));
}
}
return infoList;
}
public LogicalOperatorInfo getLogicalOperatorInfo(String operatorName)
{
OperatorMeta operatorMeta = getLogicalPlan().getOperatorMeta(operatorName);
if (operatorMeta == null) {
return null;
}
return fillLogicalOperatorInfo(operatorMeta);
}
public List<LogicalOperatorInfo> getLogicalOperatorInfoList()
{
List<LogicalOperatorInfo> infoList = new ArrayList<LogicalOperatorInfo>();
Collection<OperatorMeta> allOperators = getLogicalPlan().getAllOperators();
for (OperatorMeta operatorMeta : allOperators) {
infoList.add(fillLogicalOperatorInfo(operatorMeta));
}
return infoList;
}
public OperatorAggregationInfo getOperatorAggregationInfo(String operatorName)
{
OperatorMeta operatorMeta = getLogicalPlan().getOperatorMeta(operatorName);
if (operatorMeta == null) {
return null;
}
return fillOperatorAggregationInfo(operatorMeta);
}
public static long toWsWindowId(long windowId)
{
// until console handles -1
return windowId < 0 ? 0 : windowId;
}
private OperatorInfo fillPhysicalOperatorInfo(PTOperator operator)
{
OperatorInfo oi = new OperatorInfo();
oi.container = operator.getContainer().getExternalId();
oi.host = operator.getContainer().host;
oi.id = Integer.toString(operator.getId());
oi.name = operator.getName();
oi.className = operator.getOperatorMeta().getOperator().getClass().getName();
oi.status = operator.getState().toString();
if (operator.isUnifier()) {
oi.unifierClass = operator.getUnifierClass().getName();
}
oi.logicalName = operator.getOperatorMeta().getName();
OperatorStatus os = operator.stats;
oi.recordingId = os.recordingId;
oi.totalTuplesProcessed = os.totalTuplesProcessed.get();
oi.totalTuplesEmitted = os.totalTuplesEmitted.get();
oi.tuplesProcessedPSMA = os.tuplesProcessedPSMA.get();
oi.tuplesEmittedPSMA = os.tuplesEmittedPSMA.get();
oi.cpuPercentageMA = os.cpuNanosPMSMA.getAvg() / 10000;
oi.latencyMA = os.latencyMA.getAvg();
oi.failureCount = operator.failureCount;
oi.recoveryWindowId = toWsWindowId(operator.getRecoveryCheckpoint().windowId);
oi.currentWindowId = toWsWindowId(os.currentWindowId.get());
if (os.lastHeartbeat != null) {
oi.lastHeartbeat = os.lastHeartbeat.getGeneratedTms();
}
if (os.checkpointStats != null) {
oi.checkpointTime = os.checkpointStats.checkpointTime;
oi.checkpointStartTime = os.checkpointStats.checkpointStartTime;
}
oi.checkpointTimeMA = os.checkpointTimeMA.getAvg();
for (PortStatus ps : os.inputPortStatusList.values()) {
PortInfo pinfo = new PortInfo();
pinfo.name = ps.portName;
pinfo.type = "input";
pinfo.totalTuples = ps.totalTuples;
pinfo.tuplesPSMA = Math.round(ps.tuplesPMSMA.getAvg() * 1000);
pinfo.bufferServerBytesPSMA = Math.round(ps.bufferServerBytesPMSMA.getAvg() * 1000);
pinfo.queueSizeMA = ps.queueSizeMA.getAvg();
pinfo.recordingId = ps.recordingId;
oi.addPort(pinfo);
}
for (PortStatus ps : os.outputPortStatusList.values()) {
PortInfo pinfo = new PortInfo();
pinfo.name = ps.portName;
pinfo.type = "output";
pinfo.totalTuples = ps.totalTuples;
pinfo.tuplesPSMA = Math.round(ps.tuplesPMSMA.getAvg() * 1000);
pinfo.bufferServerBytesPSMA = Math.round(ps.bufferServerBytesPMSMA.getAvg() * 1000);
pinfo.recordingId = ps.recordingId;
oi.addPort(pinfo);
}
oi.counters = os.getLastWindowedStats().size() > 0 ?
os.getLastWindowedStats().get(os.getLastWindowedStats().size() - 1).counters : null;
oi.metrics = os.getLastWindowedStats().size() > 0 ?
os.getLastWindowedStats().get(os.getLastWindowedStats().size() - 1).metrics : null;
return oi;
}
private LogicalOperatorInfo fillLogicalOperatorInfo(OperatorMeta operator)
{
LogicalOperatorInfo loi = new LogicalOperatorInfo();
loi.name = operator.getName();
loi.className = operator.getOperator().getClass().getName();
loi.totalTuplesEmitted = operator.getStatus().totalTuplesEmitted;
loi.totalTuplesProcessed = operator.getStatus().totalTuplesProcessed;
loi.failureCount = operator.getStatus().failureCount;
loi.status = new HashMap<String, MutableInt>();
loi.partitions = new TreeSet<Integer>();
loi.unifiers = new TreeSet<Integer>();
loi.containerIds = new TreeSet<String>();
loi.hosts = new TreeSet<String>();
Collection<PTOperator> physicalOperators = getPhysicalPlan().getAllOperators(operator);
NumberAggregate.LongAggregate checkpointTimeAggregate = new NumberAggregate.LongAggregate();
for (PTOperator physicalOperator : physicalOperators) {
OperatorStatus os = physicalOperator.stats;
if (physicalOperator.isUnifier()) {
loi.unifiers.add(physicalOperator.getId());
}
else {
loi.partitions.add(physicalOperator.getId());
// unifiers are excluded from these totals; whether to include them is an open question
loi.tuplesEmittedPSMA += os.tuplesEmittedPSMA.get();
loi.tuplesProcessedPSMA += os.tuplesProcessedPSMA.get();
// calculate maximum latency for all partitions
long latency = calculateLatency(physicalOperator);
if (latency > loi.latencyMA) {
loi.latencyMA = latency;
}
checkpointTimeAggregate.addNumber(os.checkpointTimeMA.getAvg());
}
loi.cpuPercentageMA += os.cpuNanosPMSMA.getAvg() / 10000;
if (os.lastHeartbeat != null && (loi.lastHeartbeat == 0 || loi.lastHeartbeat > os.lastHeartbeat.getGeneratedTms())) {
loi.lastHeartbeat = os.lastHeartbeat.getGeneratedTms();
}
long currentWindowId = toWsWindowId(os.currentWindowId.get());
if (loi.currentWindowId == 0 || loi.currentWindowId > currentWindowId) {
loi.currentWindowId = currentWindowId;
}
MutableInt count = loi.status.get(physicalOperator.getState().toString());
if (count == null) {
count = new MutableInt();
loi.status.put(physicalOperator.getState().toString(), count);
}
count.increment();
if (physicalOperator.getRecoveryCheckpoint() != null) {
long recoveryWindowId = toWsWindowId(physicalOperator.getRecoveryCheckpoint().windowId);
if (loi.recoveryWindowId == 0 || loi.recoveryWindowId > recoveryWindowId) {
loi.recoveryWindowId = recoveryWindowId;
}
}
PTContainer container = physicalOperator.getContainer();
if (container != null) {
String externalId = container.getExternalId();
if (externalId != null) {
loi.containerIds.add(externalId);
loi.hosts.add(container.host);
}
}
}
loi.checkpointTimeMA = checkpointTimeAggregate.getAvg().longValue();
loi.counters = latestLogicalCounters.get(operator.getName());
loi.autoMetrics = latestLogicalMetrics.get(operator.getName());
return loi;
}
private OperatorAggregationInfo fillOperatorAggregationInfo(OperatorMeta operator)
{
OperatorAggregationInfo oai = new OperatorAggregationInfo();
Collection<PTOperator> physicalOperators = getPhysicalPlan().getAllOperators(operator);
if (physicalOperators.isEmpty()) {
return null;
}
oai.name = operator.getName();
for (PTOperator physicalOperator : physicalOperators) {
if (!physicalOperator.isUnifier()) {
OperatorStatus os = physicalOperator.stats;
oai.latencyMA.addNumber(os.latencyMA.getAvg());
oai.cpuPercentageMA.addNumber(os.cpuNanosPMSMA.getAvg() / 10000);
oai.tuplesEmittedPSMA.addNumber(os.tuplesEmittedPSMA.get());
oai.tuplesProcessedPSMA.addNumber(os.tuplesProcessedPSMA.get());
oai.currentWindowId.addNumber(os.currentWindowId.get());
oai.recoveryWindowId.addNumber(toWsWindowId(physicalOperator.getRecoveryCheckpoint().windowId));
if (os.lastHeartbeat != null) {
oai.lastHeartbeat.addNumber(os.lastHeartbeat.getGeneratedTms());
}
oai.checkpointTime.addNumber(os.checkpointTimeMA.getAvg());
}
}
return oai;
}
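// Note: the latency reported for an operator includes the largest latency along any
// chain of downstream unifiers, since unifier processing is part of the logical path.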
private long calculateLatency(PTOperator operator)
{
long latency = operator.stats.latencyMA.getAvg();
long maxUnifierLatency = 0;
for (PTOutput output : operator.getOutputs()) {
for (PTInput input : output.sinks) {
if (input.target.isUnifier()) {
long thisUnifierLatency = calculateLatency(input.target);
if (maxUnifierLatency < thisUnifierLatency) {
maxUnifierLatency = thisUnifierLatency;
}
}
}
}
return latency + maxUnifierLatency;
}
public List<StreamInfo> getStreamInfoList()
{
List<StreamInfo> infoList = new ArrayList<StreamInfo>();
for (PTContainer container : this.plan.getContainers()) {
for (PTOperator operator : container.getOperators()) {
List<PTOutput> outputs = operator.getOutputs();
for (PTOutput output : outputs) {
StreamInfo si = new StreamInfo();
si.logicalName = output.logicalStream.getName();
si.source.operatorId = String.valueOf(operator.getId());
si.source.portName = output.portName;
si.locality = output.logicalStream.getLocality();
for (PTInput input : output.sinks) {
StreamInfo.Port p = new StreamInfo.Port();
p.operatorId = String.valueOf(input.target.getId());
if (input.target.isUnifier()) {
p.portName = StreamingContainer.getUnifierInputPortName(input.portName, operator.getId(), output.portName);
}
else {
p.portName = input.portName;
}
si.sinks.add(p);
}
infoList.add(si);
}
}
}
return infoList;
}
private static class RecordingRequestFilter implements Predicate<StramToNodeRequest>
{
static final Set<StramToNodeRequest.RequestType> MATCH_TYPES = Sets.newHashSet(StramToNodeRequest.RequestType.START_RECORDING, StramToNodeRequest.RequestType.STOP_RECORDING, StramToNodeRequest.RequestType.SYNC_RECORDING);
@Override
public boolean apply(@Nullable StramToNodeRequest input)
{
return input != null && MATCH_TYPES.contains(input.getRequestType());
}
}
private class SetOperatorPropertyRequestFilter implements Predicate<StramToNodeRequest>
{
final String propertyKey;
SetOperatorPropertyRequestFilter(String key)
{
this.propertyKey = key;
}
@Override
public boolean apply(@Nullable StramToNodeRequest input)
{
if (input == null) {
return false;
}
if (input instanceof StramToNodeSetPropertyRequest) {
return ((StramToNodeSetPropertyRequest)input).getPropertyKey().equals(propertyKey);
}
return false;
}
}
private void updateOnDeployRequests(PTOperator p, Predicate<StramToNodeRequest> superseded, StramToNodeRequest newRequest)
{
// filter existing requests
List<StramToNodeRequest> cloneRequests = new ArrayList<StramToNodeRequest>(p.deployRequests.size());
for (StramToNodeRequest existingRequest : p.deployRequests) {
if (!superseded.apply(existingRequest)) {
cloneRequests.add(existingRequest);
}
}
// add new request, if any
if (newRequest != null) {
cloneRequests.add(newRequest);
}
p.deployRequests = Collections.unmodifiableList(cloneRequests);
}
private StreamingContainerAgent getContainerAgentFromOperatorId(int operatorId)
{
PTOperator oper = plan.getAllOperators().get(operatorId);
if (oper != null) {
StreamingContainerAgent sca = containers.get(oper.getContainer().getExternalId());
if (sca != null) {
return sca;
}
}
// throw exception that propagates to web client
throw new NotFoundException("Operator ID " + operatorId + " not found");
}
public void startRecording(String id, int operId, String portName, long numWindows)
{
StreamingContainerAgent sca = getContainerAgentFromOperatorId(operId);
StramToNodeStartRecordingRequest request = new StramToNodeStartRecordingRequest();
request.setOperatorId(operId);
if (!StringUtils.isBlank(portName)) {
request.setPortName(portName);
}
request.setNumWindows(numWindows);
request.setId(id);
sca.addOperatorRequest(request);
PTOperator operator = plan.getAllOperators().get(operId);
if (operator != null) {
// restart on deploy
updateOnDeployRequests(operator, new RecordingRequestFilter(), request);
}
}
public void stopRecording(int operId, String portName)
{
StreamingContainerAgent sca = getContainerAgentFromOperatorId(operId);
StramToNodeRequest request = new StramToNodeRequest();
request.setOperatorId(operId);
if (!StringUtils.isBlank(portName)) {
request.setPortName(portName);
}
request.setRequestType(StramToNodeRequest.RequestType.STOP_RECORDING);
sca.addOperatorRequest(request);
PTOperator operator = plan.getAllOperators().get(operId);
if (operator != null) {
// no stop on deploy, but remove existing start
updateOnDeployRequests(operator, new RecordingRequestFilter(), null);
}
}
public void syncStats()
{
statsRecorder.requestSync();
}
public void syncEvents()
{
eventRecorder.requestSync();
}
public void stopContainer(String containerId)
{
this.containerStopRequests.put(containerId, containerId);
}
public Recoverable getSetOperatorProperty(String operatorName, String propertyName, String propertyValue) {
return new SetOperatorProperty(operatorName, propertyName, propertyValue);
}
public Recoverable getSetPhysicalOperatorProperty(int operatorId, String propertyName, String propertyValue) {
return new SetPhysicalOperatorProperty(operatorId, propertyName, propertyValue);
}
public void setOperatorProperty(String operatorName, String propertyName, String propertyValue)
{
OperatorMeta logicalOperator = plan.getLogicalPlan().getOperatorMeta(operatorName);
if (logicalOperator == null) {
throw new IllegalArgumentException("Unknown operator " + operatorName);
}
writeJournal(new SetOperatorProperty(operatorName, propertyName, propertyValue));
setOperatorProperty(logicalOperator, propertyName, propertyValue);
}
private void setOperatorProperty(OperatorMeta logicalOperator, String propertyName, String propertyValue)
{
Map<String, String> properties = Collections.singletonMap(propertyName, propertyValue);
LogicalPlanConfiguration.setOperatorProperties(logicalOperator.getOperator(), properties);
List<PTOperator> operators = plan.getOperators(logicalOperator);
for (PTOperator o : operators) {
StramToNodeSetPropertyRequest request = new StramToNodeSetPropertyRequest();
request.setOperatorId(o.getId());
request.setPropertyKey(propertyName);
request.setPropertyValue(propertyValue);
addOperatorRequest(o, request);
// re-apply to checkpointed state on deploy
updateOnDeployRequests(o, new SetOperatorPropertyRequestFilter(propertyName), request);
}
// Ideally this event would be recorded only after confirmation from the operators,
// but operators do not currently acknowledge these requests, so record it here for now.
recordEventAsync(new StramEvent.SetOperatorPropertyEvent(logicalOperator.getName(), propertyName, propertyValue));
}
/**
* Set property on a physical operator. The property change is applied asynchronously on the deployed operator.
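* <p>Illustrative call with a hypothetical operator id and property:
* {@code setPhysicalOperatorProperty(3, "threshold", "100")}.</p>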
*
* @param operatorId id of the physical operator
* @param propertyName name of the property to set
* @param propertyValue new value of the property
*/
public void setPhysicalOperatorProperty(int operatorId, String propertyName, String propertyValue)
{
PTOperator o = this.plan.getAllOperators().get(operatorId);
if (o == null) {
return;
}
writeJournal(new SetPhysicalOperatorProperty(operatorId, propertyName, propertyValue));
setPhysicalOperatorProperty(o, propertyName, propertyValue);
}
private void setPhysicalOperatorProperty(PTOperator o, String propertyName, String propertyValue)
{
String operatorName = o.getName();
StramToNodeSetPropertyRequest request = new StramToNodeSetPropertyRequest();
request.setOperatorId(o.getId());
request.setPropertyKey(propertyName);
request.setPropertyValue(propertyValue);
addOperatorRequest(o, request);
updateOnDeployRequests(o, new SetOperatorPropertyRequestFilter(propertyName), request);
// Ideally this event would be recorded only after confirmation from the operators,
// but operators do not currently acknowledge these requests, so record it here for now.
recordEventAsync(new StramEvent.SetPhysicalOperatorPropertyEvent(operatorName, o.getId(), propertyName, propertyValue));
}
@Override
public void addOperatorRequest(PTOperator oper, StramToNodeRequest request)
{
StreamingContainerAgent sca = getContainerAgent(oper.getContainer().getExternalId());
// YARN may not have assigned a resource to the container yet
if (sca != null) {
sca.addOperatorRequest(request);
}
}
/**
* Send requests to change logger levels to all containers
*
* @param changedLoggers loggers that were changed.
*/
public void setLoggersLevel(Map<String, String> changedLoggers)
{
LOG.debug("change logger request");
StramToNodeChangeLoggersRequest request = new StramToNodeChangeLoggersRequest();
request.setTargetChanges(changedLoggers);
for (StreamingContainerAgent stramChildAgent : containers.values()) {
stramChildAgent.addOperatorRequest(request);
}
}
public FutureTask<Object> getPhysicalOperatorProperty(int operatorId, String propertyName, long waitTime)
{
PTOperator o = this.plan.getAllOperators().get(operatorId);
StramToNodeGetPropertyRequest request = new StramToNodeGetPropertyRequest();
request.setOperatorId(operatorId);
request.setPropertyName(propertyName);
addOperatorRequest(o, request);
RequestHandler task = new RequestHandler();
task.requestId = nodeToStramRequestIds.incrementAndGet();
task.waitTime = waitTime;
request.requestId = task.requestId;
FutureTask<Object> future = new FutureTask<Object>(task);
dispatch(future);
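// the dispatched task (see RequestHandler) polls commandResponse until the operator
// replies with a matching requestId or waitTime elapses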
return future;
}
public Attribute.AttributeMap getApplicationAttributes()
{
LogicalPlan lp = getLogicalPlan();
try {
return lp.getAttributes().clone();
}
catch (CloneNotSupportedException ex) {
throw new RuntimeException("Cannot clone DAG attributes", ex);
}
}
public Attribute.AttributeMap getOperatorAttributes(String operatorId)
{
OperatorMeta logicalOperator = plan.getLogicalPlan().getOperatorMeta(operatorId);
if (logicalOperator == null) {
throw new IllegalArgumentException("Invalid operatorId " + operatorId);
}
try {
return logicalOperator.getAttributes().clone();
}
catch (CloneNotSupportedException ex) {
throw new RuntimeException("Cannot clone operator attributes", ex);
}
}
public Map<String, Object> getPortAttributes(String operatorId, String portName)
{
OperatorMeta logicalOperator = plan.getLogicalPlan().getOperatorMeta(operatorId);
if (logicalOperator == null) {
throw new IllegalArgumentException("Invalid operatorId " + operatorId);
}
Operators.PortMappingDescriptor portMap = new Operators.PortMappingDescriptor();
Operators.describe(logicalOperator.getOperator(), portMap);
PortContextPair<InputPort<?>> inputPort = portMap.inputPorts.get(portName);
if (inputPort != null) {
HashMap<String, Object> portAttributeMap = new HashMap<String, Object>();
InputPortMeta portMeta = logicalOperator.getMeta(inputPort.component);
Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
}
return portAttributeMap;
}
else {
PortContextPair<OutputPort<?>> outputPort = portMap.outputPorts.get(portName);
if (outputPort != null) {
HashMap<String, Object> portAttributeMap = new HashMap<String, Object>();
OutputPortMeta portMeta = logicalOperator.getMeta(outputPort.component);
Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
}
return portAttributeMap;
}
throw new IllegalArgumentException("Invalid port name " + portName);
}
}
public LogicalPlan getLogicalPlan()
{
return plan.getLogicalPlan();
}
/**
* Asynchronously process the logical, physical plan and execution layer changes.
* Caller can use the returned future to block until processing is complete.
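* <p>Illustrative usage (assuming a prepared request list):
* <pre>{@code
* FutureTask<Object> f = scm.logicalPlanModification(requests);
* f.get(); // blocks until the plan changes have been validated and applied
* }</pre>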
*
* @param requests logical plan change requests to apply
* @return future that completes when the changes have been applied
* @throws Exception
*/
public FutureTask<Object> logicalPlanModification(List<LogicalPlanRequest> requests) throws Exception
{
// delegate processing to dispatch thread
FutureTask<Object> future = new FutureTask<Object>(new LogicalPlanChangeRunnable(requests));
dispatch(future);
//LOG.info("Scheduled plan changes: {}", requests);
return future;
}
private class LogicalPlanChangeRunnable implements java.util.concurrent.Callable<Object>
{
final List<LogicalPlanRequest> requests;
private LogicalPlanChangeRunnable(List<LogicalPlanRequest> requests)
{
this.requests = requests;
}
@Override
public Object call() throws Exception
{
// clone logical plan, for dry run and validation
LOG.info("Begin plan changes: {}", requests);
LogicalPlan lp = plan.getLogicalPlan();
ByteArrayOutputStream bos = new ByteArrayOutputStream();
LogicalPlan.write(lp, bos);
bos.flush();
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
lp = LogicalPlan.read(bis);
PlanModifier pm = new PlanModifier(lp);
for (LogicalPlanRequest request : requests) {
LOG.debug("Dry run plan change: {}", request);
request.execute(pm);
}
lp.validate();
// perform changes on live plan
pm = new PlanModifier(plan);
for (LogicalPlanRequest request : requests) {
request.execute(pm);
// record an event for the request; ideally this would be recorded upon confirmation
recordEventAsync(new StramEvent.ChangeLogicalPlanEvent(request));
}
pm.applyChanges(StreamingContainerManager.this);
LOG.info("Plan changes applied: {}", requests);
return null;
}
}
public CriticalPathInfo getCriticalPathInfo()
{
return criticalPathInfo;
}
private void checkpoint() throws IOException
{
if (recoveryHandler != null) {
LOG.debug("Checkpointing state");
DataOutputStream out = recoveryHandler.rotateLog();
journal.setOutputStream(out);
// checkpoint the state
CheckpointState cs = new CheckpointState();
cs.finals = this.vars;
cs.physicalPlan = this.plan;
recoveryHandler.save(cs);
}
}
@Override
public void writeJournal(Recoverable operation)
{
try {
if (journal != null) {
journal.write(operation);
}
}
catch (Exception e) {
throw new IllegalStateException("Failed to write to journal " + operation, e);
}
}
/**
* Get the instance for the given application. If the application directory contains a checkpoint, the state will be restored.
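* <p>Illustrative usage (hypothetical handler variable):
* <pre>{@code
* StreamingContainerManager scm = StreamingContainerManager.getInstance(recoveryHandler, dag, true);
* }</pre>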
*
* @param rh recovery handler used to restore and persist manager state
* @param dag logical plan of the application
* @param enableEventRecording whether to enable event recording
* @return instance of {@link StreamingContainerManager}
* @throws IOException
*/
public static StreamingContainerManager getInstance(RecoveryHandler rh, LogicalPlan dag, boolean enableEventRecording) throws IOException
{
try {
CheckpointState checkpointedState = (CheckpointState) rh.restore();
StreamingContainerManager scm;
if (checkpointedState == null) {
scm = new StreamingContainerManager(dag, enableEventRecording, new SystemClock());
}
else {
// find better way to support final transient members
PhysicalPlan plan = checkpointedState.physicalPlan;
plan.getLogicalPlan().setAttribute(LogicalPlan.APPLICATION_ATTEMPT_ID, dag.getAttributes().get(LogicalPlan.APPLICATION_ATTEMPT_ID));
scm = new StreamingContainerManager(checkpointedState, enableEventRecording);
for (Field f : plan.getClass().getDeclaredFields()) {
if (f.getType() == PlanContext.class) {
f.setAccessible(true);
try {
f.set(plan, scm);
}
catch (Exception e) {
throw new RuntimeException("Failed to set " + f, e);
}
f.setAccessible(false);
}
}
DataInputStream logStream = rh.getLog();
scm.journal.replay(logStream);
logStream.close();
// restore checkpoint info
plan.syncCheckpoints(scm.vars.windowStartMillis, scm.clock.getTime());
scm.committedWindowId = scm.updateCheckpoints(true);
// at this point the physical plan has been fully restored
// populate container agents for existing containers
for (PTContainer c : plan.getContainers()) {
if (c.getExternalId() != null) {
LOG.debug("Restore container agent {} for {}", c.getExternalId(), c);
StreamingContainerAgent sca = new StreamingContainerAgent(c, scm.newStreamingContainerContext(c), scm);
scm.containers.put(c.getExternalId(), sca);
}
else {
LOG.debug("Requesting new resource for {}", c.toIdStateString());
scm.requestContainer(c);
}
}
}
scm.recoveryHandler = rh;
scm.checkpoint();
return scm;
}
catch (IOException e) {
throw new IllegalStateException("Failed to read checkpointed state", e);
}
}
private static class FinalVars implements java.io.Serializable
{
private static final long serialVersionUID = 3827310557521807024L;
private final long windowStartMillis;
private final int heartbeatTimeoutMillis;
private final String appPath;
private final int maxWindowsBehindForStats;
private final boolean enableStatsRecording;
private final int rpcLatencyCompensationSamples;
private FinalVars(LogicalPlan dag, long tms)
{
Attribute.AttributeMap attributes = dag.getAttributes();
/* align the window start time to a whole second for readability */
windowStartMillis = tms - (tms % 1000);
if (attributes.get(LogicalPlan.APPLICATION_PATH) == null) {
throw new IllegalArgumentException("Not set: " + LogicalPlan.APPLICATION_PATH);
}
this.appPath = attributes.get(LogicalPlan.APPLICATION_PATH);
if (attributes.get(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS) == null) {
attributes.put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 500);
}
if (attributes.get(LogicalPlan.CHECKPOINT_WINDOW_COUNT) == null) {
attributes.put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 30000 / attributes.get(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
}
this.heartbeatTimeoutMillis = dag.getValue(LogicalPlan.HEARTBEAT_TIMEOUT_MILLIS);
this.maxWindowsBehindForStats = dag.getValue(LogicalPlan.STATS_MAX_ALLOWABLE_WINDOWS_LAG);
this.enableStatsRecording = dag.getValue(LogicalPlan.ENABLE_STATS_RECORDING);
this.rpcLatencyCompensationSamples = dag.getValue(LogicalPlan.RPC_LATENCY_COMPENSATION_SAMPLES);
}
private FinalVars(FinalVars other, LogicalPlan dag)
{
this.windowStartMillis = other.windowStartMillis;
this.heartbeatTimeoutMillis = other.heartbeatTimeoutMillis;
this.maxWindowsBehindForStats = other.maxWindowsBehindForStats;
this.enableStatsRecording = other.enableStatsRecording;
this.appPath = dag.getValue(LogicalPlan.APPLICATION_PATH);
this.rpcLatencyCompensationSamples = other.rpcLatencyCompensationSamples;
}
}
/**
* The state that can be saved and used to recover the manager.
*/
static class CheckpointState implements Serializable
{
private static final long serialVersionUID = 3827310557521807024L;
private FinalVars finals;
private PhysicalPlan physicalPlan;
/**
* Modify previously saved state to allow for re-launch of application.
*/
public void setApplicationId(LogicalPlan newApp, Configuration conf)
{
LogicalPlan lp = physicalPlan.getLogicalPlan();
String appId = newApp.getValue(LogicalPlan.APPLICATION_ID);
String oldAppId = lp.getValue(LogicalPlan.APPLICATION_ID);
if (oldAppId == null) {
throw new AssertionError("Missing original application id");
}
lp.setAttribute(LogicalPlan.APPLICATION_ID, appId);
lp.setAttribute(LogicalPlan.APPLICATION_PATH, newApp.assertAppPath());
lp.setAttribute(LogicalPlan.LIBRARY_JARS, newApp.getValue(LogicalPlan.LIBRARY_JARS));
lp.setAttribute(LogicalPlan.ARCHIVES, newApp.getValue(LogicalPlan.ARCHIVES));
this.finals = new FinalVars(finals, lp);
StorageAgent sa = lp.getValue(OperatorContext.STORAGE_AGENT);
if (sa instanceof AsyncFSStorageAgent) {
// replace the default storage agent, if present
AsyncFSStorageAgent fssa = (AsyncFSStorageAgent) sa;
if (fssa.path.contains(oldAppId)) {
fssa = new AsyncFSStorageAgent(fssa.path.replace(oldAppId, appId), conf);
lp.setAttribute(OperatorContext.STORAGE_AGENT, fssa);
}
} else if (sa instanceof FSStorageAgent) {
// replace the default storage agent, if present
FSStorageAgent fssa = (FSStorageAgent) sa;
if (fssa.path.contains(oldAppId)) {
fssa = new FSStorageAgent(fssa.path.replace(oldAppId, appId), conf);
lp.setAttribute(OperatorContext.STORAGE_AGENT, fssa);
}
}
}
}
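/**
* Persistence hooks for manager state. Based on how this class uses the interface,
* the expected call sequence on recovery is:
* <pre>{@code
* Object snapshot = rh.restore();   // null when no prior checkpoint exists
* journal.replay(rh.getLog());      // re-apply journaled operations
* }</pre>
* and on checkpoint:
* <pre>{@code
* journal.setOutputStream(rh.rotateLog());
* rh.save(state);
* }</pre>
*/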
public interface RecoveryHandler
{
/**
* Save snapshot.
*
* @param state
* @throws IOException
*/
void save(Object state) throws IOException;
/**
* Restore snapshot. Must get/apply log after restore.
*
* @return snapshot
* @throws IOException
*/
Object restore() throws IOException;
/**
* Backup log. Call before save.
*
* @return output stream
* @throws IOException
*/
DataOutputStream rotateLog() throws IOException;
/**
* Get input stream for log. Call after restore.
*
* @return input stream
* @throws IOException
*/
DataInputStream getLog() throws IOException;
}
private class RequestHandler implements Callable<Object>
{
/*
* The unique id of this request
*/
public long requestId;
/*
* The maximum time this thread will wait for the response
*/
public long waitTime = 5000;
@Override
@SuppressWarnings("SleepWhileInLoop")
public Object call() throws Exception
{
Object obj;
long expiryTime = System.currentTimeMillis() + waitTime;
while ((obj = commandResponse.getIfPresent(requestId)) == null && expiryTime > System.currentTimeMillis()) {
Thread.sleep(100);
LOG.debug("Polling for a response to request with Id {}", requestId);
}
if (obj != null) {
commandResponse.invalidate(requestId);
return obj;
}
return null;
}
}
@VisibleForTesting
protected Collection<Pair<Long, Map<String, Object>>> getLogicalMetrics(String operatorName)
{
if (logicalMetrics.get(operatorName) != null) {
return Collections.unmodifiableCollection(logicalMetrics.get(operatorName));
}
return null;
}
@VisibleForTesting
protected Object getLogicalCounter(String operatorName)
{
return latestLogicalCounters.get(operatorName);
}
}
|
apache-2.0
|
uaraven/nano
|
sample/webservice/eBayDemoApp/src/com/ebay/trading/api/ProfileCategoryGroupCodeType.java
|
1481
|
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.ebay.trading.api;
/**
*
* Enumerated type that defines the category group values. Business Policies profiles (Payment,
* Shipping, and Return Policy) are linked to category groups.
* <br><br>
* <span class="tablenote"><strong>Note:</strong>
* Business Policies are not yet available for use on the eBay platform.
* </span>
*
*/
public enum ProfileCategoryGroupCodeType {
/**
*
* Default value.
*
*/
INHERIT("Inherit"),
/**
*
* None.
*
*/
NONE("None"),
/**
*
* For Business Policies, the 'ALL' enumeration value represents all eBay categories
* except for motor vehicles.
*
*/
ALL("ALL"),
/**
*
* For Business Policies, the 'MOTORS_VEHICLE' enumeration value represents all motor vehicle
* categories.
*
*/
MOTORS_VEHICLE("MOTORS_VEHICLE");
private final String value;
ProfileCategoryGroupCodeType(String v) {
value = v;
}
public String value() {
return value;
}
public static ProfileCategoryGroupCodeType fromValue(String v) {
if (v != null) {
for (ProfileCategoryGroupCodeType c: ProfileCategoryGroupCodeType.values()) {
if (c.value.equals(v)) {
return c;
}
}
}
throw new IllegalArgumentException(v);
}
}
|
apache-2.0
|
nkhuyu/parquet-mr
|
parquet-column/src/test/java/parquet/column/values/dictionary/TestDictionary.java
|
20344
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package parquet.column.values.dictionary;
import static org.junit.Assert.assertEquals;
import static parquet.column.Encoding.PLAIN;
import static parquet.column.Encoding.PLAIN_DICTIONARY;
import static parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
import static parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE;
import static parquet.schema.PrimitiveType.PrimitiveTypeName.FLOAT;
import static parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
import parquet.bytes.BytesInput;
import parquet.column.ColumnDescriptor;
import parquet.column.Dictionary;
import parquet.column.Encoding;
import parquet.column.page.DictionaryPage;
import parquet.column.values.ValuesReader;
import parquet.column.values.ValuesWriter;
import parquet.column.values.dictionary.DictionaryValuesWriter.PlainBinaryDictionaryValuesWriter;
import parquet.column.values.dictionary.DictionaryValuesWriter.PlainDoubleDictionaryValuesWriter;
import parquet.column.values.dictionary.DictionaryValuesWriter.PlainFloatDictionaryValuesWriter;
import parquet.column.values.dictionary.DictionaryValuesWriter.PlainIntegerDictionaryValuesWriter;
import parquet.column.values.dictionary.DictionaryValuesWriter.PlainLongDictionaryValuesWriter;
import parquet.column.values.fallback.FallbackValuesWriter;
import parquet.column.values.plain.BinaryPlainValuesReader;
import parquet.column.values.plain.PlainValuesReader;
import parquet.column.values.plain.PlainValuesWriter;
import parquet.io.api.Binary;
import parquet.schema.PrimitiveType.PrimitiveTypeName;
public class TestDictionary {
private <I extends DictionaryValuesWriter> FallbackValuesWriter<I, PlainValuesWriter> plainFallBack(I dvw, int initialSize) {
return FallbackValuesWriter.of(dvw, new PlainValuesWriter(initialSize));
}
private FallbackValuesWriter<PlainBinaryDictionaryValuesWriter, PlainValuesWriter> newPlainBinaryDictionaryValuesWriter(int maxDictionaryByteSize, int initialSize) {
return plainFallBack(new PlainBinaryDictionaryValuesWriter(maxDictionaryByteSize, PLAIN_DICTIONARY, PLAIN_DICTIONARY), initialSize);
}
private FallbackValuesWriter<PlainLongDictionaryValuesWriter, PlainValuesWriter> newPlainLongDictionaryValuesWriter(int maxDictionaryByteSize, int initialSize) {
return plainFallBack(new PlainLongDictionaryValuesWriter(maxDictionaryByteSize, PLAIN_DICTIONARY, PLAIN_DICTIONARY), initialSize);
}
private FallbackValuesWriter<PlainIntegerDictionaryValuesWriter, PlainValuesWriter> newPlainIntegerDictionaryValuesWriter(int maxDictionaryByteSize, int initialSize) {
return plainFallBack(new PlainIntegerDictionaryValuesWriter(maxDictionaryByteSize, PLAIN_DICTIONARY, PLAIN_DICTIONARY), initialSize);
}
private FallbackValuesWriter<PlainDoubleDictionaryValuesWriter, PlainValuesWriter> newPlainDoubleDictionaryValuesWriter(int maxDictionaryByteSize, int initialSize) {
return plainFallBack(new PlainDoubleDictionaryValuesWriter(maxDictionaryByteSize, PLAIN_DICTIONARY, PLAIN_DICTIONARY), initialSize);
}
private FallbackValuesWriter<PlainFloatDictionaryValuesWriter, PlainValuesWriter> newPlainFloatDictionaryValuesWriter(int maxDictionaryByteSize, int initialSize) {
return plainFallBack(new PlainFloatDictionaryValuesWriter(maxDictionaryByteSize, PLAIN_DICTIONARY, PLAIN_DICTIONARY), initialSize);
}
@Test
public void testBinaryDictionary() throws IOException {
int COUNT = 100;
ValuesWriter cw = newPlainBinaryDictionaryValuesWriter(200, 10000);
writeRepeated(COUNT, cw, "a");
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
writeRepeated(COUNT, cw, "b");
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
// now we will fall back
writeDistinct(COUNT, cw, "c");
BytesInput bytes3 = getBytesAndCheckEncoding(cw, PLAIN);
DictionaryValuesReader cr = initDicReader(cw, BINARY);
checkRepeated(COUNT, bytes1, cr, "a");
checkRepeated(COUNT, bytes2, cr, "b");
BinaryPlainValuesReader cr2 = new BinaryPlainValuesReader();
checkDistinct(COUNT, bytes3, cr2, "c");
}
@Test
public void testBinaryDictionaryFallBack() throws IOException {
int slabSize = 100;
int maxDictionaryByteSize = 50;
final ValuesWriter cw = newPlainBinaryDictionaryValuesWriter(maxDictionaryByteSize, slabSize);
int fallBackThreshold = maxDictionaryByteSize;
int dataSize = 0;
for (long i = 0; i < 100; i++) {
Binary binary = Binary.fromString("str" + i);
cw.writeBytes(binary);
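// account for the value bytes plus the 4-byte length prefix the plain dictionary
// stores with each binary value (this drives the fallback threshold check below)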
dataSize += (binary.length() + 4);
if (dataSize < fallBackThreshold) {
assertEquals(PLAIN_DICTIONARY, cw.getEncoding());
} else {
assertEquals(PLAIN, cw.getEncoding());
}
}
// fell back to PLAIN encoding, therefore use a plain values reader to read it back
ValuesReader reader = new BinaryPlainValuesReader();
reader.initFromPage(100, cw.getBytes().toByteArray(), 0);
for (long i = 0; i < 100; i++) {
assertEquals(Binary.fromString("str" + i), reader.readBytes());
}
// simulate cutting the page
cw.reset();
assertEquals(0, cw.getBufferedSize());
}
@Test
public void testBinaryDictionaryChangedValues() throws IOException {
int COUNT = 100;
ValuesWriter cw = newPlainBinaryDictionaryValuesWriter(200, 10000);
writeRepeatedWithReuse(COUNT, cw, "a");
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
writeRepeatedWithReuse(COUNT, cw, "b");
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
// now we will fall back
writeDistinct(COUNT, cw, "c");
BytesInput bytes3 = getBytesAndCheckEncoding(cw, PLAIN);
DictionaryValuesReader cr = initDicReader(cw, BINARY);
checkRepeated(COUNT, bytes1, cr, "a");
checkRepeated(COUNT, bytes2, cr, "b");
BinaryPlainValuesReader cr2 = new BinaryPlainValuesReader();
checkDistinct(COUNT, bytes3, cr2, "c");
}
@Test
public void testFirstPageFallBack() throws IOException {
int COUNT = 1000;
ValuesWriter cw = newPlainBinaryDictionaryValuesWriter(10000, 10000);
writeDistinct(COUNT, cw, "a");
// not efficient so falls back
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN);
writeRepeated(COUNT, cw, "b");
// still plain because we fell back on first page
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN);
ValuesReader cr = new BinaryPlainValuesReader();
checkDistinct(COUNT, bytes1, cr, "a");
checkRepeated(COUNT, bytes2, cr, "b");
}
@Test
public void testSecondPageFallBack() throws IOException {
int COUNT = 1000;
ValuesWriter cw = newPlainBinaryDictionaryValuesWriter(1000, 10000);
writeRepeated(COUNT, cw, "a");
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
writeDistinct(COUNT, cw, "b");
// not efficient so falls back
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN);
writeRepeated(COUNT, cw, "a");
// still plain because we fell back on previous page
BytesInput bytes3 = getBytesAndCheckEncoding(cw, PLAIN);
ValuesReader cr = initDicReader(cw, BINARY);
checkRepeated(COUNT, bytes1, cr, "a");
cr = new BinaryPlainValuesReader();
checkDistinct(COUNT, bytes2, cr, "b");
checkRepeated(COUNT, bytes3, cr, "a");
}
@Test
public void testLongDictionary() throws IOException {
int COUNT = 1000;
int COUNT2 = 2000;
final FallbackValuesWriter<PlainLongDictionaryValuesWriter, PlainValuesWriter> cw = newPlainLongDictionaryValuesWriter(10000, 10000);
for (long i = 0; i < COUNT; i++) {
cw.writeLong(i % 50);
}
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
for (long i = COUNT2; i > 0; i--) {
cw.writeLong(i % 50);
}
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
DictionaryValuesReader cr = initDicReader(cw, PrimitiveTypeName.INT64);
cr.initFromPage(COUNT, bytes1.toByteArray(), 0);
for (long i = 0; i < COUNT; i++) {
long back = cr.readLong();
assertEquals(i % 50, back);
}
cr.initFromPage(COUNT2, bytes2.toByteArray(), 0);
for (long i = COUNT2; i > 0; i--) {
long back = cr.readLong();
assertEquals(i % 50, back);
}
}
private void roundTripLong(FallbackValuesWriter<PlainLongDictionaryValuesWriter, PlainValuesWriter> cw, ValuesReader reader, int maxDictionaryByteSize) throws IOException {
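// a long occupies 8 bytes in the dictionary, so the writer is expected to fall
// back to PLAIN once maxDictionaryByteSize / 8 distinct values have been written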
int fallBackThreshold = maxDictionaryByteSize / 8;
for (long i = 0; i < 100; i++) {
cw.writeLong(i);
if (i < fallBackThreshold) {
assertEquals(cw.getEncoding(), PLAIN_DICTIONARY);
} else {
assertEquals(cw.getEncoding(), PLAIN);
}
}
reader.initFromPage(100, cw.getBytes().toByteArray(), 0);
for (long i = 0; i < 100; i++) {
assertEquals(i, reader.readLong());
}
}
@Test
public void testLongDictionaryFallBack() throws IOException {
int slabSize = 100;
int maxDictionaryByteSize = 50;
final FallbackValuesWriter<PlainLongDictionaryValuesWriter, PlainValuesWriter> cw = newPlainLongDictionaryValuesWriter(maxDictionaryByteSize, slabSize);
// fell back to PLAIN encoding, therefore use a PlainValuesReader to read it back
ValuesReader reader = new PlainValuesReader.LongPlainValuesReader();
roundTripLong(cw, reader, maxDictionaryByteSize);
// simulate cutting the page
cw.reset();
assertEquals(0, cw.getBufferedSize());
cw.resetDictionary();
roundTripLong(cw, reader, maxDictionaryByteSize);
}
@Test
public void testDoubleDictionary() throws IOException {
int COUNT = 1000;
int COUNT2 = 2000;
final FallbackValuesWriter<PlainDoubleDictionaryValuesWriter, PlainValuesWriter> cw = newPlainDoubleDictionaryValuesWriter(10000, 10000);
for (double i = 0; i < COUNT; i++) {
cw.writeDouble(i % 50);
}
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
for (double i = COUNT2; i > 0; i--) {
cw.writeDouble(i % 50);
}
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
final DictionaryValuesReader cr = initDicReader(cw, DOUBLE);
cr.initFromPage(COUNT, bytes1.toByteArray(), 0);
for (double i = 0; i < COUNT; i++) {
double back = cr.readDouble();
assertEquals(i % 50, back, 0.0);
}
cr.initFromPage(COUNT2, bytes2.toByteArray(), 0);
for (double i = COUNT2; i > 0; i--) {
double back = cr.readDouble();
assertEquals(i % 50, back, 0.0);
}
}
private void roundTripDouble(FallbackValuesWriter<PlainDoubleDictionaryValuesWriter, PlainValuesWriter> cw, ValuesReader reader, int maxDictionaryByteSize) throws IOException {
int fallBackThreshold = maxDictionaryByteSize / 8;
for (double i = 0; i < 100; i++) {
cw.writeDouble(i);
if (i < fallBackThreshold) {
assertEquals(PLAIN_DICTIONARY, cw.getEncoding());
} else {
assertEquals(PLAIN, cw.getEncoding());
}
}
reader.initFromPage(100, cw.getBytes().toByteArray(), 0);
for (double i = 0; i < 100; i++) {
assertEquals(i, reader.readDouble(), 0.00001);
}
}
@Test
public void testDoubleDictionaryFallBack() throws IOException {
int slabSize = 100;
int maxDictionaryByteSize = 50;
final FallbackValuesWriter<PlainDoubleDictionaryValuesWriter, PlainValuesWriter> cw = newPlainDoubleDictionaryValuesWriter(maxDictionaryByteSize, slabSize);
// The writer fell back to Plain encoding, therefore use a PlainValuesReader to read it back
ValuesReader reader = new PlainValuesReader.DoublePlainValuesReader();
roundTripDouble(cw, reader, maxDictionaryByteSize);
// simulate cutting the page
cw.reset();
assertEquals(0, cw.getBufferedSize());
cw.resetDictionary();
roundTripDouble(cw, reader, maxDictionaryByteSize);
}
@Test
public void testIntDictionary() throws IOException {
int COUNT = 2000;
int COUNT2 = 4000;
final FallbackValuesWriter<PlainIntegerDictionaryValuesWriter, PlainValuesWriter> cw = newPlainIntegerDictionaryValuesWriter(10000, 10000);
for (int i = 0; i < COUNT; i++) {
cw.writeInteger(i % 50);
}
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
for (int i = COUNT2; i > 0; i--) {
cw.writeInteger(i % 50);
}
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
DictionaryValuesReader cr = initDicReader(cw, INT32);
cr.initFromPage(COUNT, bytes1.toByteArray(), 0);
for (int i = 0; i < COUNT; i++) {
int back = cr.readInteger();
assertEquals(i % 50, back);
}
cr.initFromPage(COUNT2, bytes2.toByteArray(), 0);
for (int i = COUNT2; i > 0; i--) {
int back = cr.readInteger();
assertEquals(i % 50, back);
}
}
private void roundTripInt(FallbackValuesWriter<PlainIntegerDictionaryValuesWriter, PlainValuesWriter> cw, ValuesReader reader, int maxDictionaryByteSize) throws IOException {
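// each distinct int adds 4 bytes to the dictionary, hence the divisor of 4 here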
int fallBackThreshold = maxDictionaryByteSize / 4;
for (int i = 0; i < 100; i++) {
cw.writeInteger(i);
if (i < fallBackThreshold) {
assertEquals(PLAIN_DICTIONARY, cw.getEncoding());
} else {
assertEquals(PLAIN, cw.getEncoding());
}
}
reader.initFromPage(100, cw.getBytes().toByteArray(), 0);
for (int i = 0; i < 100; i++) {
assertEquals(i, reader.readInteger());
}
}
@Test
public void testIntDictionaryFallBack() throws IOException {
int slabSize = 100;
int maxDictionaryByteSize = 50;
final FallbackValuesWriter<PlainIntegerDictionaryValuesWriter, PlainValuesWriter> cw = newPlainIntegerDictionaryValuesWriter(maxDictionaryByteSize, slabSize);
// The writer fell back to Plain encoding, therefore use a PlainValuesReader to read it back
ValuesReader reader = new PlainValuesReader.IntegerPlainValuesReader();
roundTripInt(cw, reader, maxDictionaryByteSize);
// simulate cutting the page
cw.reset();
assertEquals(0, cw.getBufferedSize());
cw.resetDictionary();
roundTripInt(cw, reader, maxDictionaryByteSize);
}
@Test
public void testFloatDictionary() throws IOException {
int COUNT = 2000;
int COUNT2 = 4000;
final FallbackValuesWriter<PlainFloatDictionaryValuesWriter, PlainValuesWriter> cw = newPlainFloatDictionaryValuesWriter(10000, 10000);
for (float i = 0; i < COUNT; i++) {
cw.writeFloat(i % 50);
}
BytesInput bytes1 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
for (float i = COUNT2; i > 0; i--) {
cw.writeFloat(i % 50);
}
BytesInput bytes2 = getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
assertEquals(50, cw.initialWriter.getDictionarySize());
DictionaryValuesReader cr = initDicReader(cw, FLOAT);
cr.initFromPage(COUNT, bytes1.toByteArray(), 0);
for (float i = 0; i < COUNT; i++) {
float back = cr.readFloat();
assertEquals(i % 50, back, 0.0f);
}
cr.initFromPage(COUNT2, bytes2.toByteArray(), 0);
for (float i = COUNT2; i > 0; i--) {
float back = cr.readFloat();
assertEquals(i % 50, back, 0.0f);
}
}
private void roundTripFloat(FallbackValuesWriter<PlainFloatDictionaryValuesWriter, PlainValuesWriter> cw, ValuesReader reader, int maxDictionaryByteSize) throws IOException {
int fallBackThreshold = maxDictionaryByteSize / 4;
for (float i = 0; i < 100; i++) {
cw.writeFloat(i);
if (i < fallBackThreshold) {
assertEquals(PLAIN_DICTIONARY, cw.getEncoding());
} else {
assertEquals(PLAIN, cw.getEncoding());
}
}
reader.initFromPage(100, cw.getBytes().toByteArray(), 0);
for (float i = 0; i < 100; i++) {
assertEquals(i, reader.readFloat(), 0.00001);
}
}
@Test
public void testFloatDictionaryFallBack() throws IOException {
int slabSize = 100;
int maxDictionaryByteSize = 50;
final FallbackValuesWriter<PlainFloatDictionaryValuesWriter, PlainValuesWriter> cw = newPlainFloatDictionaryValuesWriter(maxDictionaryByteSize, slabSize);
// The writer fell back to Plain encoding, therefore use a PlainValuesReader to read it back
ValuesReader reader = new PlainValuesReader.FloatPlainValuesReader();
roundTripFloat(cw, reader, maxDictionaryByteSize);
// simulate cutting the page
cw.reset();
assertEquals(0, cw.getBufferedSize());
cw.resetDictionary();
roundTripFloat(cw, reader, maxDictionaryByteSize);
}
@Test
public void testZeroValues() throws IOException {
FallbackValuesWriter<PlainIntegerDictionaryValuesWriter, PlainValuesWriter> cw = newPlainIntegerDictionaryValuesWriter(100, 100);
cw.writeInteger(34);
cw.writeInteger(34);
getBytesAndCheckEncoding(cw, PLAIN_DICTIONARY);
DictionaryValuesReader reader = initDicReader(cw, INT32);
// pretend there are 100 nulls. what matters is offset = bytes.length.
byte[] bytes = {0x00, 0x01, 0x02, 0x03}; // data doesn't matter
int offset = bytes.length;
reader.initFromPage(100, bytes, offset);
}
private DictionaryValuesReader initDicReader(ValuesWriter cw, PrimitiveTypeName type)
throws IOException {
final DictionaryPage dictionaryPage = cw.createDictionaryPage().copy();
final ColumnDescriptor descriptor = new ColumnDescriptor(new String[] {"foo"}, type, 0, 0);
final Dictionary dictionary = PLAIN.initDictionary(descriptor, dictionaryPage);
final DictionaryValuesReader cr = new DictionaryValuesReader(dictionary);
return cr;
}
private void checkDistinct(int COUNT, BytesInput bytes, ValuesReader cr, String prefix) throws IOException {
cr.initFromPage(COUNT, bytes.toByteArray(), 0);
for (int i = 0; i < COUNT; i++) {
Assert.assertEquals(prefix + i, cr.readBytes().toStringUsingUTF8());
}
}
private void checkRepeated(int COUNT, BytesInput bytes, ValuesReader cr, String prefix) throws IOException {
cr.initFromPage(COUNT, bytes.toByteArray(), 0);
for (int i = 0; i < COUNT; i++) {
Assert.assertEquals(prefix + i % 10, cr.readBytes().toStringUsingUTF8());
}
}
private void writeDistinct(int COUNT, ValuesWriter cw, String prefix) {
for (int i = 0; i < COUNT; i++) {
cw.writeBytes(Binary.fromString(prefix + i));
}
}
private void writeRepeated(int COUNT, ValuesWriter cw, String prefix) {
for (int i = 0; i < COUNT; i++) {
cw.writeBytes(Binary.fromString(prefix + i % 10));
}
}
private void writeRepeatedWithReuse(int COUNT, ValuesWriter cw, String prefix) {
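// copy new content into the backing array of a single reused Binary on every
// iteration, exercising writers that must copy the bytes rather than keep the reference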
Binary reused = Binary.fromString(prefix + "0");
for (int i = 0; i < COUNT; i++) {
Binary content = Binary.fromString(prefix + i % 10);
System.arraycopy(content.getBytes(), 0, reused.getBytes(), 0, reused.length());
cw.writeBytes(reused);
}
}
private BytesInput getBytesAndCheckEncoding(ValuesWriter cw, Encoding encoding)
throws IOException {
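// materialize a copy of the current page bytes before resetting the writer, since
// reset() may reuse or invalidate the writer's internal buffers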
BytesInput bytes = BytesInput.copy(cw.getBytes());
assertEquals(encoding, cw.getEncoding());
cw.reset();
return bytes;
}
}
|
apache-2.0
|
UniquePassive/runelite
|
runelite-client/src/main/java/net/runelite/client/plugins/screenshot/imgur/ImageUploadRequest.java
|
1852
|
/*
* Copyright (c) 2018, Lotto <https://github.com/devLotto>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.screenshot.imgur;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Base64;
import lombok.Data;
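/**
 * Request body for an Imgur image upload: the file is read and base64-encoded
 * eagerly in the constructor, matching Imgur's "base64" upload type.
 *
 * Minimal usage sketch (hypothetical file path; the caller is assumed to
 * serialize this object to JSON before posting it):
 *
 *   ImageUploadRequest request = new ImageUploadRequest(new File("screenshot.png"));
 */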
@Data
public class ImageUploadRequest
{
private final String image;
private final String type;
public ImageUploadRequest(File imageFile) throws IOException
{
this.image = Base64.getEncoder().encodeToString(Files.readAllBytes(imageFile.toPath()));
this.type = "base64";
}
}
|
bsd-2-clause
|
magicDGS/gatk
|
src/testUtils/java/org/broadinstitute/hellbender/testutils/ReadsPreprocessingPipelineTestData.java
|
15659
|
package org.broadinstitute.hellbender.testutils;
import com.google.common.collect.Lists;
import htsjdk.samtools.SAMRecord;
import org.broadinstitute.hellbender.engine.ReadContextData;
import org.broadinstitute.hellbender.engine.ReferenceShard;
import org.broadinstitute.hellbender.engine.VariantShard;
import org.broadinstitute.hellbender.exceptions.GATKException;
import org.broadinstitute.hellbender.utils.KV;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.read.ArtificialReadUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.reference.ReferenceBases;
import org.broadinstitute.hellbender.utils.variant.GATKVariant;
import org.broadinstitute.hellbender.utils.variant.MinimalVariant;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* ReadsPreprocessingPipelineTestData contains coordinated test data that can be used in the many
* transforms that are part of the ReadsPreprocessingPipeline.
*/
public class ReadsPreprocessingPipelineTestData {
private final List<KV<Integer, Integer>> readStartLength;
private final List<GATKRead> reads;
private final List<KV<ReferenceShard, Iterable<GATKRead>>> kvRefShardiReads;
private final List<SimpleInterval> readIntervals;
private final List<SimpleInterval> allIntervals;
private final List<KV<ReferenceBases, Iterable<GATKRead>>> kvRefBasesiReads;
private final List<KV<VariantShard, GATKRead>> kvVariantShardRead;
private final List<GATKVariant> variants;
private final List<KV<VariantShard, GATKVariant>> kvVariantShardVariant;
private final List<KV<GATKRead, ReferenceBases>> kvReadsRefBases;
private final List<KV<GATKRead, GATKVariant>> kvReadVariant;
private final List<KV<GATKRead, Iterable<GATKVariant>>> kvReadiVariantBroken; // The dataflow version is currently broken (Issue #795).
private final List<KV<GATKRead, Iterable<GATKVariant>>> kvReadiVariantFixed;
private final List<KV<GATKRead, ReadContextData>> kvReadContextData;
/**
* ReadsPreprocessingPipelineTestData holds coordinated data for testing classes that work with
* reads, variants, and reference bases, and for pairing those types together.
* @param clazz The class used to back the GATKRead; currently only SAMRecord.class is supported.
*/
public ReadsPreprocessingPipelineTestData(Class<?> clazz) {
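// REFERENCE_SHARD_SIZE is an exact multiple of VARIANT_SHARDSIZE, so this ratio is an
// integer; verifyDivisibilityWithRefShard() below asserts that invariant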
final int shardRatio = ReferenceShard.REFERENCE_SHARD_SIZE / VariantShard.VARIANT_SHARDSIZE;
readStartLength = Arrays.asList(KV.of(100, 50), KV.of(140, 100),
KV.of(ReferenceShard.REFERENCE_SHARD_SIZE, 10),
KV.of(3*ReferenceShard.REFERENCE_SHARD_SIZE - 1, 10));
reads = Lists.newArrayList(
makeRead("1", readStartLength.get(0), 1, clazz),
makeRead("1", readStartLength.get(1), 2, clazz),
makeRead("1", readStartLength.get(2), 3, clazz),
makeRead("1", readStartLength.get(3), 4, clazz),
makeRead("2", readStartLength.get(2), 5, clazz)
);
kvRefShardiReads = Arrays.asList(
KV.of(new ReferenceShard(0, "1"), Lists.newArrayList(reads.get(1), reads.get(0))),
KV.of(new ReferenceShard(1, "1"), Lists.newArrayList(reads.get(2))),
KV.of(new ReferenceShard(2, "1"), Lists.newArrayList(reads.get(3))),
KV.of(new ReferenceShard(1, "2"), Lists.newArrayList(reads.get(4)))
);
readIntervals = Lists.newArrayList(
makeInterval("1", readStartLength.get(0)),
makeInterval("1", readStartLength.get(1)),
makeInterval("1", readStartLength.get(2)),
makeInterval("1", readStartLength.get(3)),
makeInterval("2", readStartLength.get(2))
);
// The first two reads are mapped onto the same reference shard. The ReferenceBases returned should
// be from the start of the first read [readStartLength.get(0).getKey()] to the end of
// the second [readStartLength.get(1).getKey() + readStartLength.get(1).getValue()-1].
SimpleInterval spannedReadInterval =
new SimpleInterval("1", readStartLength.get(0).getKey(), readStartLength.get(1).getKey() + readStartLength.get(1).getValue()-1);
allIntervals = Lists.newArrayList(readIntervals.iterator());
allIntervals.add(spannedReadInterval);
kvRefBasesiReads = Arrays.asList(
KV.of(FakeReferenceSource.bases(spannedReadInterval), Lists.newArrayList(reads.get(1), reads.get(0))),
KV.of(FakeReferenceSource.bases(readIntervals.get(2)), Lists.newArrayList(reads.get(2))),
KV.of(FakeReferenceSource.bases(readIntervals.get(3)), Lists.newArrayList(reads.get(3))),
KV.of(FakeReferenceSource.bases(readIntervals.get(4)), Lists.newArrayList(reads.get(4)))
);
kvReadsRefBases = Arrays.asList(
KV.of(reads.get(0), getBases("1", reads.get(0).getStart(), reads.get(0).getEnd())),
KV.of(reads.get(1), getBases("1", reads.get(1).getStart(), reads.get(1).getEnd())),
KV.of(reads.get(2), getBases("1", reads.get(2).getStart(), reads.get(2).getEnd())),
KV.of(reads.get(3), getBases("1", reads.get(3).getStart(), reads.get(3).getEnd())),
KV.of(reads.get(4), getBases("2", reads.get(4).getStart(), reads.get(4).getEnd()))
);
variants = Lists.newArrayList(
new MinimalVariant(new SimpleInterval("1", 170, 180), true, false),
new MinimalVariant(new SimpleInterval("1", 210, 220), false, true),
new MinimalVariant(new SimpleInterval("1", ReferenceShard.REFERENCE_SHARD_SIZE,
ReferenceShard.REFERENCE_SHARD_SIZE), true, false),
new MinimalVariant(new SimpleInterval("1", 3 * ReferenceShard.REFERENCE_SHARD_SIZE - 2,
3 * ReferenceShard.REFERENCE_SHARD_SIZE + 2), false, true),
new MinimalVariant(new SimpleInterval("2", ReferenceShard.REFERENCE_SHARD_SIZE,
ReferenceShard.REFERENCE_SHARD_SIZE), false, true)
);
kvVariantShardRead = Arrays.asList(
KV.of(new VariantShard(0, "1"), reads.get(0)),
KV.of(new VariantShard(0, "1"), reads.get(1)),
KV.of(new VariantShard(shardRatio, "1"), reads.get(2)),
KV.of(new VariantShard(3 * shardRatio - 1, "1"), reads.get(3)), // The second to last read spans
KV.of(new VariantShard(3 * shardRatio, "1"), reads.get(3)), // two shards.
KV.of(new VariantShard(shardRatio, "2"), reads.get(4))
);
kvVariantShardVariant = Arrays.asList(
KV.of(new VariantShard(0, "1"), variants.get(0)),
KV.of(new VariantShard(0, "1"), variants.get(1)),
KV.of(new VariantShard(shardRatio, "1"), variants.get(2)),
KV.of(new VariantShard(3*shardRatio - 1, "1"), variants.get(3)), // The second to last variant spans
KV.of(new VariantShard(3*shardRatio, "1"), variants.get(3)), // two shards.
KV.of(new VariantShard(shardRatio, "2"), variants.get(4))
);
kvReadVariant = Arrays.asList(
KV.of(reads.get(1), variants.get(0)),
KV.of(reads.get(1), variants.get(1)),
KV.of(reads.get(2), variants.get(2)),
KV.of(reads.get(3), variants.get(3)), // The read and variant span two variant shards, which is
KV.of(reads.get(3), variants.get(3)), // why this pair appears twice (indices 3 and 4).
KV.of(reads.get(4), variants.get(4))
);
final KV<GATKRead, GATKVariant> readNullVariant = KV.of(reads.get(0), null);
Iterable<GATKVariant> variant10 = Lists.newArrayList(kvReadVariant.get(1).getValue(), kvReadVariant.get(0).getValue());
Iterable<GATKVariant> variant2 = Lists.newArrayList(kvReadVariant.get(2).getValue());
Iterable<GATKVariant> variant3 = Lists.newArrayList(kvReadVariant.get(3).getValue());
Iterable<GATKVariant> variant4 = Lists.newArrayList(kvReadVariant.get(5).getValue());
Iterable<GATKVariant> nullVariant = Lists.newArrayList(readNullVariant.getValue());
// The dataflow version is currently broken (Issue #795). The bug only surfaces at this
// point and is effectively masked within the larger transforms.
kvReadiVariantBroken = Arrays.asList(
KV.of(kvReadVariant.get(0).getKey(), variant10),
KV.of(kvReadVariant.get(2).getKey(), variant2),
KV.of(kvReadVariant.get(3).getKey(), variant3),
KV.of(kvReadVariant.get(5).getKey(), variant4)
);
kvReadiVariantFixed = Arrays.asList(
KV.of(kvReadVariant.get(0).getKey(), variant10),
KV.of(kvReadVariant.get(2).getKey(), variant2),
KV.of(kvReadVariant.get(3).getKey(), variant3),
KV.of(kvReadVariant.get(5).getKey(), variant4),
KV.of(reads.get(0), nullVariant)
);
kvReadContextData = Arrays.asList(
KV.of(kvReadsRefBases.get(0).getKey(), new ReadContextData(kvReadsRefBases.get(0).getValue(), Lists.newArrayList())),
KV.of(kvReadsRefBases.get(1).getKey(), new ReadContextData(kvReadsRefBases.get(1).getValue(), kvReadiVariantBroken.get(0).getValue())),
KV.of(kvReadsRefBases.get(2).getKey(), new ReadContextData(kvReadsRefBases.get(2).getValue(), kvReadiVariantBroken.get(1).getValue())),
KV.of(kvReadsRefBases.get(3).getKey(), new ReadContextData(kvReadsRefBases.get(3).getValue(), kvReadiVariantBroken.get(2).getValue())),
KV.of(kvReadsRefBases.get(4).getKey(), new ReadContextData(kvReadsRefBases.get(4).getValue(), kvReadiVariantBroken.get(3).getValue()))
);
}
/**
* makeRead creates a read backed by a SAMRecord.
* @param contig the contig the read is mapped to
* @param startLength the key is the start of the read, the value is the length.
* @param i name
* @param clazz the backing class; currently only SAMRecord.class is supported
* @return a new GATKRead backed by a SAMRecord.
*/
public static GATKRead makeRead(String contig, KV<Integer, Integer> startLength, int i, Class<?> clazz) {
return makeRead(contig, startLength.getKey(), startLength.getValue(),i, clazz);
}
/**
* makeRead creates a read backed by a SAMRecord.
* @param contig the contig the read is mapped to
* @param start start position of the read
* @param length length of the read
* @param i name
* @param clazz the backing class; currently only SAMRecord.class is supported
* @return a new GATKRead backed by a SAMRecord.
*/
public static GATKRead makeRead(String contig, int start, int length, int i, Class<?> clazz) {
if (clazz == SAMRecord.class) {
return ArtificialReadUtils.createSamBackedRead(Integer.toString(i), contig, start, length);
} else {
throw new GATKException("invalid GATKRead type");
}
}
/**
* Generates a List of artificial reads located in significant positions relative to reference shard
* boundaries. For each reference shard, places a read at the start of the shard, 1 base after the
* start, at the middle of the shard, 1 base before the end, and at the end. Each read has a length of 100.
*
* @param numContigs Generate reads for this many contigs (starting at "1" and increasing numerically)
* @param numShardsPerContig Generate reads for this many reference shards within each contig. Each shard will have 5 reads, as described above.
* @param readImplementation Backing GATKRead implementation to use (currently only SAMRecord.class is supported)
* @return a List of artificial reads located in significant positions relative to reference shard boundaries
*/
public static List<GATKRead> makeReferenceShardBoundaryReads( final int numContigs, final int numShardsPerContig, final Class<?> readImplementation ) {
final List<GATKRead> reads = new ArrayList<>();
int id = 0;
for ( int contig = 1; contig <= numContigs; ++contig ) {
for ( int shardNum = 0; shardNum < numShardsPerContig; ++shardNum ) {
// All shards except the first start on a multiple of REFERENCE_SHARD_SIZE (since we can't have a mapped read with an alignment start of 0, the first shard starts at 1)
final int shardStart = ReferenceShard.REFERENCE_SHARD_SIZE * shardNum + (shardNum == 0 ? 1 : 0);
final int shardEnd = ReferenceShard.REFERENCE_SHARD_SIZE * (shardNum + 1) - 1;
final int shardMiddle = shardEnd - (ReferenceShard.REFERENCE_SHARD_SIZE / 2);
for ( int readStart : Arrays.asList(shardStart, shardStart + 1, shardMiddle, shardEnd - 1, shardEnd) ) {
reads.add(makeRead(Integer.toString(contig), readStart, 100, ++id, readImplementation));
}
}
}
return reads;
}
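// Minimal usage sketch (hypothetical arguments): 2 contigs with 3 shards per contig
// yields 2 * 3 * 5 = 30 reads, five per shard as described above:
// final List<GATKRead> boundaryReads = makeReferenceShardBoundaryReads(2, 3, SAMRecord.class);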
private SimpleInterval makeInterval(String contig, KV<Integer, Integer> startLength) {
return new SimpleInterval(contig, startLength.getKey(), startLength.getKey() + startLength.getValue() - 1);
}
private ReferenceBases getBases(String contig, int start, int end) {
return FakeReferenceSource.bases(new SimpleInterval(contig, start, end));
}
public final List<KV<Integer, Integer>> getReadStartLength() {
return readStartLength;
}
public List<KV<ReferenceShard, Iterable<GATKRead>>> getKvRefShardiReads() {
return kvRefShardiReads;
}
public List<SimpleInterval> getReadIntervals() {
return readIntervals;
}
public List<SimpleInterval> getAllIntervals() {
return allIntervals;
}
public List<KV<ReferenceBases, Iterable<GATKRead>>> getKvRefBasesiReads() {
return kvRefBasesiReads;
}
public List<GATKRead> getReads() {
return reads;
}
public List<KV<GATKRead, ReferenceBases>> getKvReadsRefBases() {
return kvReadsRefBases;
}
/**
* The dataflow version is currently broken (Issue #795).
*/
public List<KV<GATKRead, Iterable<GATKVariant>>> getKvReadiVariantBroken() {
return kvReadiVariantBroken;
}
public List<KV<GATKRead, GATKVariant>> getKvReadVariant() {
return kvReadVariant;
}
public List<GATKVariant> getVariants() {
return variants;
}
public List<KV<GATKRead, ReadContextData>> getKvReadContextData() {
return kvReadContextData;
}
public List<KV<VariantShard, GATKRead>> getKvVariantShardRead() {
return kvVariantShardRead;
}
public List<KV<VariantShard, GATKVariant>> getKvVariantShardVariant() {
return kvVariantShardVariant;
}
@Test
public static void verifyDivisibilityWithRefShard() {
// We want the ratio between the two shard types to be an int so we can use them more easily for testing.
Assert.assertEquals(Math.floorMod(ReferenceShard.REFERENCE_SHARD_SIZE, VariantShard.VARIANT_SHARDSIZE), 0);
}
public List<KV<GATKRead, Iterable<GATKVariant>>> getKvReadiVariantFixed() {
return kvReadiVariantFixed;
}
}
|
bsd-3-clause
|
vkscool/xstream
|
xstream/src/java/com/thoughtworks/xstream/io/ExtendedHierarchicalStreamWriterHelper.java
|
767
|
/*
* Copyright (C) 2006 Joe Walnes.
* Copyright (C) 2006, 2007, 2014 XStream Committers.
* All rights reserved.
*
* The software in this package is published under the terms of the BSD
* style license a copy of which has been included with this distribution in
* the LICENSE.txt file.
*
* Created on 22. June 2006 by Mauro Talevi
*/
package com.thoughtworks.xstream.io;
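/**
 * Writes a start node with type information when the given writer supports the
 * extended interface, and falls back to the plain single-argument form otherwise.
 *
 * Minimal usage sketch (hypothetical writer and node name):
 *
 *   ExtendedHierarchicalStreamWriterHelper.startNode(writer, "price", BigDecimal.class);
 */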
public class ExtendedHierarchicalStreamWriterHelper {
public static void startNode(final HierarchicalStreamWriter writer, final String name, final Class<?> clazz) {
if (writer instanceof ExtendedHierarchicalStreamWriter) {
((ExtendedHierarchicalStreamWriter)writer).startNode(name, clazz);
} else {
writer.startNode(name);
}
}
}
|
bsd-3-clause
|
js0701/chromium-crosswalk
|
chrome/android/java/src/org/chromium/chrome/browser/enhancedbookmarks/EnhancedBookmarkFilter.java
|
660
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.enhancedbookmarks;
/**
* Possible filters for the enhanced bookmarks.
*/
enum EnhancedBookmarkFilter {
OFFLINE_PAGES("OFFLINE_PAGES");
/**
* An {@link EnhancedBookmarkFilter} can be persisted in URLs. To ensure the
* URLs are consistent, values should remain the same even after the enums
* are renamed.
*/
public final String value;
private EnhancedBookmarkFilter(String value) {
this.value = value;
}
}
|
bsd-3-clause
|