code
stringlengths 3
1.04M
| repo_name
stringlengths 5
109
| path
stringlengths 6
306
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.04M
|
---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.interceptor;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.AdviceWithRouteBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.RouteDefinition;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests the lambda-based {@link AdviceWithRouteBuilder#adviceWith} API, which advises
 * (intercepts/weaves) an existing route without writing a full RouteBuilder subclass.
 */
public class AdviceWithLambdaTest extends ContextTestSupport {

    @Test
    public void testNoAdvised() throws Exception {
        // Baseline: without any advice the message flows through both endpoints
        getMockEndpoint("mock:foo").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testAdvised() throws Exception {
        // null route id means: advise the single route in the context
        AdviceWithRouteBuilder.adviceWith(context, null, a -> {
            a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
        });

        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }
    // END SNIPPET: e1

    @Test
    public void testAdvisedNoLog() throws Exception {
        // third argument false disables logging of the route before/after advising
        AdviceWithRouteBuilder.adviceWith(context, null, false, a -> {
            a.weaveByToUri("mock:result").remove();
            a.weaveAddLast().transform().constant("Bye World");
        });

        getMockEndpoint("mock:foo").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(0);

        Object out = template.requestBody("direct:start", "Hello World");
        assertEquals("Bye World", out);

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testAdvisedNoNewRoutesAllowed() throws Exception {
        // adding new routes via from(...) is not allowed when advising an existing route
        assertThrows(IllegalArgumentException.class,
                () -> AdviceWithRouteBuilder.adviceWith(context, 0, a -> {
                    a.from("direct:bar").to("mock:bar");
                    a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo")
                            .to("mock:advised");
                }));
    }

    @Test
    public void testAdvisedThrowException() throws Exception {
        AdviceWithRouteBuilder.adviceWith(context, "myRoute", a -> {
            a.interceptSendToEndpoint("mock:foo").to("mock:advised").throwException(new IllegalArgumentException("Damn"));
        });

        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(0);

        // the advised route throws; Camel wraps it in a CamelExecutionException
        CamelExecutionException e = assertThrows(CamelExecutionException.class,
                () -> template.sendBody("direct:start", "Hello World"));
        assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
        assertEquals("Damn", e.getCause().getMessage());

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testAdvisedRouteDefinition() throws Exception {
        // advising by passing the RouteDefinition instance directly
        AdviceWithRouteBuilder.adviceWith(context, context.getRouteDefinitions().get(0), a -> {
            a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
        });

        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testAdvisedEmptyRouteDefinition() throws Exception {
        // an empty RouteDefinition is not part of the context and cannot be advised
        assertThrows(IllegalArgumentException.class,
                () -> AdviceWithRouteBuilder.adviceWith(context, new RouteDefinition(), a -> {
                    a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo")
                            .to("mock:advised");
                }));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").id("myRoute").to("mock:foo").to("mock:result");
            }
        };
    }
}
| adessaigne/camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithLambdaTest.java | Java | apache-2.0 | 5,390 |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.launcher3;
import android.content.ContentValues;
import android.content.Context;
import com.android.launcher3.compat.UserHandleCompat;
import java.util.ArrayList;
/**
 * Represents a folder containing shortcuts or apps.
 */
public class FolderInfo extends ItemInfo {

    public static final int NO_FLAGS = 0x00000000;

    /**
     * The folder is locked in sorted mode
     */
    public static final int FLAG_ITEMS_SORTED = 0x00000001;

    /**
     * It is a work folder
     */
    public static final int FLAG_WORK_FOLDER = 0x00000002;

    /**
     * The multi-page animation has run for this folder
     */
    public static final int FLAG_MULTI_PAGE_ANIMATION = 0x00000004;

    /**
     * Whether this folder has been opened
     */
    public boolean opened;

    // Bitmask of the FLAG_* constants currently set on this folder
    public int options;

    /**
     * The apps and shortcuts
     */
    public ArrayList<ShortcutInfo> contents = new ArrayList<>();

    ArrayList<FolderListener> listeners = new ArrayList<>();

    public FolderInfo() {
        itemType = LauncherSettings.Favorites.ITEM_TYPE_FOLDER;
        user = UserHandleCompat.myUserHandle();
    }

    /**
     * Add an app or shortcut and notify all listeners.
     *
     * @param item the item to add
     * @param animate whether listeners should animate the change
     */
    public void add(ShortcutInfo item, boolean animate) {
        contents.add(item);
        // Indexed iteration so a callback that (de)registers a listener does not
        // raise a ConcurrentModificationException
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onAdd(item);
        }
        itemsChanged(animate);
    }

    /**
     * Remove an app or shortcut. Does not change the DB.
     *
     * @param item the item to remove
     * @param animate whether listeners should animate the change
     */
    public void remove(ShortcutInfo item, boolean animate) {
        contents.remove(item);
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onRemove(item);
        }
        itemsChanged(animate);
    }

    /**
     * Sets the folder title and notifies all listeners.
     */
    public void setTitle(CharSequence title) {
        this.title = title;
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onTitleChanged(title);
        }
    }

    @Override
    void onAddToDatabase(Context context, ContentValues values) {
        super.onAddToDatabase(context, values);
        // Null-safe: a folder without a title is stored as NULL instead of
        // crashing with a NullPointerException on title.toString()
        values.put(LauncherSettings.Favorites.TITLE, title == null ? null : title.toString());
        values.put(LauncherSettings.Favorites.OPTIONS, options);
    }

    public void addListener(FolderListener listener) {
        listeners.add(listener);
    }

    public void removeListener(FolderListener listener) {
        listeners.remove(listener);
    }

    /**
     * Notifies all listeners that the folder contents changed.
     */
    public void itemsChanged(boolean animate) {
        for (int i = 0; i < listeners.size(); i++) {
            listeners.get(i).onItemsChanged(animate);
        }
    }

    /**
     * Callback interface for observing changes to this folder.
     */
    public interface FolderListener {
        void onAdd(ShortcutInfo item);

        void onRemove(ShortcutInfo item);

        void onTitleChanged(CharSequence title);

        void onItemsChanged(boolean animate);
    }

    /**
     * @param optionFlag one of the FLAG_* constants
     * @return true if the given flag is currently set
     */
    public boolean hasOption(int optionFlag) {
        return (options & optionFlag) != 0;
    }

    /**
     * @param option flag to set or clear
     * @param isEnabled whether to set or clear the flag
     * @param context if not null, save changes to the db.
     */
    public void setOption(int option, boolean isEnabled, Context context) {
        int oldOptions = options;
        if (isEnabled) {
            options |= option;
        } else {
            options &= ~option;
        }
        // Only hit the database when the flags actually changed
        if (context != null && oldOptions != options) {
            LauncherModel.updateItemInDatabase(context, this);
        }
    }
}
| YAJATapps/FlickLauncher | src/com/android/launcher3/FolderInfo.java | Java | apache-2.0 | 4,146 |
/*
* Copyright 2014 BrightTag, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.brighttag.agathon.dao;
import javax.annotation.Nullable;
import com.google.common.collect.ImmutableSet;
import com.brighttag.agathon.model.CassandraInstance;
/**
 * DAO for Cassandra Instances.
 *
 * <p>Read methods signal backing-store failures via {@code BackingStoreException};
 * write methods declare no checked exceptions.
 *
 * @author codyaray
 * @since 5/12/2012
 */
public interface CassandraInstanceDao {

    /**
     * Returns the set of Cassandra instances in a ring.
     *
     * @param ring name of the Cassandra ring
     * @return set of Cassandra instances in the ring; empty if the ring has none
     * @throws BackingStoreException if there was a problem communicating with the backing store.
     */
    ImmutableSet<CassandraInstance> findAll(String ring) throws BackingStoreException;

    /**
     * Returns the Cassandra instance with the given {@code id} or {@code null} if not found.
     *
     * @param ring name of the Cassandra ring
     * @param id the Cassandra instance ID
     * @return the Cassandra instance or {@code null} if not found
     * @throws BackingStoreException if there was a problem communicating with the backing store.
     */
    @Nullable CassandraInstance findById(String ring, int id) throws BackingStoreException;

    /**
     * Saves the Cassandra {@code instance}.
     * NOTE(review): presumably create-or-update (upsert) semantics — confirm with implementations.
     *
     * @param ring name of the Cassandra ring
     * @param instance the Cassandra instance
     */
    void save(String ring, CassandraInstance instance);

    /**
     * Deletes the Cassandra {@code instance}.
     *
     * @param ring name of the Cassandra ring
     * @param instance the Cassandra instance
     */
    void delete(String ring, CassandraInstance instance);
}
| BrightTag/agathon | agathon-manager/src/main/java/com/brighttag/agathon/dao/CassandraInstanceDao.java | Java | apache-2.0 | 2,133 |
/*
* MainActivity.java
*
* Copyright (C) 2013 6 Wunderkinder GmbH.
*
* @author Jose L Ugia - @Jl_Ugia
* @author Antonio Consuegra - @aconsuegra
* @author Cesar Valiente - @CesarValiente
* @author Benedikt Lehnert - @blehnert
* @author Timothy Achumba - @iam_timm
* @version 1.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wunderlist.slidinglayersample;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.SharedPreferences;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.View;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.TextView;
import com.wunderlist.slidinglayer.LayerTransformer;
import com.wunderlist.slidinglayer.SlidingLayer;
import com.wunderlist.slidinglayer.transformer.AlphaTransformer;
import com.wunderlist.slidinglayer.transformer.RotationTransformer;
import com.wunderlist.slidinglayer.transformer.SlideJoyTransformer;
public class MainActivity extends Activity {
private SlidingLayer mSlidingLayer;
private TextView swipeText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
bindViews();
initState();
}
@SuppressLint("NewApi")
@Override
protected void onResume() {
super.onResume();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
getActionBar().setDisplayHomeAsUpEnabled(true);
}
}
/**
* View binding
*/
private void bindViews() {
mSlidingLayer = (SlidingLayer) findViewById(R.id.slidingLayer1);
swipeText = (TextView) findViewById(R.id.swipeText);
}
/**
* Initializes the origin state of the layer
*/
private void initState() {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
setupSlidingLayerPosition(prefs.getString("layer_location", "right"));
setupSlidingLayerTransform(prefs.getString("layer_transform", "none"));
setupShadow(prefs.getBoolean("layer_has_shadow", false));
setupLayerOffset(prefs.getBoolean("layer_has_offset", false));
setupPreviewMode(prefs.getBoolean("preview_mode_enabled", false));
}
private void setupSlidingLayerPosition(String layerPosition) {
LayoutParams rlp = (LayoutParams) mSlidingLayer.getLayoutParams();
int textResource;
Drawable d;
switch (layerPosition) {
case "right":
textResource = R.string.swipe_right_label;
d = getResources().getDrawable(R.drawable.container_rocket_right);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_RIGHT);
break;
case "left":
textResource = R.string.swipe_left_label;
d = getResources().getDrawable(R.drawable.container_rocket_left);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_LEFT);
break;
case "top":
textResource = R.string.swipe_up_label;
d = getResources().getDrawable(R.drawable.container_rocket);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_TOP);
rlp.width = LayoutParams.MATCH_PARENT;
rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
break;
default:
textResource = R.string.swipe_down_label;
d = getResources().getDrawable(R.drawable.container_rocket);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_BOTTOM);
rlp.width = LayoutParams.MATCH_PARENT;
rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
}
d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight());
swipeText.setCompoundDrawables(null, d, null, null);
swipeText.setText(getResources().getString(textResource));
mSlidingLayer.setLayoutParams(rlp);
}
private void setupSlidingLayerTransform(String layerTransform) {
LayerTransformer transformer;
switch (layerTransform) {
case "alpha":
transformer = new AlphaTransformer();
break;
case "rotation":
transformer = new RotationTransformer();
break;
case "slide":
transformer = new SlideJoyTransformer();
break;
default:
return;
}
mSlidingLayer.setLayerTransformer(transformer);
}
private void setupShadow(boolean enabled) {
if (enabled) {
mSlidingLayer.setShadowSizeRes(R.dimen.shadow_size);
mSlidingLayer.setShadowDrawable(R.drawable.sidebar_shadow);
} else {
mSlidingLayer.setShadowSize(0);
mSlidingLayer.setShadowDrawable(null);
}
}
private void setupLayerOffset(boolean enabled) {
int offsetDistance = enabled ? getResources().getDimensionPixelOffset(R.dimen.offset_distance) : 0;
mSlidingLayer.setOffsetDistance(offsetDistance);
}
private void setupPreviewMode(boolean enabled) {
int previewOffset = enabled ? getResources().getDimensionPixelOffset(R.dimen.preview_offset_distance) : -1;
mSlidingLayer.setPreviewOffsetDistance(previewOffset);
}
public void buttonClicked(View v) {
switch (v.getId()) {
case R.id.buttonOpen:
mSlidingLayer.openLayer(true);
break;
case R.id.buttonClose:
mSlidingLayer.closeLayer(true);
break;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_BACK:
if (mSlidingLayer.isOpened()) {
mSlidingLayer.closeLayer(true);
return true;
}
default:
return super.onKeyDown(keyCode, event);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
finish();
return true;
}
}
| yadihaoku/android-sliding-layer-lib | SlidingLayerSample/src/main/java/com/wunderlist/slidinglayersample/MainActivity.java | Java | apache-2.0 | 6,768 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* ClientInfo.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: #axisVersion# #today#
*/
package org.apache.axis2.databinding;
import org.apache.axiom.om.OMFactory;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
/**
 * ClientInfo bean class.
 *
 * <p>Originally auto-generated from WSDL by Apache Axis2; cleaned up to use
 * typed collections instead of raw {@code ArrayList} (removes unchecked warnings).
 *
 * <p>Generated from schema type: name = ClientInfo,
 * namespace URI = http://www.wso2.com/types, namespace prefix = ns1.
 */
public class ClientInfo
        implements org.apache.axis2.databinding.ADBBean {

    public ClientInfo(String localName, String localSsn) {
        this.localName = localName;
        this.localSsn = localSsn;
    }

    public ClientInfo() {
    }

    /** field for Name */
    protected java.lang.String localName;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getName() {
        return localName;
    }

    /**
     * Auto generated setter method
     *
     * @param param Name
     */
    public void setName(java.lang.String param) {
        this.localName = param;
    }

    /** field for Ssn */
    protected java.lang.String localSsn;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getSsn() {
        return localSsn;
    }

    /**
     * Auto generated setter method
     *
     * @param param Ssn
     */
    public void setSsn(java.lang.String param) {
        this.localSsn = param;
    }

    /**
     * Databinding method to get an XML pull-parser representation of this object.
     *
     * @param qName the element name to expose the bean under
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) {
        // Typed lists replace the generated raw ArrayLists; elements alternate
        // QName (element name) / String (element text) as the ADB reader expects
        java.util.List<Object> elementList = new java.util.ArrayList<Object>();
        java.util.List<Object> attribList = new java.util.ArrayList<Object>();

        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "name"));
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));
        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "ssn"));
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSsn));

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(
                qName, elementList.toArray(), attribList.toArray());
    }

    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws XMLStreamException, ADBException {
        // Serialization is intentionally not supported by this generated bean
        throw new UnsupportedOperationException("Un implemented method");
    }

    /** Factory class that keeps the parse method */
    public static class Factory {

        /**
         * Builds a {@link ClientInfo} from the given stream reader. Reads until
         * both expected elements ("name" and "ssn") have been consumed.
         *
         * @param reader positioned at (or before) the bean's start element
         * @throws java.lang.Exception if the underlying stream cannot be parsed
         */
        public static ClientInfo parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            ClientInfo object = new ClientInfo();
            try {
                int event = reader.getEventType();
                int count = 0;
                final int argumentCount = 2; // number of elements to read: name + ssn
                boolean done = false;

                // event better be a START_ELEMENT; if not, advance to the start element
                while (!reader.isStartElement()) {
                    event = reader.next();
                }

                while (!done) {
                    if (javax.xml.stream.XMLStreamConstants.START_ELEMENT == event) {
                        if ("name".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setName(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                            count++;
                        }
                        if ("ssn".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setSsn(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                            count++;
                        }
                    }
                    if (argumentCount == count) {
                        done = true;
                    }
                    if (!done) {
                        event = reader.next();
                    }
                }
            } catch (javax.xml.stream.XMLStreamException e) {
                // Preserve the cause so stream diagnostics are not lost
                throw new java.lang.Exception(e);
            }
            return object;
        }
    } // end of factory class
}
| intalio/axis2 | modules/adb/test/org/apache/axis2/databinding/ClientInfo.java | Java | apache-2.0 | 6,169 |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.datavec.api.transform.transform.time;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.Transform;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.metadata.IntegerMetaData;
import org.datavec.api.transform.metadata.StringMetaData;
import org.datavec.api.transform.metadata.TimeMetaData;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.util.jackson.DateTimeFieldTypeDeserializer;
import org.datavec.api.util.jackson.DateTimeFieldTypeSerializer;
import org.datavec.api.writable.IntWritable;
import org.datavec.api.writable.Text;
import org.datavec.api.writable.Writable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.nd4j.shade.jackson.annotation.JsonIgnore;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Create a number of new columns by deriving their values from a Time column.
 * Can be used for example to create new columns with the year, month, day, hour, minute, second etc.
 *
 * @author Alex Black
 */
@JsonIgnoreProperties({"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@EqualsAndHashCode(exclude = {"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@Data
public class DeriveColumnsFromTimeTransform implements Transform {

    // Name of the source time column the new columns are derived from
    private final String columnName;
    // Name of the column after which the derived columns are inserted
    private final String insertAfter;
    // Time zone of the source column; taken from the column's TimeMetaData in setInputSchema
    private DateTimeZone inputTimeZone;
    // Specifications of the columns to derive
    private final List<DerivedColumn> derivedColumns;
    private Schema inputSchema;
    // Cached column indices, resolved in setInputSchema (-1 until then)
    private int insertAfterIdx = -1;
    private int deriveFromIdx = -1;

    private DeriveColumnsFromTimeTransform(Builder builder) {
        this.derivedColumns = builder.derivedColumns;
        this.columnName = builder.columnName;
        this.insertAfter = builder.insertAfter;
    }

    // JSON deserialization constructor
    public DeriveColumnsFromTimeTransform(@JsonProperty("columnName") String columnName,
                    @JsonProperty("insertAfter") String insertAfter,
                    @JsonProperty("inputTimeZone") DateTimeZone inputTimeZone,
                    @JsonProperty("derivedColumns") List<DerivedColumn> derivedColumns) {
        this.columnName = columnName;
        this.insertAfter = insertAfter;
        this.inputTimeZone = inputTimeZone;
        this.derivedColumns = derivedColumns;
    }

    @Override
    public Schema transform(Schema inputSchema) {
        // Build the output schema: copy the input columns and splice the derived
        // column metadata in right after the 'insertAfter' column
        List<ColumnMetaData> oldMeta = inputSchema.getColumnMetaData();
        List<ColumnMetaData> newMeta = new ArrayList<>(oldMeta.size() + derivedColumns.size());

        List<String> oldNames = inputSchema.getColumnNames();

        for (int i = 0; i < oldMeta.size(); i++) {
            String current = oldNames.get(i);
            newMeta.add(oldMeta.get(i));

            if (insertAfter.equals(current)) {
                //Insert the derived columns here
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            newMeta.add(new StringMetaData(d.columnName));
                            break;
                        case Integer:
                            newMeta.add(new IntegerMetaData(d.columnName)); //TODO: ranges... if it's a day, we know it must be 1 to 31, etc...
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return inputSchema.newSchema(newMeta);
    }

    @Override
    public void setInputSchema(Schema inputSchema) {
        // Resolve and validate both column indices up front so map() can use them directly
        insertAfterIdx = inputSchema.getColumnNames().indexOf(insertAfter);
        if (insertAfterIdx == -1) {
            throw new IllegalStateException(
                    "Invalid schema/insert after column: input schema does not contain column \"" + insertAfter
                            + "\"");
        }

        deriveFromIdx = inputSchema.getColumnNames().indexOf(columnName);
        if (deriveFromIdx == -1) {
            throw new IllegalStateException(
                    "Invalid source column: input schema does not contain column \"" + columnName + "\"");
        }

        this.inputSchema = inputSchema;

        if (!(inputSchema.getMetaData(columnName) instanceof TimeMetaData))
            throw new IllegalStateException("Invalid state: input column \"" + columnName
                    + "\" is not a time column. Is: " + inputSchema.getMetaData(columnName));
        // The source column's time zone governs how integer fields (hour, day, ...) are computed
        TimeMetaData meta = (TimeMetaData) inputSchema.getMetaData(columnName);
        inputTimeZone = meta.getTimeZone();
    }

    @Override
    public Schema getInputSchema() {
        return inputSchema;
    }

    @Override
    public List<Writable> map(List<Writable> writables) {
        if (writables.size() != inputSchema.numColumns()) {
            throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size()
                    + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns()
                    + "). Transform = " + toString());
        }

        int i = 0;
        // Epoch-millisecond timestamp to derive the new values from
        Writable source = writables.get(deriveFromIdx);

        List<Writable> list = new ArrayList<>(writables.size() + derivedColumns.size());
        for (Writable w : writables) {
            list.add(w);
            // After copying the 'insertAfter' column, append all derived values
            if (i++ == insertAfterIdx) {
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            list.add(new Text(d.dateTimeFormatter.print(source.toLong())));
                            break;
                        case Integer:
                            DateTime dt = new DateTime(source.toLong(), inputTimeZone);
                            list.add(new IntWritable(dt.get(d.fieldType)));
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return list;
    }

    @Override
    public List<List<Writable>> mapSequence(List<List<Writable>> sequence) {
        // Apply the single-step map to every time step in the sequence
        List<List<Writable>> out = new ArrayList<>(sequence.size());
        for (List<Writable> step : sequence) {
            out.add(map(step));
        }
        return out;
    }

    /**
     * Transform an object
     * in to another object
     *
     * @param input the record to transform (an epoch-millisecond Long timestamp)
     * @return the transformed writable (list of derived values only)
     */
    @Override
    public Object map(Object input) {
        List<Object> ret = new ArrayList<>();
        Long l = (Long) input;
        for (DerivedColumn d : derivedColumns) {
            switch (d.columnType) {
                case String:
                    ret.add(d.dateTimeFormatter.print(l));
                    break;
                case Integer:
                    DateTime dt = new DateTime(l, inputTimeZone);
                    ret.add(dt.get(d.fieldType));
                    break;
                default:
                    throw new IllegalStateException("Unexpected column type: " + d.columnType);
            }
        }
        return ret;
    }

    /**
     * Transform a sequence
     *
     * @param sequence a List of epoch-millisecond Long timestamps
     */
    @Override
    public Object mapSequence(Object sequence) {
        List<Long> longs = (List<Long>) sequence;
        List<List<Object>> ret = new ArrayList<>();
        for (Long l : longs)
            ret.add((List<Object>) map(l));
        return ret;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("DeriveColumnsFromTimeTransform(timeColumn=\"").append(columnName).append("\",insertAfter=\"")
                .append(insertAfter).append("\",derivedColumns=(");

        boolean first = true;
        for (DerivedColumn d : derivedColumns) {
            if (!first)
                sb.append(",");
            sb.append(d);
            first = false;
        }
        sb.append("))");
        return sb.toString();
    }

    /**
     * The output column name
     * after the operation has been applied
     *
     * @return the output column name
     */
    @Override
    public String outputColumnName() {
        return outputColumnNames()[0];
    }

    /**
     * The output column names
     * This will often be the same as the input
     *
     * @return the output column names
     */
    @Override
    public String[] outputColumnNames() {
        String[] ret = new String[derivedColumns.size()];
        for (int i = 0; i < ret.length; i++)
            ret[i] = derivedColumns.get(i).columnName;
        return ret;
    }

    /**
     * Returns column names
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String[] columnNames() {
        return new String[] {columnName()};
    }

    /**
     * Returns a singular column name
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String columnName() {
        return columnName;
    }

    /** Fluent builder for {@link DeriveColumnsFromTimeTransform}. */
    public static class Builder {

        private final String columnName;
        private String insertAfter;
        private final List<DerivedColumn> derivedColumns = new ArrayList<>();

        /**
         * @param timeColumnName The name of the time column from which to derive the new values
         */
        public Builder(String timeColumnName) {
            this.columnName = timeColumnName;
            this.insertAfter = timeColumnName;
        }

        /**
         * Where should the new columns be inserted?
         * By default, they will be inserted after the source column
         *
         * @param columnName Name of the column to insert the derived columns after
         */
        public Builder insertAfter(String columnName) {
            this.insertAfter = columnName;
            return this;
        }

        /**
         * Add a String column (for example, human readable format), derived from the time
         *
         * @param columnName Name of the new/derived column
         * @param format Joda time format, as per <a href="http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html">http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html</a>
         * @param timeZone Timezone to use for formatting
         */
        public Builder addStringDerivedColumn(String columnName, String format, DateTimeZone timeZone) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.String, format, timeZone, null));
            return this;
        }

        /**
         * Add an integer derived column - for example, the hour of day, etc. Uses timezone from the time column metadata
         *
         * @param columnName Name of the column
         * @param type Type of field (for example, DateTimeFieldType.hourOfDay() etc)
         */
        public Builder addIntegerDerivedColumn(String columnName, DateTimeFieldType type) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.Integer, null, null, type));
            return this;
        }

        /**
         * Create the transform instance
         */
        public DeriveColumnsFromTimeTransform build() {
            return new DeriveColumnsFromTimeTransform(this);
        }
    }

    /**
     * Specification of a single derived column: either a formatted String column
     * (uses a DateTimeFormatter) or an Integer column (uses a DateTimeFieldType).
     */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @EqualsAndHashCode(exclude = "dateTimeFormatter")
    @Data
    @JsonIgnoreProperties({"dateTimeFormatter"})
    public static class DerivedColumn implements Serializable {

        private final String columnName;
        private final ColumnType columnType;
        // Joda format pattern; only set for String-typed derived columns
        private final String format;
        private final DateTimeZone dateTimeZone;
        @JsonSerialize(using = DateTimeFieldTypeSerializer.class)
        @JsonDeserialize(using = DateTimeFieldTypeDeserializer.class)
        private final DateTimeFieldType fieldType;
        // Rebuilt from 'format' after deserialization; DateTimeFormatter itself is not serializable
        private transient DateTimeFormatter dateTimeFormatter;

        //        public DerivedColumn(String columnName, ColumnType columnType, String format, DateTimeZone dateTimeZone, DateTimeFieldType fieldType) {
        public DerivedColumn(@JsonProperty("columnName") String columnName,
                        @JsonProperty("columnType") ColumnType columnType, @JsonProperty("format") String format,
                        @JsonProperty("dateTimeZone") DateTimeZone dateTimeZone,
                        @JsonProperty("fieldType") DateTimeFieldType fieldType) {
            this.columnName = columnName;
            this.columnType = columnType;
            this.format = format;
            this.dateTimeZone = dateTimeZone;
            this.fieldType = fieldType;

            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(this.format).withZone(dateTimeZone);
        }

        @Override
        public String toString() {
            return "(name=" + columnName + ",type=" + columnType + ",derived=" + (format != null ? format : fieldType)
                    + ")";
        }

        //Custom serialization methods, because Joda Time doesn't allow DateTimeFormatter objects to be serialized :(
        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }

        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();

            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(format).withZone(dateTimeZone);
        }
    }
}
| deeplearning4j/deeplearning4j | datavec/datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java | Java | apache-2.0 | 15,108 |
package com.humbinal.ssm.test;
/**
 * Simple mutable bean holding a user's id, name and age.
 */
public class User {

    // Backing fields use camelCase; the accessor names below are kept
    // exactly as-is for caller (and mapper) compatibility.
    private long userId;
    private String userName;
    private int userAge;

    /** Creates an empty user (id 0, null name, age 0). */
    public User() {
    }

    /** @return the user's numeric id */
    public long getUser_Id() {
        return userId;
    }

    /** @param user_Id the user's numeric id */
    public void setUser_Id(long user_Id) {
        userId = user_Id;
    }

    /** @return the user's name, possibly null */
    public String getUser_name() {
        return userName;
    }

    /** @param user_name the user's name */
    public void setUser_name(String user_name) {
        userName = user_name;
    }

    /** @return the user's age in years */
    public int getUser_age() {
        return userAge;
    }

    /** @param user_age the user's age in years */
    public void setUser_age(int user_age) {
        userAge = user_age;
    }
}
| Humbinal/java-items | hum-web/hum-ssm/src/test/java/com/humbinal/ssm/test/User.java | Java | apache-2.0 | 616 |
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.vlib.ejb.impl;
import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import javax.sql.DataSource;
import org.apache.tapestry.Tapestry;
import org.apache.tapestry.contrib.ejb.XCreateException;
import org.apache.tapestry.contrib.ejb.XEJBException;
import org.apache.tapestry.contrib.ejb.XRemoveException;
import org.apache.tapestry.contrib.jdbc.IStatement;
import org.apache.tapestry.contrib.jdbc.StatementAssembly;
import org.apache.tapestry.vlib.ejb.Book;
import org.apache.tapestry.vlib.ejb.BorrowException;
import org.apache.tapestry.vlib.ejb.IBook;
import org.apache.tapestry.vlib.ejb.IBookHome;
import org.apache.tapestry.vlib.ejb.IPerson;
import org.apache.tapestry.vlib.ejb.IPersonHome;
import org.apache.tapestry.vlib.ejb.IPublisher;
import org.apache.tapestry.vlib.ejb.IPublisherHome;
import org.apache.tapestry.vlib.ejb.LoginException;
import org.apache.tapestry.vlib.ejb.Person;
import org.apache.tapestry.vlib.ejb.Publisher;
import org.apache.tapestry.vlib.ejb.RegistrationException;
import org.apache.tapestry.vlib.ejb.SortColumn;
import org.apache.tapestry.vlib.ejb.SortOrdering;
/**
* Implementation of the {@link org.apache.tapestry.vlib.ejb.IOperations}
* stateless session bean.
*
* <p>Implenents a number of stateless operations for the front end.
*
* @version $Id$
* @author Howard Lewis Ship
*
**/
public class OperationsBean implements SessionBean
{
    /** Session context supplied by the EJB container via {@link #setSessionContext(SessionContext)}. */
    private SessionContext _context;
    /** JNDI environment context (java:comp/env), resolved in {@link #ejbCreate()}. */
    private transient Context _environment;
    /** Lazily-cached home interface for Book EJBs; see {@link #getBookHome()}. */
    private transient IBookHome _bookHome;
    /** Lazily-cached home interface for Person EJBs; see {@link #getPersonHome()}. */
    private transient IPersonHome _personHome;
    /** Lazily-cached home interface for Publisher EJBs; see {@link #getPublisherHome()}. */
    private transient IPublisherHome _publisherHome;
    /**
     * Data source, retrieved from the ENC property
     * "jdbc/dataSource".
     *
     **/
    private transient DataSource _dataSource;
/**
* Sets up the bean. Locates the {@link DataSource} for the bean
* as <code>jdbc/dataSource</code> within the ENC; this data source is
* later used by {@link #getConnection()}.
*
**/
public void ejbCreate()
{
Context initial;
try
{
initial = new InitialContext();
_environment = (Context) initial.lookup("java:comp/env");
}
catch (NamingException e)
{
throw new XEJBException("Could not lookup environment.", e);
}
try
{
_dataSource = (DataSource) _environment.lookup("jdbc/dataSource");
}
catch (NamingException e)
{
e.printStackTrace();
throw new XEJBException("Could not lookup data source.", e);
}
}
    /** Does nothing; a stateless session bean holds no per-client state to release. */
    public void ejbRemove()
    {
    }
    /**
     * Does nothing, not invoked in stateless session beans.
     * (The container never passivates stateless beans.)
     **/
    public void ejbPassivate()
    {
    }
    /** Stores the container-supplied session context. */
    public void setSessionContext(SessionContext value)
    {
        _context = value;
    }
    /**
     * Does nothing, not invoked in stateless session beans.
     *
     **/
    public void ejbActivate()
    {
    }
    /**
     * Finds the book and borrower (by their primary keys) and updates the book
     * so the borrower becomes its holder.
     *
     * <p>The {@link Book} value object is returned.
     *
     * @param bookId primary key of the book being borrowed
     * @param borrowerId primary key of the borrowing person
     * @throws FinderException if the book, borrower or owner does not exist
     * @throws BorrowException if the book is not marked lendable
     **/
    public Book borrowBook(Integer bookId, Integer borrowerId)
        throws FinderException, RemoteException, BorrowException
    {
        IBookHome bookHome = getBookHome();
        IPersonHome personHome = getPersonHome();
        IBook book = bookHome.findByPrimaryKey(bookId);
        if (!book.getLendable())
            throw new BorrowException("Book may not be borrowed.");
        // Verify that the borrower exists.
        personHome.findByPrimaryKey(borrowerId);
        // TBD: Check that borrower has authenticated
        // findByPrimaryKey() throws an exception if the EJB doesn't exist,
        // so we're safe.
        personHome.findByPrimaryKey(book.getOwnerId());
        // Here's the real work; just setting the holder of the book
        // to be the borrower.
        book.setHolderId(borrowerId);
        // Re-read via SQL so the returned value object carries resolved names.
        return getBook(bookId);
    }
    /**
     * Adds a new book, verifying that the publisher and holder actually exist.
     *
     * @param attributes entity attributes for the new book; a "dateAdded"
     *        timestamp is stamped into this map before creation
     * @return the primary key of the newly created book
     **/
    public Integer addBook(Map attributes) throws CreateException, RemoteException
    {
        IBookHome home = getBookHome();
        attributes.put("dateAdded", new Timestamp(System.currentTimeMillis()));
        IBook book = home.create(attributes);
        return (Integer) book.getPrimaryKey();
    }
    /**
     * Adds a book, which will be owned and held by the specified owner.
     *
     * <p>The publisherName may either be the name of a known publisher, or
     * a new name.  A new {@link IPublisher} will be created as necessary.
     *
     * @return the primary key of the newly created book
     *         (the original javadoc incorrectly claimed a Map was returned)
     **/
    public Integer addBook(Map attributes, String publisherName)
        throws CreateException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();
        // Find or create the publisher.
        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means that no publisher with the given name already exists.
        }
        if (publisher == null)
            publisher = publisherHome.create(publisherName);
        attributes.put("publisherId", publisher.getPrimaryKey());
        return addBook(attributes);
    }
    /**
     * Updates a book in place; nothing is returned.
     *
     * @param bookId The primary key of the book to update.
     * @param attributes entity attributes to write to the book
     * @throws FinderException if the book does not exist
     **/
    public void updateBook(Integer bookId, Map attributes) throws FinderException, RemoteException
    {
        IBookHome bookHome = getBookHome();
        IBook book = bookHome.findByPrimaryKey(bookId);
        book.updateEntityAttributes(attributes);
    }
    /**
     * Updates a book, adding a new Publisher at the same time.
     *
     * @param bookId The primary key of the book to update.
     * @param attributes attributes to change
     * @param publisherName The name of the new publisher.
     * @throws FinderException if the book, holder or publisher can not be located.
     * @throws CreateException if the {@link IPublisher} can not be created.
     **/
    public void updateBook(Integer bookId, Map attributes, String publisherName)
        throws CreateException, FinderException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();
        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means we need to create the Publisher
        }
        if (publisher == null)
            publisher = publisherHome.create(publisherName);
        // Don't duplicate all that other code!
        attributes.put("publisherId", publisher.getPrimaryKey());
        updateBook(bookId, attributes);
    }
public void updatePerson(Integer personId, Map attributes)
throws FinderException, RemoteException
{
IPersonHome home = getPersonHome();
IPerson person = home.findByPrimaryKey(personId);
person.updateEntityAttributes(attributes);
}
    /**
     * Fetches all publishers via direct SQL, ordered by name.
     *
     * @return value objects for every publisher in the database
     **/
    public Publisher[] getPublishers()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();
        try
        {
            connection = getConnection();
            StatementAssembly assembly = new StatementAssembly();
            assembly.newLine("SELECT PUBLISHER_ID, NAME");
            assembly.newLine("FROM PUBLISHER");
            assembly.newLine("ORDER BY NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            while (set.next())
            {
                Integer primaryKey = (Integer) set.getObject(1);
                String name = set.getString(2);
                list.add(new Publisher(primaryKey, name));
            }
        }
        catch (SQLException ex)
        {
            ex.printStackTrace();
            throw new XEJBException("Could not fetch all Publishers.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        // Convert from List to Publisher[]
        return (Publisher[]) list.toArray(new Publisher[list.size()]);
    }
    /**
     * Fetches all {@link IPerson} beans in the database and converts them
     * to {@link Person} objects.
     *
     * Returns the {@link Person}s sorted by last name, then first.
     **/
    public Person[] getPersons()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            // Column buffer is reused across rows; convertRowToPerson copies it.
            Object[] columns = new Object[Person.N_COLUMNS];
            while (set.next())
            {
                list.add(convertRowToPerson(set, columns));
            }
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Could not fetch all Persons.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return (Person[]) list.toArray(new Person[list.size()]);
    }
    /**
     * Gets the {@link Person} for primary key.
     *
     * @param personId primary key of the person to read
     * @throws FinderException if the Person does not exist.
     **/
    public Person getPerson(Integer personId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Person result = null;
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("WHERE ");
            assembly.add("PERSON_ID = ");
            assembly.addParameter(personId);
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (!set.next())
                throw new FinderException("Person #" + personId + " does not exist.");
            Object[] columns = new Object[Person.N_COLUMNS];
            result = convertRowToPerson(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return result;
    }
    /**
     * Authenticates a user by e-mail address and password and stamps their
     * last-access time on success.
     *
     * @return the {@link Person} value object for the authenticated user
     * @throws LoginException if the e-mail is unknown, the password does not
     *         match, or the account is locked out
     **/
    public Person login(String email, String password) throws RemoteException, LoginException
    {
        IPersonHome home = getPersonHome();
        IPerson person = null;
        Person result = null;
        try
        {
            person = home.findByEmail(email);
        }
        catch (FinderException ex)
        {
            throw new LoginException("Unknown e-mail address.", false);
        }
        // NOTE(review): passwords appear to be stored and compared in plain
        // text here - confirm whether hashing is expected elsewhere.
        if (!person.getPassword().equals(password))
            throw new LoginException("Invalid password.", true);
        try
        {
            result = getPerson((Integer) person.getPrimaryKey());
        }
        catch (FinderException ex)
        {
            throw new LoginException("Could not read person.", false);
        }
        if (result.isLockedOut())
            throw new LoginException("You have been locked out of the Virtual Library.", false);
        // Set the last access time for any subsequent login.
        person.setLastAccess(new Timestamp(System.currentTimeMillis()));
        return result;
    }
public Map getPersonAttributes(Integer personId) throws FinderException, RemoteException
{
IPersonHome home = getPersonHome();
IPerson person = home.findByPrimaryKey(personId);
return person.getEntityAttributes();
}
    /**
     * Retrieves a single {@link Book} by its primary key.
     *
     * @throws FinderException if the Book does not exist.
     *
     **/
    public Book getBook(Integer bookId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Book result = null;
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBaseBookQuery();
            assembly.addSep(" AND ");
            assembly.add("book.BOOK_ID = ");
            assembly.addParameter(bookId);
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (!set.next())
                throw new FinderException("Book " + bookId + " does not exist.");
            Object[] columns = new Object[Book.N_COLUMNS];
            result = convertRowToBook(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return result;
    }
public Map getBookAttributes(Integer bookId) throws FinderException, RemoteException
{
IBookHome home = getBookHome();
IBook book = home.findByPrimaryKey(bookId);
return book.getEntityAttributes();
}
    /**
     * Attempts to register a new user, first checking that the
     * e-mail and names are unique.
     *
     * @return the {@link Person} value object for the newly created user
     *         (not the primary key, as the original javadoc claimed)
     * @throws RegistrationException if the password is blank or the e-mail
     *         or name is already in use
     **/
    public Person registerNewUser(String firstName, String lastName, String email, String password)
        throws RegistrationException, CreateException, RemoteException
    {
        IPersonHome home;
        if (password == null || password.trim().length() == 0)
            throw new RegistrationException("Must specify a password.");
        validateUniquePerson(firstName, lastName, email);
        home = getPersonHome();
        Map attributes = new HashMap();
        attributes.put("lastName", lastName.trim());
        attributes.put("firstName", firstName.trim());
        attributes.put("email", email.trim());
        attributes.put("password", password.trim());
        attributes.put("lastAccess", new Timestamp(System.currentTimeMillis()));
        IPerson person = home.create(attributes);
        Integer personId = (Integer) person.getPrimaryKey();
        try
        {
            return getPerson(personId);
        }
        catch (FinderException ex)
        {
            throw new XCreateException("Unable to find newly created Person.", ex);
        }
    }
    /**
     * Deletes a book, returning a snapshot of it taken just before removal.
     *
     * @param bookId primary key of the book to delete
     * @return the deleted book's value object
     **/
    public Book deleteBook(Integer bookId) throws RemoveException, RemoteException
    {
        IBookHome home = getBookHome();
        Book result = null;
        // Capture the value object first; it cannot be read after remove().
        try
        {
            result = getBook(bookId);
        }
        catch (FinderException ex)
        {
            throw new XRemoveException(ex);
        }
        home.remove(bookId);
        return result;
    }
    /**
     * Transfers a number of books to a new owner.
     *
     * @param newOwnerId primary key of the person receiving the books
     * @param bookIds primary keys of the books to transfer
     **/
    public void transferBooks(Integer newOwnerId, Integer[] bookIds)
        throws FinderException, RemoteException
    {
        if (bookIds == null)
            throw new RemoteException("Must supply non-null list of books to transfer.");
        if (newOwnerId == null)
            throw new RemoteException("Must provide an owner for the books.");
        // Verify that the new owner exists.
        IPersonHome personHome = getPersonHome();
        personHome.findByPrimaryKey(newOwnerId);
        // Direct SQL would be more efficient, but this'll probably do.
        IBookHome home = getBookHome();
        for (int i = 0; i < bookIds.length; i++)
        {
            IBook book = home.findByPrimaryKey(bookIds[i]);
            book.setOwnerId(newOwnerId);
        }
    }
    /**
     * Applies renames and deletions to publishers in a single operation.
     *
     * @param updated publishers whose names are rewritten; may be null
     * @param deleted primary keys of publishers to remove; may be null
     **/
    public void updatePublishers(Publisher[] updated, Integer[] deleted)
        throws FinderException, RemoveException, RemoteException
    {
        IPublisherHome home = getPublisherHome();
        if (updated != null)
        {
            for (int i = 0; i < updated.length; i++)
            {
                IPublisher publisher = home.findByPrimaryKey(updated[i].getId());
                publisher.setName(updated[i].getName());
            }
        }
        if (deleted != null)
        {
            for (int i = 0; i < deleted.length; i++)
            {
                home.remove(deleted[i]);
            }
        }
    }
    /**
     * Bulk-edits persons: rewrites admin/lock flags, resets passwords, and
     * deletes accounts (returning or reassigning their books first).
     *
     * @param updated persons whose admin and locked-out flags are rewritten
     * @param resetPassword primary keys of persons to receive newPassword
     * @param newPassword replacement password for the resetPassword set
     * @param deleted primary keys of persons to remove
     * @param adminId person who inherits books owned by deleted persons
     **/
    public void updatePersons(
        Person[] updated,
        Integer[] resetPassword,
        String newPassword,
        Integer[] deleted,
        Integer adminId)
        throws FinderException, RemoveException, RemoteException
    {
        IPersonHome home = getPersonHome();
        int count = Tapestry.size(updated);
        for (int i = 0; i < count; i++)
        {
            Person u = updated[i];
            IPerson person = home.findByPrimaryKey(u.getId());
            person.setAdmin(u.isAdmin());
            person.setLockedOut(u.isLockedOut());
        }
        count = Tapestry.size(resetPassword);
        for (int i = 0; i < count; i++)
        {
            IPerson person = home.findByPrimaryKey(resetPassword[i]);
            person.setPassword(newPassword);
        }
        count = Tapestry.size(deleted);
        // Books held or owned by deleted persons are fixed up via bulk SQL
        // before the person rows are removed.
        if (count > 0)
        {
            returnBooksFromDeletedPersons(deleted);
            moveBooksFromDeletedPersons(deleted, adminId);
        }
        for (int i = 0; i < count; i++)
            home.remove(deleted[i]);
    }
    /**
     * Invoked to update all books held by people about to be deleted, to
     * reassign the book's holder back to the owner (bulk SQL update).
     *
     **/
    private void returnBooksFromDeletedPersons(Integer deletedPersonIds[]) throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();
        assembly.add("UPDATE BOOK");
        assembly.newLine("SET HOLDER_ID = OWNER_ID");
        assembly.newLine("WHERE HOLDER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");
        executeUpdate(assembly);
    }
    /**
     * Invoked to execute a bulk update that moves books owned by the deleted
     * persons to the new admin.
     *
     **/
    private void moveBooksFromDeletedPersons(Integer deletedPersonIds[], Integer adminId)
        throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();
        assembly.add("UPDATE BOOK");
        assembly.newLine("SET OWNER_ID = ");
        assembly.addParameter(adminId);
        assembly.newLine("WHERE OWNER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");
        executeUpdate(assembly);
    }
    /**
     * Executes a SQL update built by the given assembly, wrapping any SQL
     * failure in an {@link XRemoveException}.
     **/
    private void executeUpdate(StatementAssembly assembly) throws XRemoveException
    {
        Connection connection = null;
        IStatement statement = null;
        try
        {
            connection = getConnection();
            statement = assembly.createStatement(connection);
            statement.executeUpdate();
            statement.close();
            // Null out after closing so the finally block doesn't close twice.
            statement = null;
            connection.close();
            connection = null;
        }
        catch (SQLException ex)
        {
            throw new XRemoveException(
                "Unable to execute " + assembly + ": " + ex.getMessage(),
                ex);
        }
        finally
        {
            close(connection, statement, null);
        }
    }
    /**
     * Translates the next row from the result set into a {@link Book}.
     *
     * <p>This works with queries generated by {@link #buildBaseBookQuery()};
     * the read order below must match {@link #BOOK_SELECT_COLUMNS}.
     *
     **/
    protected Book convertRowToBook(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;
        columns[Book.ID_COLUMN] = set.getObject(column++);
        columns[Book.TITLE_COLUMN] = set.getString(column++);
        columns[Book.DESCRIPTION_COLUMN] = set.getString(column++);
        columns[Book.ISBN_COLUMN] = set.getString(column++);
        columns[Book.OWNER_ID_COLUMN] = set.getObject(column++);
        columns[Book.OWNER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.HOLDER_ID_COLUMN] = set.getObject(column++);
        columns[Book.HOLDER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.PUBLISHER_ID_COLUMN] = set.getObject(column++);
        columns[Book.PUBLISHER_NAME_COLUMN] = set.getString(column++);
        columns[Book.AUTHOR_COLUMN] = set.getString(column++);
        columns[Book.HIDDEN_COLUMN] = getBoolean(set, column++);
        columns[Book.LENDABLE_COLUMN] = getBoolean(set, column++);
        columns[Book.DATE_ADDED_COLUMN] = set.getTimestamp(column++);
        return new Book(columns);
    }
private String buildName(String firstName, String lastName)
{
if (firstName == null)
return lastName;
return firstName + " " + lastName;
}
    /**
     * All queries must use this exact set of select columns, so that
     * {@link #convertRowToBook(ResultSet, Object[])} can build
     * the correct {@link Book} from each row.
     *
     **/
    private static final String[] BOOK_SELECT_COLUMNS =
        {
            "book.BOOK_ID",
            "book.TITLE",
            "book.DESCRIPTION",
            "book.ISBN",
            "owner.PERSON_ID",
            "owner.FIRST_NAME",
            "owner.LAST_NAME",
            "holder.PERSON_ID",
            "holder.FIRST_NAME",
            "holder.LAST_NAME",
            "publisher.PUBLISHER_ID",
            "publisher.NAME",
            "book.AUTHOR",
            "book.HIDDEN",
            "book.LENDABLE",
            "book.DATE_ADDED" };
    /** Table aliases used by {@link #buildBaseBookQuery()}. */
    private static final String[] BOOK_ALIAS_COLUMNS =
        { "BOOK book", "PERSON owner", "PERSON holder", "PUBLISHER publisher" };
    /** Join conditions tying the aliased tables together. */
    private static final String[] BOOK_JOINS =
        {
            "book.OWNER_ID = owner.PERSON_ID",
            "book.HOLDER_ID = holder.PERSON_ID",
            "book.PUBLISHER_ID = publisher.PUBLISHER_ID" };
    // ORDER BY fragments keyed by SortColumn, one map per sort direction;
    // consumed by addSortOrdering().
    private static final Map BOOK_SORT_ASCENDING = new HashMap();
    private static final Map BOOK_SORT_DESCENDING = new HashMap();
    static {
        BOOK_SORT_ASCENDING.put(SortColumn.TITLE, "book.TITLE");
        BOOK_SORT_ASCENDING.put(SortColumn.HOLDER, "holder.LAST_NAME, holder.FIRST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME, owner.LAST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.PUBLISHER, "publisher.NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.AUTHOR, "book.AUTHOR");
        BOOK_SORT_DESCENDING.put(SortColumn.TITLE, "book.TITLE DESC");
        BOOK_SORT_DESCENDING.put(
            SortColumn.HOLDER,
            "holder.LAST_NAME DESC, holder.FIRST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME DESC, owner.LAST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.PUBLISHER, "publisher.NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.AUTHOR, "book.AUTHOR DESC");
    }
    /**
     * Builds the shared SELECT/FROM/WHERE skeleton for all book queries;
     * callers append additional criteria and ordering.
     **/
    protected StatementAssembly buildBaseBookQuery()
    {
        StatementAssembly result = new StatementAssembly();
        result.newLine("SELECT ");
        result.addList(BOOK_SELECT_COLUMNS, ", ");
        result.newLine("FROM ");
        result.addList(BOOK_ALIAS_COLUMNS, ", ");
        result.newLine("WHERE ");
        result.addList(BOOK_JOINS, " AND ");
        return result;
    }
    /**
     * Adds a sort ordering clause to the statement.  If ordering is null,
     * orders by book title.
     *
     * @param assembly to update
     * @param ordering defines the column to sort on, and the order (ascending or descending)
     * @since 3.0
     *
     *
     **/
    protected void addSortOrdering(StatementAssembly assembly, SortOrdering ordering)
    {
        if (ordering == null)
        {
            assembly.newLine("ORDER BY book.TITLE");
            return;
        }
        // Look up the ORDER BY fragment for the requested column/direction.
        Map sorts = ordering.isDescending() ? BOOK_SORT_DESCENDING : BOOK_SORT_ASCENDING;
        String term = (String) sorts.get(ordering.getColumn());
        assembly.newLine("ORDER BY ");
        assembly.add(term);
    }
    /**
     * Appends a case-insensitive LIKE criterion for a substring search; a
     * null or blank value adds no criteria.
     **/
    protected void addSubstringSearch(StatementAssembly assembly, String column, String value)
    {
        if (value == null)
            return;
        String trimmed = value.trim();
        if (trimmed.length() == 0)
            return;
        // Here's the McKoi dependency: LOWER() is a database-specific
        // SQL function.
        assembly.addSep(" AND LOWER(");
        assembly.add(column);
        assembly.add(") LIKE");
        assembly.addParameter("%" + trimmed.toLowerCase() + "%");
    }
    /**
     * Closes the resultSet (if not null), then the statement (if not null),
     * then the Connection (if not null).  Exceptions are written to System.out.
     *
     **/
    protected void close(Connection connection, IStatement statement, ResultSet resultSet)
    {
        // Close in reverse-acquisition order; each close is attempted even if
        // an earlier one fails.
        if (resultSet != null)
        {
            try
            {
                resultSet.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing result set.");
                ex.printStackTrace();
            }
        }
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing statement.");
                ex.printStackTrace();
            }
        }
        if (connection != null)
        {
            try
            {
                connection.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing connection.");
                ex.printStackTrace();
            }
        }
    }
    /** Lazily looks up and caches the Person home interface from the ENC. */
    private IPersonHome getPersonHome()
    {
        if (_personHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Person");
                _personHome = (IPersonHome) PortableRemoteObject.narrow(raw, IPersonHome.class);
            }
            catch (NamingException ex)
            {
                throw new XEJBException("Could not lookup Person home interface.", ex);
            }
        }
        return _personHome;
    }
    /** Lazily looks up and caches the Publisher home interface from the ENC. */
    private IPublisherHome getPublisherHome()
    {
        if (_publisherHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Publisher");
                _publisherHome =
                    (IPublisherHome) PortableRemoteObject.narrow(raw, IPublisherHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Publisher home interface.", e);
            }
        }
        return _publisherHome;
    }
    /** Lazily looks up and caches the Book home interface from the ENC. */
    private IBookHome getBookHome()
    {
        if (_bookHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Book");
                _bookHome = (IBookHome) PortableRemoteObject.narrow(raw, IBookHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Book home interface.", e);
            }
        }
        return _bookHome;
    }
    /**
     * Gets a new connection from the data source.  Callers are responsible
     * for closing it (see {@link #close(Connection, IStatement, ResultSet)}).
     *
     **/
    protected Connection getConnection()
    {
        try
        {
            return _dataSource.getConnection();
        }
        catch (SQLException e)
        {
            throw new XEJBException("Unable to get database connection from pool.", e);
        }
    }
protected StatementAssembly buildBasePersonQuery()
{
StatementAssembly result;
result = new StatementAssembly();
result.newLine("SELECT PERSON_ID, FIRST_NAME, LAST_NAME, EMAIL, ");
result.newLine(" LOCKED_OUT, ADMIN, LAST_ACCESS");
result.newLine("FROM PERSON");
return result;
}
    /**
     * Translates the next row from the result set into a {@link Person}.
     *
     * <p>This works with queries generated by {@link #buildBasePersonQuery()};
     * the read order below must match that query's column order.
     *
     **/
    protected Person convertRowToPerson(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;
        columns[Person.ID_COLUMN] = set.getObject(column++);
        columns[Person.FIRST_NAME_COLUMN] = set.getString(column++);
        columns[Person.LAST_NAME_COLUMN] = set.getString(column++);
        columns[Person.EMAIL_COLUMN] = set.getString(column++);
        columns[Person.LOCKED_OUT_COLUMN] = getBoolean(set, column++);
        columns[Person.ADMIN_COLUMN] = getBoolean(set, column++);
        columns[Person.LAST_ACCESS_COLUMN] = set.getTimestamp(column++);
        return new Person(columns);
    }
private Boolean getBoolean(ResultSet set, int index) throws SQLException
{
return set.getBoolean(index) ? Boolean.TRUE : Boolean.FALSE;
}
    /**
     * Verifies that no existing person already uses the given e-mail address
     * or the same first/last name pair (all compared case-insensitively).
     *
     * @throws RegistrationException if either check finds a match, or on SQL failure
     **/
    private void validateUniquePerson(String firstName, String lastName, String email)
        throws RegistrationException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        String trimmedEmail = email.trim().toLowerCase();
        String trimmedLastName = lastName.trim().toLowerCase();
        String trimmedFirstName = firstName.trim().toLowerCase();
        try
        {
            connection = getConnection();
            // First check: unique e-mail address.
            StatementAssembly assembly = new StatementAssembly();
            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(EMAIL) = ");
            assembly.addParameter(trimmedEmail);
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (set.next())
                throw new RegistrationException("Email address is already in use by another user.");
            close(null, statement, set);
            // Second check: unique first/last name combination.
            assembly = new StatementAssembly();
            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(FIRST_NAME) = ");
            assembly.addParameter(trimmedFirstName);
            assembly.addSep(" AND ");
            assembly.add("LOWER(LAST_NAME) = ");
            assembly.addParameter(trimmedLastName);
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (set.next())
                throw new RegistrationException("Name provided is already in use by another user.");
        }
        catch (SQLException e)
        {
            throw new RegistrationException("Could not access database: " + e.getMessage(), e);
        }
        finally
        {
            close(connection, statement, set);
        }
    }
    /**
     * Returns a borrowed book: the book's holder is reset to its owner.
     *
     * @param bookId primary key of the book being returned
     * @return the updated book value object
     **/
    public Book returnBook(Integer bookId) throws RemoteException, FinderException
    {
        IBookHome bookHome = getBookHome();
        IBook book = bookHome.findByPrimaryKey(bookId);
        Integer ownerPK = book.getOwnerId();
        book.setHolderId(ownerPK);
        return getBook(bookId);
    }
} | apache/tapestry3 | tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/impl/OperationsBean.java | Java | apache-2.0 | 33,019 |
/*
* Copyright © 2014 - 2018 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Contains implementations of graph pattern matching on a single input graph.
 */
package org.gradoop.flink.model.impl.operators.matching.transactional.function;
| niklasteichmann/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/transactional/function/package-info.java | Java | apache-2.0 | 802 |
package org.sakaiproject.scorm.ui.player.behaviors;
import org.adl.api.ecmascript.SCORM13APIInterface;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.scorm.model.api.ScoBean;
import org.sakaiproject.scorm.model.api.SessionBean;
import org.sakaiproject.scorm.navigation.INavigable;
import org.sakaiproject.scorm.navigation.INavigationEvent;
import org.sakaiproject.scorm.service.api.ScormApplicationService;
import org.sakaiproject.scorm.service.api.ScormSequencingService;
/**
 * Base implementation of the SCORM 2004 run-time API.  Each call delegates to
 * the application/sequencing services supplied by the concrete subclass, and
 * returns the JavaScript-style strings "true"/"false" required by the SCORM
 * API contract.
 */
public abstract class SCORM13API implements SCORM13APIInterface {

	private static Log log = LogFactory.getLog(SCORM13API.class);
	// String value of FALSE for JavaScript returns.
	protected static final String STRING_FALSE = "false";
	// String value of TRUE for JavaScript returns.
	protected static final String STRING_TRUE = "true";

	/** @return state for the current learner session */
	public abstract SessionBean getSessionBean();

	/** @return service implementing the SCORM run-time data model calls */
	public abstract ScormApplicationService getApplicationService();

	/** @return service implementing sequencing and navigation */
	public abstract ScormSequencingService getSequencingService();

	/** @return state for the SCO making the current API call */
	public abstract ScoBean getScoBean();

	/** @return agent that performs navigation resulting from API calls */
	public abstract INavigable getAgent();

	/** @return UI target passed through to navigation calls */
	public abstract Object getTarget();

	// Implementation of SCORM13APIInterface

	public String Commit(String parameter) {
		// TODO: Disable UI controls -- or throttle them on server -- don't mess with js
		// Assume failure
		String result = STRING_FALSE;
		if (null == getSessionBean()) {
			log.error("Null run state!");
			// Nothing to commit without session state; fail fast instead of
			// passing a null session on, mirroring Terminate()'s null handling.
			return result;
		}
		if (getApplicationService().commit(parameter, getSessionBean(), getScoBean()))
			result = STRING_TRUE;
		// TODO: Enable UI controls
		return result;
	}

	public String GetDiagnostic(String errorCode) {
		return getApplicationService().getDiagnostic(errorCode, getSessionBean());
	}

	public String GetErrorString(String errorCode) {
		return getApplicationService().getErrorString(errorCode, getSessionBean());
	}

	public String GetLastError() {
		return getApplicationService().getLastError(getSessionBean());
	}

	public String GetValue(String parameter) {
		return getApplicationService().getValue(parameter, getSessionBean(), getScoBean());
	}

	public String Initialize(String parameter) {
		// Assume failure
		String result = STRING_FALSE;
		if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean()))
			result = STRING_TRUE;
		return result;
	}

	public String SetValue(String dataModelElement, String value) {
		// Assume failure
		String result = STRING_FALSE;
		if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) {
			result = STRING_TRUE;
		}
		return result;
	}

	public String Terminate(String parameter) {
		// Assume failure
		String result = STRING_FALSE;
		if (null == getSessionBean()) {
			log.error("Null run state!");
			return result;
		}
		INavigationEvent navigationEvent = getApplicationService().newNavigationEvent();
		boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent,
				getSessionBean(), getScoBean());
		if (isSuccessful) {
			result = STRING_TRUE;
			// Termination may carry a pending navigation request; choice events
			// are routed through a different navigate() argument.
			if (navigationEvent.isChoiceEvent()) {
				getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget());
			} else {
				getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget());
			}
		}
		return result;
	}
}
| marktriggs/nyu-sakai-10.4 | scorm/scorm-tool/src/java/org/sakaiproject/scorm/ui/player/behaviors/SCORM13API.java | Java | apache-2.0 | 3,425 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
/**
* @version
*/
public class CaseInsensitiveMapTest extends TestCase {
public void testLookupCaseAgnostic() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals("cheese", map.get("FOO"));
}
public void testLookupCaseAgnosticAddHeader() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals("cheese", map.get("FOO"));
assertNull(map.get("unknown"));
map.put("bar", "beer");
assertEquals("beer", map.get("bar"));
assertEquals("beer", map.get("Bar"));
assertEquals("beer", map.get("BAR"));
assertNull(map.get("unknown"));
}
public void testLookupCaseAgnosticAddHeader2() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
assertEquals("cheese", map.get("FOO"));
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertNull(map.get("unknown"));
map.put("bar", "beer");
assertEquals("beer", map.get("BAR"));
assertEquals("beer", map.get("bar"));
assertEquals("beer", map.get("Bar"));
assertNull(map.get("unknown"));
}
public void testLookupCaseAgnosticAddHeaderRemoveHeader() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals("cheese", map.get("FOO"));
assertNull(map.get("unknown"));
map.put("bar", "beer");
assertEquals("beer", map.get("bar"));
assertEquals("beer", map.get("Bar"));
assertEquals("beer", map.get("BAR"));
assertNull(map.get("unknown"));
map.remove("bar");
assertNull(map.get("bar"));
assertNull(map.get("unknown"));
}
public void testSetWithDifferentCase() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
map.put("Foo", "bar");
assertEquals("bar", map.get("FOO"));
assertEquals("bar", map.get("foo"));
assertEquals("bar", map.get("Foo"));
}
public void testRemoveWithDifferentCase() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
map.put("foo", "cheese");
map.put("Foo", "bar");
assertEquals("bar", map.get("FOO"));
assertEquals("bar", map.get("foo"));
assertEquals("bar", map.get("Foo"));
map.remove("FOO");
assertEquals(null, map.get("foo"));
assertEquals(null, map.get("Foo"));
assertEquals(null, map.get("FOO"));
assertTrue(map.isEmpty());
}
public void testPutAll() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
Map<String, Object> other = new CaseInsensitiveMap();
other.put("Foo", "cheese");
other.put("bar", 123);
map.putAll(other);
assertEquals("cheese", map.get("FOO"));
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals(123, map.get("BAR"));
assertEquals(123, map.get("bar"));
assertEquals(123, map.get("BaR"));
// key case should be preserved
Map<String, Object> keys = new HashMap<String, Object>();
keys.putAll(map);
assertEquals("cheese", keys.get("Foo"));
assertNull(keys.get("foo"));
assertNull(keys.get("FOO"));
assertEquals(123, keys.get("bar"));
assertNull(keys.get("Bar"));
assertNull(keys.get("BAR"));
}
public void testPutAllOther() {
Map<String, Object> map = new CaseInsensitiveMap();
assertNull(map.get("foo"));
Map<String, Object> other = new HashMap<String, Object>();
other.put("Foo", "cheese");
other.put("bar", 123);
map.putAll(other);
assertEquals("cheese", map.get("FOO"));
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals(123, map.get("BAR"));
assertEquals(123, map.get("bar"));
assertEquals(123, map.get("BaR"));
}
public void testPutAllEmpty() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("foo", "cheese");
Map<String, Object> other = new HashMap<String, Object>();
map.putAll(other);
assertEquals("cheese", map.get("FOO"));
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals(1, map.size());
}
public void testConstructFromOther() {
Map<String, Object> other = new HashMap<String, Object>();
other.put("Foo", "cheese");
other.put("bar", 123);
Map<String, Object> map = new CaseInsensitiveMap(other);
assertEquals("cheese", map.get("FOO"));
assertEquals("cheese", map.get("foo"));
assertEquals("cheese", map.get("Foo"));
assertEquals(123, map.get("BAR"));
assertEquals(123, map.get("bar"));
assertEquals(123, map.get("BaR"));
}
public void testKeySet() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", 123);
map.put("baZ", "beer");
Set keys = map.keySet();
// we should be able to lookup no matter what case
assertTrue(keys.contains("Foo"));
assertTrue(keys.contains("foo"));
assertTrue(keys.contains("FOO"));
assertTrue(keys.contains("BAR"));
assertTrue(keys.contains("bar"));
assertTrue(keys.contains("Bar"));
assertTrue(keys.contains("baZ"));
assertTrue(keys.contains("baz"));
assertTrue(keys.contains("Baz"));
assertTrue(keys.contains("BAZ"));
}
public void testRetainKeysCopyToAnotherMap() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", 123);
map.put("baZ", "beer");
Map<String, Object> other = new HashMap<String, Object>(map);
// we should retain the cases of the original keys
// when its copied to another map
assertTrue(other.containsKey("Foo"));
assertFalse(other.containsKey("foo"));
assertFalse(other.containsKey("FOO"));
assertTrue(other.containsKey("BAR"));
assertFalse(other.containsKey("bar"));
assertFalse(other.containsKey("Bar"));
assertTrue(other.containsKey("baZ"));
assertFalse(other.containsKey("baz"));
assertFalse(other.containsKey("Baz"));
assertFalse(other.containsKey("BAZ"));
}
public void testValues() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", "123");
map.put("baZ", "Beer");
Iterator it = map.values().iterator();
// should be String values
assertEquals("String", it.next().getClass().getSimpleName());
assertEquals("String", it.next().getClass().getSimpleName());
assertEquals("String", it.next().getClass().getSimpleName());
Collection values = map.values();
assertEquals(3, values.size());
assertTrue(values.contains("cheese"));
assertTrue(values.contains("123"));
assertTrue(values.contains("Beer"));
}
public void testRomeks() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("foo", "cheese");
assertEquals(1, map.size());
assertEquals("cheese", map.get("fOo"));
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("FOO"));
assertEquals(true, map.keySet().contains("FOO"));
map.put("FOO", "cake");
assertEquals(1, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("FOO"));
assertEquals("cake", map.get("fOo"));
}
public void testRomeksUsingRegularHashMap() {
Map<String, Object> map = new HashMap<String, Object>();
map.put("foo", "cheese");
assertEquals(1, map.size());
assertEquals(null, map.get("fOo"));
assertEquals(true, map.containsKey("foo"));
assertEquals(false, map.containsKey("FOO"));
assertEquals(false, map.keySet().contains("FOO"));
map.put("FOO", "cake");
assertEquals(2, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("FOO"));
assertEquals(null, map.get("fOo"));
assertEquals("cheese", map.get("foo"));
assertEquals("cake", map.get("FOO"));
}
public void testRomeksTransferredToHashMapAfterwards() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("FOO", "cake");
assertEquals(1, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("FOO"));
Map<String, Object> other = new HashMap<String, Object>(map);
assertEquals(false, other.containsKey("foo"));
assertEquals(true, other.containsKey("FOO"));
assertEquals(1, other.size());
}
public void testSerialization() throws Exception {
CaseInsensitiveMap testMap = new CaseInsensitiveMap();
testMap.put("key", "value");
// force entry set to be created which could cause the map to be non serializable
testMap.entrySet();
ByteArrayOutputStream bStream = new ByteArrayOutputStream();
ObjectOutputStream objStream = new ObjectOutputStream(bStream);
objStream.writeObject(testMap);
ObjectInputStream inStream = new ObjectInputStream(new ByteArrayInputStream(bStream.toByteArray()));
CaseInsensitiveMap testMapCopy = (CaseInsensitiveMap) inStream.readObject();
assertTrue(testMapCopy.containsKey("key"));
}
public void testCopyToAnotherMapPreserveKeyCaseEntrySet() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", "cake");
assertEquals(2, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("bar"));
Map<String, Object> other = new HashMap<String, Object>();
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
other.put(key, value);
}
assertEquals(false, other.containsKey("foo"));
assertEquals(true, other.containsKey("Foo"));
assertEquals(false, other.containsKey("bar"));
assertEquals(true, other.containsKey("BAR"));
assertEquals(2, other.size());
}
public void testCopyToAnotherMapPreserveKeyCasePutAll() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", "cake");
assertEquals(2, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("bar"));
Map<String, Object> other = new HashMap<String, Object>();
other.putAll(map);
assertEquals(false, other.containsKey("foo"));
assertEquals(true, other.containsKey("Foo"));
assertEquals(false, other.containsKey("bar"));
assertEquals(true, other.containsKey("BAR"));
assertEquals(2, other.size());
}
public void testCopyToAnotherMapPreserveKeyCaseCtr() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", "cake");
assertEquals(2, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("bar"));
Map<String, Object> other = new HashMap<String, Object>(map);
assertEquals(false, other.containsKey("foo"));
assertEquals(true, other.containsKey("Foo"));
assertEquals(false, other.containsKey("bar"));
assertEquals(true, other.containsKey("BAR"));
assertEquals(2, other.size());
}
public void testCopyToAnotherMapPreserveKeyKeySet() {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Foo", "cheese");
map.put("BAR", "cake");
assertEquals(2, map.size());
assertEquals(true, map.containsKey("foo"));
assertEquals(true, map.containsKey("bar"));
Map<String, Object> other = new HashMap<String, Object>();
// this is wrong!!! you should use entrySet
for (String key : map.keySet()) {
Object value = map.get(key);
other.put(key, value);
}
// now the keys will be in lower case
assertEquals(true, other.containsKey("foo"));
assertEquals(false, other.containsKey("Foo"));
assertEquals(true, other.containsKey("bar"));
assertEquals(false, other.containsKey("BAR"));
assertEquals(2, other.size());
}
public void testConcurrent() throws Exception {
ExecutorService service = Executors.newFixedThreadPool(5);
final CountDownLatch latch = new CountDownLatch(1000);
final Map<String, Object> map = new CaseInsensitiveMap();
// do some stuff concurrently
for (int i = 0; i < 1000; i++) {
final int count = i;
service.submit(new Runnable() {
public void run() {
Map<String, Object> foo = new CaseInsensitiveMap();
foo.put("counter" + count, count);
foo.put("foo", 123);
foo.put("bar", 456);
foo.put("cake", "cheese");
// copy foo to map as map is a shared resource
map.putAll(foo);
latch.countDown();
}
});
}
latch.await(10, TimeUnit.SECONDS);
assertEquals(1003, map.size());
assertEquals(true, map.containsKey("counter0"));
assertEquals(true, map.containsKey("counter500"));
assertEquals(true, map.containsKey("counter999"));
assertEquals(123, map.get("FOO"));
assertEquals(456, map.get("Bar"));
assertEquals("cheese", map.get("cAKe"));
service.shutdownNow();
}
public void testCopyMapWithCamelHeadersTest() throws Exception {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("CamelA", "A");
map.put("CamelB", "B");
map.put("CamelC", "C");
// retain maps so we can profile that the map doesn't duplicate
// camel keys as they are intern
List<Map> maps = new ArrayList<Map>();
for (int i = 0; i < 10000; i++) {
Map<String, Object> copy = new CaseInsensitiveMap(map);
assertEquals(3, copy.size());
assertEquals("A", copy.get("CamelA"));
assertEquals("B", copy.get("CamelB"));
assertEquals("C", copy.get("CamelC"));
maps.add(copy);
}
assertEquals(10000, maps.size());
assertEquals(3, map.size());
assertEquals("A", map.get("CamelA"));
assertEquals("B", map.get("CamelB"));
assertEquals("C", map.get("CamelC"));
// use a memory profiler to see memory allocation
// often you may want to give it time to run so you
// have chance to capture memory snapshot in profiler
// Thread.sleep(9999999);
}
} | chicagozer/rheosoft | camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java | Java | apache-2.0 | 17,444 |
/*
* Copyright DbMaintain.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dbmaintain.script.parser.impl;
import org.dbmaintain.script.parser.ScriptParser;
import org.dbmaintain.script.parser.parsingstate.ParsingState;
import org.dbmaintain.util.DbMaintainException;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Properties;
/**
 * A class for parsing statements out of sql scripts.
 * <p/>
 * All statements should be separated with a semicolon (;). The last statement will be
 * added even if it does not end with a semicolon. The semicolons will not be included in the returned statements.
 * <p/>
 * This parser also takes quoted literals, double quoted text and in-line (--comment) and block (/ * comment * /)
 * into account when parsing the statements.
 *
 * @author Tim Ducheyne
 * @author Filip Neven
 * @author Stefan Bangels
 */
public class DefaultScriptParser implements ScriptParser {

    /**
     * The reader for the script content stream
     */
    protected Reader scriptReader;

    /**
     * Whether backslash escaping is enabled
     */
    protected boolean backSlashEscapingEnabled;

    /**
     * Parameters that must be replaced in the script. Null if there are no such parameters
     */
    protected Properties scriptParameters;

    /**
     * The starting state
     */
    protected ParsingState initialParsingState;

    /**
     * True if the script has ended
     */
    protected boolean endOfScriptReached = false;

    /**
     * The current parsed character and a one-character lookahead
     */
    protected Character currentChar, nextChar;

    /**
     * Constructor for DefaultScriptParser.
     *
     * @param scriptReader the reader that will provide the script content, not null
     * @param initialParsingState the initial state when starting to parse a script, not null
     * @param backSlashEscapingEnabled true if backslash escaping is enabled
     * @param scriptParameters parameters that must be replaced in the script. null if there are no such parameters.
     */
    public DefaultScriptParser(Reader scriptReader, ParsingState initialParsingState, boolean backSlashEscapingEnabled,
                               Properties scriptParameters) {
        this.backSlashEscapingEnabled = backSlashEscapingEnabled;
        this.initialParsingState = initialParsingState;
        this.scriptParameters = scriptParameters;
        // Wrap once in a BufferedReader: the parser reads one character at a
        // time. (The previous double assignment of scriptReader was redundant.)
        this.scriptReader = new BufferedReader(scriptReader);
    }

    /**
     * Parses the next statement out of the given script stream.
     *
     * @return the statements, null if no more statements
     */
    public String getNextStatement() {
        try {
            return getNextStatementImpl();
        } catch (IOException e) {
            throw new DbMaintainException("Unable to parse next statement from script.", e);
        }
    }

    /**
     * Actual implementation of getNextStatement.
     *
     * @return the statements, null if no more statements
     * @throws IOException if a problem occurs reading the script from the file system
     */
    protected String getNextStatementImpl() throws IOException {
        StatementBuilder statementBuilder = createStatementBuilder();
        // Make sure that we read currentChar when we start reading a new script. If not null, currentChar was already
        // set to the first character of the next statement when we read the previous statement.
        if (currentChar == null) {
            currentChar = readNextCharacter();
        }
        while (!endOfScriptReached) {
            if (currentChar == null) {
                endOfScriptReached = true;
            }
            nextChar = readNextCharacter();
            statementBuilder.addCharacter(currentChar, nextChar);
            currentChar = nextChar;
            if (statementBuilder.isComplete()) {
                if (statementBuilder.hasExecutableContent()) {
                    return statementBuilder.buildStatement();
                }
                // Statement held only comments/whitespace: discard and continue.
                statementBuilder = createStatementBuilder();
            }
        }
        if (!statementBuilder.isComplete() && statementBuilder.hasExecutableContent()) {
            throw new DbMaintainException("Last statement in script was not ended correctly.");
        }
        return null;
    }

    /**
     * Reads the next character from the script stream.
     *
     * @return the next character, null when the end of the stream is reached
     * @throws IOException if reading fails
     */
    protected Character readNextCharacter() throws IOException {
        int charAsInt = scriptReader.read();
        return charAsInt == -1 ? null : (char) charAsInt;
    }

    /**
     * Factory method for the statement builder.
     *
     * @return The statement builder, not null
     */
    protected StatementBuilder createStatementBuilder() {
        return new StatementBuilder(initialParsingState, scriptParameters);
    }
}
| fcamblor/dbmaintain-maven-plugin | dbmaintain/src/main/java/org/dbmaintain/script/parser/impl/DefaultScriptParser.java | Java | apache-2.0 | 5,339 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteAtomicSequence;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.BaselineNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.configuration.PlatformConfiguration;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.cluster.DetachedClusterNode;
import org.apache.ignite.internal.logger.platform.PlatformLogger;
import org.apache.ignite.internal.processors.GridProcessorAdapter;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl;
import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl;
import org.apache.ignite.internal.processors.platform.binary.PlatformBinaryProcessor;
import org.apache.ignite.internal.processors.platform.cache.PlatformCache;
import org.apache.ignite.internal.processors.platform.cache.PlatformCacheExtension;
import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity;
import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore;
import org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup;
import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicReference;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicSequence;
import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore;
import org.apache.ignite.internal.processors.platform.memory.PlatformMemory;
import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream;
import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions;
import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.FALSE;
import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.TRUE;
import static org.apache.ignite.internal.processors.platform.client.ClientConnectionContext.CURRENT_VER;
/**
* GridGain platform processor.
*/
@SuppressWarnings({"unchecked"})
public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor, PlatformTarget {
/** Op code: get an existing cache. */
private static final int OP_GET_CACHE = 1;

/** Op code: create a cache by name. */
private static final int OP_CREATE_CACHE = 2;

/** Op code: get or create a cache by name. */
private static final int OP_GET_OR_CREATE_CACHE = 3;

/** Op code: create a cache from a serialized configuration. */
private static final int OP_CREATE_CACHE_FROM_CONFIG = 4;

/** Op code: get or create a cache from a serialized configuration. */
private static final int OP_GET_OR_CREATE_CACHE_FROM_CONFIG = 5;

/** Op code: destroy a cache by name. */
private static final int OP_DESTROY_CACHE = 6;

/** Op code: get affinity for a cache. */
private static final int OP_GET_AFFINITY = 7;

/** Op code: get a data streamer. */
private static final int OP_GET_DATA_STREAMER = 8;

/** Op code: get the transactions facade. */
private static final int OP_GET_TRANSACTIONS = 9;

/** Op code: get the cluster group. */
private static final int OP_GET_CLUSTER_GROUP = 10;

/** Op code: get a platform plugin extension. */
private static final int OP_GET_EXTENSION = 11;

/** Op code: get an atomic long. */
private static final int OP_GET_ATOMIC_LONG = 12;

/** Op code: get an atomic reference. */
private static final int OP_GET_ATOMIC_REFERENCE = 13;

/** Op code: get an atomic sequence. */
private static final int OP_GET_ATOMIC_SEQUENCE = 14;

/** Op code: get the Ignite configuration. */
private static final int OP_GET_IGNITE_CONFIGURATION = 15;

/** Op code: get the names of all caches. */
private static final int OP_GET_CACHE_NAMES = 16;

/** Op code: create a near cache. */
private static final int OP_CREATE_NEAR_CACHE = 17;

/** Op code: get or create a near cache. */
private static final int OP_GET_OR_CREATE_NEAR_CACHE = 18;

/** Op code: check whether a log level is enabled (see {@code loggerIsLevelEnabled}). */
private static final int OP_LOGGER_IS_LEVEL_ENABLED = 19;

/** Op code: write a log message (see {@code loggerLog}). */
private static final int OP_LOGGER_LOG = 20;

/** Op code: get the binary processor. */
private static final int OP_GET_BINARY_PROCESSOR = 21;

/** Op code: release the start latch (see {@code releaseStart}). */
private static final int OP_RELEASE_START = 22;

/** Op code: register a cache configuration template. */
private static final int OP_ADD_CACHE_CONFIGURATION = 23;

/** Op code: set baseline topology by topology version. */
private static final int OP_SET_BASELINE_TOPOLOGY_VER = 24;

/** Op code: set baseline topology from an explicit node list. */
private static final int OP_SET_BASELINE_TOPOLOGY_NODES = 25;

/** Op code: read the current baseline topology. */
private static final int OP_GET_BASELINE_TOPOLOGY = 26;

/** Op code: disable WAL for a cache. */
private static final int OP_DISABLE_WAL = 27;

/** Op code: enable WAL for a cache. */
private static final int OP_ENABLE_WAL = 28;

/** Op code: check whether WAL is enabled for a cache. */
private static final int OP_IS_WAL_ENABLED = 29;

/** Op code: set transaction timeout on partition map exchange. */
private static final int OP_SET_TX_TIMEOUT_ON_PME = 30;

/** Start latch. Released by {@code releaseStart()} / {@code onKernalStop()}. */
private final CountDownLatch startLatch = new CountDownLatch(1);

/** Stores pending initialization (registered before the native libraries were loaded). */
private final Collection<StoreInfo> pendingStores =
    Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>());

/** Lock for store lifecycle operations: readers register stores, the writer flushes them on start. */
private final ReadWriteLock storeLock = new ReentrantReadWriteLock();

/** Logger. */
@SuppressWarnings("FieldCanBeLocal")
private final IgniteLogger log;

/** Context shared with the native platform side. */
private final PlatformContext platformCtx;

/** Interop configuration. */
private final PlatformConfigurationEx interopCfg;

/** Extensions. */
private final PlatformPluginExtension[] extensions;

/** Whether processor is started. */
private boolean started;

/** Whether processor if stopped (or stopping). */
private volatile boolean stopped;

/** Cache extensions. */
private final PlatformCacheExtension[] cacheExts;

/** Cluster restart flag for the reconnect callback. */
private volatile boolean clusterRestarted;
/**
 * Constructor. Validates the platform configuration, builds the platform
 * context and prepares cache/plugin extensions.
 *
 * @param ctx Kernal context.
 * @throws IgniteException if the configured {@code PlatformConfiguration} is
 *         not a {@code PlatformConfigurationEx}.
 */
public PlatformProcessorImpl(GridKernalContext ctx) {
    super(ctx);
    log = ctx.log(PlatformProcessorImpl.class);
    PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration();
    assert interopCfg0 != null : "Must be checked earlier during component creation.";
    // Only the extended configuration carries gateway/memory/platform info.
    if (!(interopCfg0 instanceof PlatformConfigurationEx))
        throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName());
    interopCfg = (PlatformConfigurationEx)interopCfg0;
    // Surface any configuration warnings collected on the platform side.
    if (!F.isEmpty(interopCfg.warnings())) {
        for (String w : interopCfg.warnings())
            U.warn(log, w);
    }
    platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory(), interopCfg.platform());
    // Initialize cache extensions (if any).
    cacheExts = prepareCacheExtensions(interopCfg.cacheExtensions());
    if (interopCfg.logger() != null)
        interopCfg.logger().setContext(platformCtx);
    // Initialize extensions (if any).
    extensions = prepareExtensions(ctx.plugins().extensions(PlatformPluginExtension.class));
}
/**
 * {@inheritDoc}
 * <p>Notifies the native gateway that the node is starting (passing the
 * instance name through interop memory), then flushes stores that were
 * registered before the native libraries were loaded.</p>
 */
@Override public void start() throws IgniteCheckedException {
    try (PlatformMemory mem = platformCtx.memory().allocate()) {
        PlatformOutputStream out = mem.output();
        BinaryRawWriterEx writer = platformCtx.writer(out);
        writer.writeString(ctx.igniteInstanceName());
        out.synchronize();
        platformCtx.gateway().onStart(new PlatformTargetProxyImpl(this, platformCtx), mem.pointer());
    }
    // At this moment all necessary native libraries must be loaded, so we can process with store creation.
    // Write lock excludes concurrent registerStore() calls while flushing.
    storeLock.writeLock().lock();
    try {
        for (StoreInfo store : pendingStores)
            registerStore0(store.store, store.convertBinary);
        pendingStores.clear();
        started = true;
    }
    finally {
        storeLock.writeLock().unlock();
    }
    // Add Interop node attributes.
    ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform());
}
/**
 * {@inheritDoc}
 * <p>Releases the start latch so {@link #awaitStart()} waiters do not hang
 * when the node stops before start completes.</p>
 */
@Override public void onKernalStop(boolean cancel) {
    startLatch.countDown();
}
/**
 * {@inheritDoc}
 * <p>Marks the processor as stopping (so late {@link #registerStore} calls
 * fail fast) and notifies the native gateway.</p>
 */
@Override public void stop(boolean cancel) throws IgniteCheckedException {
    if (platformCtx != null) {
        stopped = true;
        platformCtx.gateway().onStop();
    }
}
/**
 * {@inheritDoc}
 * <p>Returns the Ignite instance owning this processor.</p>
 */
@Override public Ignite ignite() {
    return ctx.grid();
}
/**
 * {@inheritDoc}
 * <p>Delegates to the gateway's native environment pointer.</p>
 */
@Override public long environmentPointer() {
    return platformCtx.gateway().environmentPointer();
}
/**
 * {@inheritDoc}
 * <p>Unblocks {@link #awaitStart()} waiters; invoked by the platform side
 * (op code {@code OP_RELEASE_START}) once it has finished its own startup.</p>
 */
@Override public void releaseStart() {
    startLatch.countDown();
}
/**
 * {@inheritDoc}
 * <p>Blocks until {@link #releaseStart()} or {@link #onKernalStop(boolean)}
 * releases the start latch.</p>
 */
@Override public void awaitStart() throws IgniteCheckedException {
    U.await(startLatch);
}
/**
 * {@inheritDoc}
 * <p>Returns the shared platform context.</p>
 */
@Override public PlatformContext context() {
    return platformCtx;
}
/**
 * {@inheritDoc}
 * <p>Takes the read lock so multiple stores may register concurrently;
 * {@link #start()} takes the write lock when flushing pending stores.
 * Before start, registrations are queued in {@code pendingStores} because
 * the native libraries are not loaded yet.</p>
 */
@Override public void registerStore(PlatformCacheStore store, boolean convertBinary)
    throws IgniteCheckedException {
    storeLock.readLock().lock();
    try {
        if (stopped)
            throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: " +
                store);
        if (started)
            registerStore0(store, convertBinary);
        else
            // Defer until start() has loaded the native libraries.
            pendingStores.add(new StoreInfo(store, convertBinary));
    }
    finally {
        storeLock.readLock().unlock();
    }
}
/**
 * {@inheritDoc}
 * <p>Forwards the client-disconnect event to the native gateway and defers
 * the reconnect callback until the whole grid has reconnected.</p>
 */
@Override public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
    platformCtx.gateway().onClientDisconnected();
    // 1) onReconnected is called on all grid components.
    // 2) After all of grid components have completed their reconnection, reconnectFut is completed.
    reconnectFut.listen(new CI1<IgniteFuture<?>>() {
        @Override public void apply(IgniteFuture<?> future) {
            platformCtx.gateway().onClientReconnected(clusterRestarted);
        }
    });
}
/**
 * {@inheritDoc}
 * <p>Only records the restart flag; the actual native callback fires from
 * the listener registered in {@link #onDisconnected(IgniteFuture)}.</p>
 */
@Override public IgniteInternalFuture<?> onReconnected(boolean clusterRestarted) throws IgniteCheckedException {
    // Save the flag value for callback of reconnectFut.
    this.clusterRestarted = clusterRestarted;
    return null;
}
/**
 * Creates new platform cache wrapper around the given cache proxy.
 *
 * @param cache Cache proxy to wrap, not null.
 * @return Platform cache target.
 */
private PlatformTarget createPlatformCache(IgniteCacheProxy cache) {
    assert cache != null;
    return new PlatformCache(platformCtx, cache, false, cacheExts);
}
/**
 * Checks whether the given platform log level is enabled on the grid logger.
 *
 * @param level Platform log level ({@code PlatformLogger.LVL_*}).
 * @return {@code true} if messages at that level would be logged.
 */
private boolean loggerIsLevelEnabled(int level) {
    IgniteLogger log = ctx.grid().log();
    switch (level) {
        case PlatformLogger.LVL_TRACE:
            return log.isTraceEnabled();
        case PlatformLogger.LVL_DEBUG:
            return log.isDebugEnabled();
        case PlatformLogger.LVL_INFO:
            return log.isInfoEnabled();
        case PlatformLogger.LVL_WARN:
        case PlatformLogger.LVL_ERROR:
            // Warnings and errors are always enabled.
            return true;
        default:
            assert false;
            return false;
    }
}
/**
 * Logs to the Ignite logger on behalf of the platform side.
 *
 * @param level Platform log level ({@code PlatformLogger.LVL_*}).
 * @param message Message.
 * @param category Category (sub-logger name), may be null.
 * @param errorInfo Error details from the platform side, may be null.
 */
private void loggerLog(int level, String message, String category, String errorInfo) {
    IgniteLogger log = ctx.grid().log();
    if (category != null)
        log = log.getLogger(category);
    // Platform errors arrive as text; wrap them so warn/error overloads get a Throwable.
    Throwable err = errorInfo == null ? null : new IgniteException("Platform error:" + errorInfo);
    switch (level) {
        case PlatformLogger.LVL_TRACE:
            log.trace(message);
            break;
        case PlatformLogger.LVL_DEBUG:
            log.debug(message);
            break;
        case PlatformLogger.LVL_INFO:
            log.info(message);
            break;
        case PlatformLogger.LVL_WARN:
            log.warning(message, err);
            break;
        case PlatformLogger.LVL_ERROR:
            log.error(message, err);
            break;
        default:
            // Unknown level: programming error on the platform side.
            assert false;
    }
}
/**
 * {@inheritDoc}
 * <p>Handles long-in/long-out operations; booleans are encoded as
 * {@code TRUE}/{@code FALSE}. Unknown op codes throw via
 * {@code PlatformAbstractTarget.throwUnsupported}.</p>
 */
@Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException {
    switch (type) {
        case OP_LOGGER_IS_LEVEL_ENABLED: {
            return loggerIsLevelEnabled((int) val) ? TRUE : FALSE;
        }
        case OP_RELEASE_START: {
            releaseStart();
            return 0;
        }
        case OP_SET_BASELINE_TOPOLOGY_VER: {
            ctx.grid().cluster().setBaselineTopology(val);
            return 0;
        }
    }
    return PlatformAbstractTarget.throwUnsupported(type);
}
/**
 * {@inheritDoc}
 * <p>Handles stream-in/long-out operations; arguments are read from the
 * binary reader in the order the platform side wrote them.</p>
 */
@Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
    switch (type) {
        case OP_DESTROY_CACHE: {
            ctx.grid().destroyCache(reader.readString());
            return 0;
        }
        case OP_LOGGER_LOG: {
            // Order: level, message, category, errorInfo.
            loggerLog(reader.readInt(), reader.readString(), reader.readString(), reader.readString());
            return 0;
        }
        case OP_SET_BASELINE_TOPOLOGY_NODES: {
            int cnt = reader.readInt();
            Collection<BaselineNode> nodes = new ArrayList<>(cnt);
            // Each node: consistent id followed by its attribute map.
            for (int i = 0; i < cnt; i++) {
                Object consId = reader.readObjectDetached();
                Map<String, Object> attrs = PlatformUtils.readNodeAttributes(reader);
                nodes.add(new DetachedClusterNode(consId, attrs));
            }
            ctx.grid().cluster().setBaselineTopology(nodes);
            return 0;
        }
        case OP_ADD_CACHE_CONFIGURATION:
            CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
            ctx.grid().addCacheConfiguration(cfg);
            return 0;
        case OP_DISABLE_WAL:
            ctx.grid().cluster().disableWal(reader.readString());
            return 0;
        case OP_ENABLE_WAL:
            ctx.grid().cluster().enableWal(reader.readString());
            return 0;
        case OP_SET_TX_TIMEOUT_ON_PME:
            ctx.grid().cluster().setTxTimeoutOnPartitionMapExchange(reader.readLong());
            return 0;
        case OP_IS_WAL_ENABLED:
            return ctx.grid().cluster().isWalEnabled(reader.readString()) ? TRUE : FALSE;
    }
    return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader, PlatformMemory mem) throws IgniteCheckedException {
    // The extra memory chunk is unused here; delegate to the two-argument overload.
    return processInStreamOutLong(type, reader);
}
/** {@inheritDoc} */
@Override public void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer) throws IgniteCheckedException {
    // No in-stream/out-stream operations are supported by this target.
    PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformTarget processInStreamOutObject(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
    // NOTE: the reads below must match the write order on the platform side exactly.
    switch (type) {
        case OP_GET_CACHE: {
            String name = reader.readString();
            IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name);
            if (cache == null)
                throw new IllegalArgumentException("Cache doesn't exist: " + name);
            return createPlatformCache(cache);
        }
        case OP_CREATE_CACHE: {
            String name = reader.readString();
            IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name);
            return createPlatformCache(cache);
        }
        case OP_GET_OR_CREATE_CACHE: {
            String name = reader.readString();
            IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name);
            return createPlatformCache(cache);
        }
        case OP_CREATE_CACHE_FROM_CONFIG: {
            CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
            // The boolean flag signals whether a near-cache configuration follows in the stream.
            IgniteCacheProxy cache = reader.readBoolean()
                ? (IgniteCacheProxy)ctx.grid().createCache(cfg, PlatformConfigurationUtils.readNearConfiguration(reader))
                : (IgniteCacheProxy)ctx.grid().createCache(cfg);
            return createPlatformCache(cache);
        }
        case OP_GET_OR_CREATE_CACHE_FROM_CONFIG: {
            CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
            // Same wire format as OP_CREATE_CACHE_FROM_CONFIG: optional near-cache config after a flag.
            IgniteCacheProxy cache = reader.readBoolean()
                ? (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg,
                PlatformConfigurationUtils.readNearConfiguration(reader))
                : (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg);
            return createPlatformCache(cache);
        }
        case OP_GET_AFFINITY: {
            return new PlatformAffinity(platformCtx, ctx, reader.readString());
        }
        case OP_GET_DATA_STREAMER: {
            String cacheName = reader.readString();
            boolean keepBinary = reader.readBoolean();
            IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName);
            // The underlying streamer is always put into binary mode; the keepBinary flag read
            // above is handed to the platform wrapper instead.
            ldr.keepBinary(true);
            return new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary);
        }
        case OP_GET_EXTENSION: {
            int id = reader.readInt();
            if (extensions != null && id < extensions.length) {
                // Extensions are indexed by their ID; gaps in the array are null.
                PlatformPluginExtension ext = extensions[id];
                if (ext != null) {
                    return ext.createTarget();
                }
            }
            throw new IgniteException("Platform extension is not registered [id=" + id + ']');
        }
        case OP_GET_ATOMIC_LONG: {
            String name = reader.readString();
            long initVal = reader.readLong();
            boolean create = reader.readBoolean();
            GridCacheAtomicLongImpl atomicLong = (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create);
            // Null result is propagated to the platform side as-is.
            if (atomicLong == null)
                return null;
            return new PlatformAtomicLong(platformCtx, atomicLong);
        }
        case OP_GET_ATOMIC_REFERENCE: {
            String name = reader.readString();
            Object initVal = reader.readObjectDetached();
            boolean create = reader.readBoolean();
            return PlatformAtomicReference.createInstance(platformCtx, name, initVal, create);
        }
        case OP_GET_ATOMIC_SEQUENCE: {
            String name = reader.readString();
            long initVal = reader.readLong();
            boolean create = reader.readBoolean();
            IgniteAtomicSequence atomicSeq = ignite().atomicSequence(name, initVal, create);
            if (atomicSeq == null)
                return null;
            return new PlatformAtomicSequence(platformCtx, atomicSeq);
        }
        case OP_CREATE_NEAR_CACHE: {
            String cacheName = reader.readString();
            NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);
            IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createNearCache(cacheName, cfg);
            return createPlatformCache(cache);
        }
        case OP_GET_OR_CREATE_NEAR_CACHE: {
            String cacheName = reader.readString();
            NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);
            IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateNearCache(cacheName, cfg);
            return createPlatformCache(cache);
        }
        case OP_GET_TRANSACTIONS: {
            String lbl = reader.readString();
            return new PlatformTransactions(platformCtx, lbl);
        }
    }
    return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformTarget processInObjectStreamOutObjectStream(int type, @Nullable PlatformTarget arg,
    BinaryRawReaderEx reader,
    BinaryRawWriterEx writer)
    throws IgniteCheckedException {
    // No object-stream operations are supported by this target.
    return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException {
    switch (type) {
        case OP_GET_IGNITE_CONFIGURATION:
            PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration(), CURRENT_VER);

            return;

        case OP_GET_CACHE_NAMES: {
            // Counted list: size first, then each name.
            Collection<String> cacheNames = ignite().cacheNames();

            writer.writeInt(cacheNames.size());

            for (String cacheName : cacheNames)
                writer.writeString(cacheName);

            return;
        }

        case OP_GET_BASELINE_TOPOLOGY: {
            // Baseline nodes are serialized as (consistentId, attributes) pairs.
            Collection<BaselineNode> baseline = ignite().cluster().currentBaselineTopology();

            writer.writeInt(baseline.size());

            for (BaselineNode node : baseline) {
                writer.writeObjectDetached(node.consistentId());

                PlatformUtils.writeNodeAttributes(writer, node.attributes());
            }

            return;
        }

        default:
            PlatformAbstractTarget.throwUnsupported(type);
    }
}
/** {@inheritDoc} */
@Override public PlatformTarget processOutObject(int type) throws IgniteCheckedException {
    switch (type) {
        case OP_GET_TRANSACTIONS:
            // Transactions facade without an explicit label.
            return new PlatformTransactions(platformCtx);

        case OP_GET_CLUSTER_GROUP:
            return new PlatformClusterGroup(platformCtx, ctx.grid().cluster());

        case OP_GET_BINARY_PROCESSOR:
            return new PlatformBinaryProcessor(platformCtx);

        default:
            return PlatformAbstractTarget.throwUnsupported(type);
    }
}
/** {@inheritDoc} */
@Override public PlatformAsyncResult processInStreamAsync(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
    // No asynchronous in-stream operations are supported by this target.
    return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public Exception convertException(Exception e) {
    // Exceptions are passed through unchanged; no platform-specific mapping is performed here.
    return e;
}
/**
 * Internal store initialization routine.
 *
 * @param store Store to initialize.
 * @param convertBinary Convert binary flag.
 * @throws IgniteCheckedException If the store type is not a supported interop store.
 */
private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException {
    // Only .NET stores are supported by this processor.
    if (!(store instanceof PlatformDotNetCacheStore))
        throw new IgniteCheckedException("Unsupported interop store: " + store);

    ((PlatformDotNetCacheStore)store).initialize(ctx, convertBinary);
}
/**
 * Prepare cache extensions.
 *
 * @param cacheExts Original extensions.
 * @return Prepared extensions, indexed by extension ID (gaps are {@code null}).
 */
private static PlatformCacheExtension[] prepareCacheExtensions(Collection<PlatformCacheExtension> cacheExts) {
    if (F.isEmpty(cacheExts))
        //noinspection ZeroLengthArrayAllocation
        return new PlatformCacheExtension[0];

    // Validate: no nulls, no negative IDs, no duplicate IDs; track the largest ID seen.
    Map<Integer, PlatformCacheExtension> extById = new HashMap<>();

    int maxId = 0;

    for (PlatformCacheExtension ext : cacheExts) {
        if (ext == null)
            throw new IgniteException("Platform cache extension cannot be null.");

        if (ext.id() < 0)
            throw new IgniteException("Platform cache extension ID cannot be negative: " + ext);

        PlatformCacheExtension prev = extById.put(ext.id(), ext);

        if (prev != null)
            throw new IgniteException("Platform cache extensions cannot have the same ID [" +
                "id=" + ext.id() + ", first=" + prev + ", second=" + ext + ']');

        maxId = Math.max(maxId, ext.id());
    }

    // Direct-index lookup table: slot == extension ID.
    PlatformCacheExtension[] res = new PlatformCacheExtension[maxId + 1];

    for (PlatformCacheExtension ext : cacheExts)
        res[ext.id()] = ext;

    return res;
}
/**
 * Prepare extensions.
 *
 * @param exts Original extensions.
 * @return Prepared extensions, indexed by extension ID (gaps are {@code null}).
 */
private static PlatformPluginExtension[] prepareExtensions(PlatformPluginExtension[] exts) {
    if (F.isEmpty(exts))
        //noinspection ZeroLengthArrayAllocation
        return new PlatformPluginExtension[0];

    // Validate: no nulls, no negative IDs, no duplicate IDs; track the largest ID seen.
    Map<Integer, PlatformPluginExtension> extById = new HashMap<>();

    int maxId = 0;

    for (PlatformPluginExtension ext : exts) {
        if (ext == null)
            throw new IgniteException("Platform extension cannot be null.");

        if (ext.id() < 0)
            throw new IgniteException("Platform extension ID cannot be negative: " + ext);

        PlatformPluginExtension prev = extById.put(ext.id(), ext);

        if (prev != null)
            throw new IgniteException("Platform extensions cannot have the same ID [" +
                "id=" + ext.id() + ", first=" + prev + ", second=" + ext + ']');

        maxId = Math.max(maxId, ext.id());
    }

    // Direct-index lookup table: slot == extension ID.
    PlatformPluginExtension[] res = new PlatformPluginExtension[maxId + 1];

    for (PlatformPluginExtension ext : exts)
        res[ext.id()] = ext;

    return res;
}
/**
 * Immutable holder pairing a platform cache store with its binary-conversion flag.
 */
private static class StoreInfo {
    /** Store. */
    private final PlatformCacheStore store;
    /** Convert binary flag. */
    private final boolean convertBinary;
    /**
     * Constructor.
     *
     * @param store Store.
     * @param convertBinary Convert binary flag.
     */
    private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
        this.store = store;
        this.convertBinary = convertBinary;
    }
}
}
| ptupitsyn/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformProcessorImpl.java | Java | apache-2.0 | 29,379 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.util.diff;
import java.util.List;
/**
* Initially copied from https://code.google.com/p/java-diff-utils/.
* <p>
* Describes the delete-delta between original and revised texts.
*
* @author <a href="dm.naumenko@gmail.com">Dmitry Naumenko</a>
* @param <T> The type of the compared elements in the 'lines'.
*/
public class DeleteDelta<T> extends Delta<T> {

  /**
   * Creates a delete delta with the two given chunks.
   *
   * @param original
   *          The original chunk. Must not be {@code null}.
   * @param revised
   *          The revised chunk. Must not be {@code null}.
   */
  public DeleteDelta(Chunk<T> original, Chunk<T> revised) {
    super(original, revised);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void applyTo(List<T> target) throws IllegalStateException {
    verify(target);
    int position = getOriginal().getPosition();
    int size = getOriginal().size();
    // Clear the whole range through a subList view: this removes all deleted elements
    // in one pass instead of shifting the tail of the list once per removed element
    // (the original loop called target.remove(position) `size` times).
    target.subList(position, position + size).clear();
  }

  /** @return always {@link Delta.TYPE#DELETE}. */
  @Override
  public TYPE getType() {
    return Delta.TYPE.DELETE;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void verify(List<T> target) throws IllegalStateException {
    getOriginal().verify(target);
  }
}
| ChrisCanCompute/assertj-core | src/main/java/org/assertj/core/util/diff/DeleteDelta.java | Java | apache-2.0 | 1,820 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.util.HashMap;
import java.util.Map;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.Session;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Test;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;
import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
/**
 * Verifies that JMS {@code MapMessage} payloads are converted to/from {@code Map} bodies
 * by the Camel JMS component, in both the consume and the produce direction.
 */
public class ConsumeJmsMapMessageTest extends CamelTestSupport {
    protected JmsTemplate jmsTemplate;
    // Shared mock endpoint, resolved once in setUp().
    private MockEndpoint endpoint;
    @Test
    public void testConsumeMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);
        // Send a raw JMS MapMessage directly via Spring's JmsTemplate (bypassing Camel).
        jmsTemplate.setPubSubDomain(false);
        jmsTemplate.send("test.map", new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                MapMessage mapMessage = session.createMapMessage();
                mapMessage.setString("foo", "abc");
                mapMessage.setString("bar", "xyz");
                return mapMessage;
            }
        });
        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }
    // Asserts the first received exchange carries the expected two-entry map body.
    protected void assertCorrectMapReceived() {
        Exchange exchange = endpoint.getReceivedExchanges().get(0);
        // This should be a JMS Exchange
        assertNotNull(ExchangeHelper.getBinding(exchange, JmsBinding.class));
        JmsMessage in = (JmsMessage) exchange.getIn();
        assertNotNull(in);
        Map<?, ?> map = exchange.getIn().getBody(Map.class);
        log.info("Received map: " + map);
        assertNotNull("Should have received a map message!", map);
        assertIsInstanceOf(MapMessage.class, in.getJmsMessage());
        assertEquals("map.foo", "abc", map.get("foo"));
        assertEquals("map.bar", "xyz", map.get("bar"));
        assertEquals("map.size", 2, map.size());
    }
    @Test
    public void testSendMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);
        // Send a Java Map through Camel; it must arrive as a JMS MapMessage.
        Map<String, String> map = new HashMap<>();
        map.put("foo", "abc");
        map.put("bar", "xyz");
        template.sendBody("direct:test", map);
        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        endpoint = getMockEndpoint("mock:result");
    }
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();
        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        jmsTemplate = new JmsTemplate(connectionFactory);
        camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));
        return camelContext;
    }
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                from("activemq:test.map").to("mock:result");
                from("direct:test").to("activemq:test.map");
            }
        };
    }
}
| Fabryprog/camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeJmsMapMessageTest.java | Java | apache-2.0 | 4,265 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.constructionheuristic.greedyFit.decider;
/**
 * Determines if (and when) a construction heuristic step may be picked early,
 * before all moves of the step have been evaluated.
 */
public enum ConstructionHeuristicPickEarlyType {
    /** Never pick early: evaluate all moves of the step. */
    NEVER,
    /** Pick as soon as a move's step score is equal to or improves on the last step score. */
    FIRST_LAST_STEP_SCORE_EQUAL_OR_IMPROVING
}
| psiroky/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/constructionheuristic/greedyFit/decider/ConstructionHeuristicPickEarlyType.java | Java | apache-2.0 | 779 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming.io;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.apache.beam.runners.flink.metrics.FlinkMetricContainer;
import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil;
import org.apache.beam.runners.flink.translation.types.CoderTypeInformation;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.ValueWithRecordId;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.StoppableFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.DefaultOperatorStateBackend;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source.
*/
public class UnboundedSourceWrapper<
OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark>
extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>>
implements ProcessingTimeCallback, StoppableFunction,
CheckpointListener, CheckpointedFunction {
private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class);
private final String stepName;
/**
* Keep the options so that we can initialize the localReaders.
*/
private final SerializedPipelineOptions serializedOptions;
/**
* For snapshot and restore.
*/
private final KvCoder<
? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> checkpointCoder;
/**
* The split sources. We split them in the constructor to ensure that all parallel
* sources are consistent about the split sources.
*/
private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources;
/**
* The local split sources. Assigned at runtime when the wrapper is executed in parallel.
*/
private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources;
/**
* The local split readers. Assigned at runtime when the wrapper is executed in parallel.
* Make it a field so that we can access it in {@link #onProcessingTime(long)} for
* emitting watermarks.
*/
private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders;
/**
* Flag to indicate whether the source is running.
* Initialize here and not in run() to prevent races where we cancel a job before run() is
* ever called or run() is called after cancel().
*/
private volatile boolean isRunning = true;
/**
* Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new
* triggers.
*/
private transient StreamingRuntimeContext runtimeContext;
/**
* Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting
* watermarks.
*/
private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context;
/**
* Pending checkpoints which have not been acknowledged yet.
*/
private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints;
/**
* Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}.
*/
private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32;
private transient ListState<KV<? extends
UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> stateForCheckpoint;
/**
* false if checkpointCoder is null or no restore state by starting first.
*/
private transient boolean isRestored = false;
@SuppressWarnings("unchecked")
public UnboundedSourceWrapper(
String stepName,
PipelineOptions pipelineOptions,
UnboundedSource<OutputT, CheckpointMarkT> source,
int parallelism) throws Exception {
this.stepName = stepName;
this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
if (source.requiresDeduping()) {
LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source);
}
Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder();
if (checkpointMarkCoder == null) {
LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots.");
checkpointCoder = null;
} else {
Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder =
(Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() {
});
checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder);
}
// get the splits early. we assume that the generated splits are stable,
// this is necessary so that the mapping of state to source is correct
// when restoring
splitSources = source.split(parallelism, pipelineOptions);
}
/**
* Initialize and restore state before starting execution of the source.
*/
@Override
public void open(Configuration parameters) throws Exception {
runtimeContext = (StreamingRuntimeContext) getRuntimeContext();
// figure out which split sources we're responsible for
int subtaskIndex = runtimeContext.getIndexOfThisSubtask();
int numSubtasks = runtimeContext.getNumberOfParallelSubtasks();
localSplitSources = new ArrayList<>();
localReaders = new ArrayList<>();
pendingCheckpoints = new LinkedHashMap<>();
if (isRestored) {
// restore the splitSources from the checkpoint to ensure consistent ordering
for (KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored:
stateForCheckpoint.get()) {
localSplitSources.add(restored.getKey());
localReaders.add(restored.getKey().createReader(
serializedOptions.getPipelineOptions(), restored.getValue()));
}
} else {
// initialize localReaders and localSources from scratch
for (int i = 0; i < splitSources.size(); i++) {
if (i % numSubtasks == subtaskIndex) {
UnboundedSource<OutputT, CheckpointMarkT> source =
splitSources.get(i);
UnboundedSource.UnboundedReader<OutputT> reader =
source.createReader(serializedOptions.getPipelineOptions(), null);
localSplitSources.add(source);
localReaders.add(reader);
}
}
}
LOG.info("Unbounded Flink Source {}/{} is reading from sources: {}",
subtaskIndex,
numSubtasks,
localSplitSources);
}
@Override
public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception {
context = ctx;
FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext());
ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker =
new ReaderInvocationUtil<>(
stepName,
serializedOptions.getPipelineOptions(),
metricContainer);
if (localReaders.size() == 0) {
// do nothing, but still look busy ...
// also, output a Long.MAX_VALUE watermark since we know that we're not
// going to emit anything
// we can't return here since Flink requires that all operators stay up,
// otherwise checkpointing would not work correctly anymore
ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
// wait until this is canceled
final Object waitLock = new Object();
while (isRunning) {
try {
// Flink will interrupt us at some point
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized (waitLock) {
// don't wait indefinitely, in case something goes horribly wrong
waitLock.wait(1000);
}
} catch (InterruptedException e) {
if (!isRunning) {
// restore the interrupted state, and fall through the loop
Thread.currentThread().interrupt();
}
}
}
} else if (localReaders.size() == 1) {
// the easy case, we just read from one reader
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0);
boolean dataAvailable = readerInvoker.invokeStart(reader);
if (dataAvailable) {
emitElement(ctx, reader);
}
setNextWatermarkTimer(this.runtimeContext);
while (isRunning) {
dataAvailable = readerInvoker.invokeAdvance(reader);
if (dataAvailable) {
emitElement(ctx, reader);
} else {
Thread.sleep(50);
}
}
} else {
// a bit more complicated, we are responsible for several localReaders
// loop through them and sleep if none of them had any data
int numReaders = localReaders.size();
int currentReader = 0;
// start each reader and emit data if immediately available
for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) {
boolean dataAvailable = readerInvoker.invokeStart(reader);
if (dataAvailable) {
emitElement(ctx, reader);
}
}
// a flag telling us whether any of the localReaders had data
// if no reader had data, sleep for bit
boolean hadData = false;
while (isRunning) {
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader);
boolean dataAvailable = readerInvoker.invokeAdvance(reader);
if (dataAvailable) {
emitElement(ctx, reader);
hadData = true;
}
currentReader = (currentReader + 1) % numReaders;
if (currentReader == 0 && !hadData) {
Thread.sleep(50);
} else if (currentReader == 0) {
hadData = false;
}
}
}
}
/**
* Emit the current element from the given Reader. The reader is guaranteed to have data.
*/
private void emitElement(
SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx,
UnboundedSource.UnboundedReader<OutputT> reader) {
// make sure that reader state update and element emission are atomic
// with respect to snapshots
synchronized (ctx.getCheckpointLock()) {
OutputT item = reader.getCurrent();
byte[] recordId = reader.getCurrentRecordId();
Instant timestamp = reader.getCurrentTimestamp();
WindowedValue<ValueWithRecordId<OutputT>> windowedValue =
WindowedValue.of(new ValueWithRecordId<>(item, recordId), timestamp,
GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
ctx.collectWithTimestamp(windowedValue, timestamp.getMillis());
}
}
@Override
public void close() throws Exception {
super.close();
if (localReaders != null) {
for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) {
reader.close();
}
}
}
@Override
public void cancel() {
isRunning = false;
}
@Override
public void stop() {
isRunning = false;
}
// ------------------------------------------------------------------------
// Checkpoint and restore
// ------------------------------------------------------------------------
@Override
public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception {
if (!isRunning) {
LOG.debug("snapshotState() called on closed source");
} else {
if (checkpointCoder == null) {
// no checkpoint coder available in this source
return;
}
stateForCheckpoint.clear();
long checkpointId = functionSnapshotContext.getCheckpointId();
// we checkpoint the sources along with the CheckpointMarkT to ensure
// than we have a correct mapping of checkpoints to sources when
// restoring
List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size());
for (int i = 0; i < localSplitSources.size(); i++) {
UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i);
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i);
@SuppressWarnings("unchecked")
CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark();
checkpointMarks.add(mark);
KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv =
KV.of(source, mark);
stateForCheckpoint.add(kv);
}
// cleanup old pending checkpoints and add new checkpoint
int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS;
if (diff >= 0) {
for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator();
diff >= 0;
diff--) {
iterator.next();
iterator.remove();
}
}
pendingCheckpoints.put(checkpointId, checkpointMarks);
}
}
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
if (checkpointCoder == null) {
// no checkpoint coder available in this source
return;
}
OperatorStateStore stateStore = context.getOperatorStateStore();
CoderTypeInformation<
KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>>
typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder);
stateForCheckpoint = stateStore.getOperatorState(
new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME,
typeInformation.createSerializer(new ExecutionConfig())));
if (context.isRestored()) {
isRestored = true;
LOG.info("Having restore state in the UnbounedSourceWrapper.");
} else {
LOG.info("No restore state for UnbounedSourceWrapper.");
}
}
@Override
public void onProcessingTime(long timestamp) throws Exception {
if (this.isRunning) {
synchronized (context.getCheckpointLock()) {
// find minimum watermark over all localReaders
long watermarkMillis = Long.MAX_VALUE;
for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) {
Instant watermark = reader.getWatermark();
if (watermark != null) {
watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis);
}
}
context.emitWatermark(new Watermark(watermarkMillis));
}
setNextWatermarkTimer(this.runtimeContext);
}
}
private void setNextWatermarkTimer(StreamingRuntimeContext runtime) {
if (this.isRunning) {
long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval();
long timeToNextWatermark = getTimeToNextWatermark(watermarkInterval);
runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this);
}
}
private long getTimeToNextWatermark(long watermarkInterval) {
return System.currentTimeMillis() + watermarkInterval;
}
/**
 * Visible so that we can check this in tests. Must not be used for anything else.
 *
 * @return the full list of split sources (presumably covering all subtasks, as opposed to
 *         {@link #getLocalSplitSources()} — confirm at the field declarations)
 */
@VisibleForTesting
public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() {
  return splitSources;
}
/**
 * Visible so that we can check this in tests. Must not be used for anything else.
 *
 * @return the split sources this instance holds (presumably only the locally-assigned
 *         subset — confirm at the field declarations)
 */
@VisibleForTesting
public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() {
  return localSplitSources;
}
/**
 * Called when the given checkpoint has been fully acknowledged. Finalizes every checkpoint
 * mark recorded for it and prunes the bookkeeping for that checkpoint and all older ones.
 */
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
  List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId);
  if (checkpointMarks != null) {
    // remove old checkpoints including the current one
    // NOTE(review): this assumes pendingCheckpoints iterates keys in checkpoint order
    // (e.g. an insertion-ordered map) — confirm at the field declaration.
    Iterator<Long> iterator = pendingCheckpoints.keySet().iterator();
    long currentId;
    do {
      currentId = iterator.next();
      iterator.remove();
    } while (currentId != checkpointId);
    // confirm all marks
    for (CheckpointMarkT mark : checkpointMarks) {
      mark.finalizeCheckpoint();
    }
  }
}
}
| dhalperi/beam | runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java | Java | apache-2.0 | 18,014 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mss.internal.mime;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@code MimeMapper}'s extension-to-MIME-type resolution.
 */
public class MimeMapperTest {

    /** A registered extension must resolve to its known MIME type. */
    @Test
    public void testMimeMappingForKnownExtension() throws MimeMappingException {
        Assert.assertEquals("image/png", MimeMapper.getMimeType("png"));
    }

    /** An unregistered extension must be rejected with a {@code MimeMappingException}. */
    @Test(expected = MimeMappingException.class)
    public void testMimeMappingForUnknownExtension() throws MimeMappingException {
        MimeMapper.getMimeType("unknownext");
    }
}
| susinda/product-mss | carbon-mss/components/org.wso2.carbon.mss/src/test/java/org/wso2/carbon/mss/internal/mime/MimeMapperTest.java | Java | apache-2.0 | 1,232 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.elasticjob.lite.spring.namespace.job;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.elasticjob.infra.concurrent.BlockUtils;
import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap;
import org.apache.shardingsphere.elasticjob.lite.internal.schedule.JobRegistry;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.DataflowElasticJob;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.FooSimpleElasticJob;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.test.AbstractZookeeperJUnit4SpringContextTests;
import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Resource;
import static org.junit.Assert.assertTrue;
/**
 * Base class for Spring-namespace integration tests of one-off ElasticJob jobs: executes each
 * job bootstrap, waits for completion, and verifies the sharding node exists in the registry.
 */
@RequiredArgsConstructor
public abstract class AbstractOneOffJobSpringIntegrateTest extends AbstractZookeeperJUnit4SpringContextTests {

    private final String simpleJobName;

    private final String throughputDataflowJobName;

    @Resource
    private CoordinatorRegistryCenter regCenter;

    /** Clear the completion flags of both job fixtures before and after every test. */
    @Before
    @After
    public void reset() {
        FooSimpleElasticJob.reset();
        DataflowElasticJob.reset();
    }

    /** Release both jobs' registry resources after every test. */
    @After
    public void tearDown() {
        JobRegistry.getInstance().shutdown(simpleJobName);
        JobRegistry.getInstance().shutdown(throughputDataflowJobName);
    }

    @Test
    public void assertSpringJobBean() {
        assertSimpleElasticJobBean();
        assertThroughputDataflowElasticJobBean();
    }

    private void assertSimpleElasticJobBean() {
        OneOffJobBootstrap simpleJob = applicationContext.getBean(simpleJobName, OneOffJobBootstrap.class);
        simpleJob.execute();
        // Busy-wait (short sleeps) until the job flags itself complete.
        while (!FooSimpleElasticJob.isCompleted()) {
            BlockUtils.waitingShortTime();
        }
        assertTrue(FooSimpleElasticJob.isCompleted());
        assertTrue(regCenter.isExisted("/" + simpleJobName + "/sharding"));
    }

    private void assertThroughputDataflowElasticJobBean() {
        OneOffJobBootstrap dataflowJob = applicationContext.getBean(throughputDataflowJobName, OneOffJobBootstrap.class);
        dataflowJob.execute();
        // Busy-wait (short sleeps) until the job flags itself complete.
        while (!DataflowElasticJob.isCompleted()) {
            BlockUtils.waitingShortTime();
        }
        assertTrue(DataflowElasticJob.isCompleted());
        assertTrue(regCenter.isExisted("/" + throughputDataflowJobName + "/sharding"));
    }
}
| dangdangdotcom/elastic-job | elasticjob-lite/elasticjob-lite-spring/elasticjob-lite-spring-namespace/src/test/java/org/apache/shardingsphere/elasticjob/lite/spring/namespace/job/AbstractOneOffJobSpringIntegrateTest.java | Java | apache-2.0 | 3,370 |
/*-
* #%L
* ELK Reasoner Core
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.semanticweb.elk.reasoner.entailments.impl;
import java.util.Collections;
import java.util.List;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom;
import org.semanticweb.elk.reasoner.entailments.model.DerivedClassInclusionEntailsObjectPropertyAssertionAxiom;
import org.semanticweb.elk.reasoner.entailments.model.Entailment;
import org.semanticweb.elk.reasoner.entailments.model.EntailmentInference;
import org.semanticweb.elk.reasoner.entailments.model.ObjectPropertyAssertionAxiomEntailment;
import org.semanticweb.elk.reasoner.saturation.conclusions.model.SubClassInclusionComposed;
public class DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl
		extends
		AbstractAxiomEntailmentInference<ElkObjectPropertyAssertionAxiom, ObjectPropertyAssertionAxiomEntailment>
		implements DerivedClassInclusionEntailsObjectPropertyAssertionAxiom {

	/** The composed subclass inclusion conclusion that justifies this entailment. */
	private final SubClassInclusionComposed reason_;

	public DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl(
			final ObjectPropertyAssertionAxiomEntailment conclusion,
			final SubClassInclusionComposed reason) {
		super(conclusion);
		this.reason_ = reason;
	}

	@Override
	public SubClassInclusionComposed getReason() {
		return this.reason_;
	}

	@Override
	public List<? extends Entailment> getPremises() {
		// This inference has no entailment premises; its support is the reason conclusion.
		return Collections.emptyList();
	}

	@Override
	public <O> O accept(final EntailmentInference.Visitor<O> visitor) {
		return visitor.visit(this);
	}
}
| liveontologies/elk-reasoner | elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/entailments/impl/DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl.java | Java | apache-2.0 | 2,181 |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud;
import org.dasein.cloud.admin.AdminServices;
import org.dasein.cloud.ci.CIServices;
import org.dasein.cloud.compute.ComputeServices;
import org.dasein.cloud.identity.IdentityServices;
import org.dasein.cloud.network.NetworkServices;
import org.dasein.cloud.platform.PlatformServices;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>.
* @author George Reese
* @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE
* @since unknown
*/
public abstract class AbstractCloud extends CloudProvider {
    /**
     * Constructs a cloud provider instance.
     */
    public AbstractCloud() { }

    @Override
    public @Nullable AdminServices getAdminServices() {
        // No admin services by default; concrete clouds override as needed.
        return null;
    }

    @Override
    public @Nullable ComputeServices getComputeServices() {
        CloudProvider computeProvider = getComputeCloud();
        if (computeProvider == null) {
            return null;
        }
        return computeProvider.getComputeServices();
    }

    @Override
    public @Nonnull ContextRequirements getContextRequirements() {
        // Default requirement: an API key pair plus an optional X.509 key pair.
        ContextRequirements.Field apiKeys =
            new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR);
        ContextRequirements.Field x509 =
            new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false);
        return new ContextRequirements(apiKeys, x509);
    }

    @Override
    public @Nullable CIServices getCIServices() {
        CloudProvider computeProvider = getComputeCloud();
        if (computeProvider == null) {
            return null;
        }
        return computeProvider.getCIServices();
    }

    @Override
    public @Nullable IdentityServices getIdentityServices() {
        CloudProvider computeProvider = getComputeCloud();
        if (computeProvider == null) {
            return null;
        }
        return computeProvider.getIdentityServices();
    }

    @Override
    public @Nullable NetworkServices getNetworkServices() {
        CloudProvider computeProvider = getComputeCloud();
        if (computeProvider == null) {
            return null;
        }
        return computeProvider.getNetworkServices();
    }

    @Override
    public @Nullable PlatformServices getPlatformServices() {
        CloudProvider computeProvider = getComputeCloud();
        if (computeProvider == null) {
            return null;
        }
        return computeProvider.getPlatformServices();
    }
}
| OSS-TheWeatherCompany/dasein-cloud-core | src/main/java/org/dasein/cloud/AbstractCloud.java | Java | apache-2.0 | 3,068 |
package io.katharsis.jpa.meta;
import java.io.Serializable;
import java.util.UUID;
import org.junit.Assert;
import org.junit.Test;
import io.katharsis.meta.model.MetaPrimitiveType;
/**
 * Tests that {@code MetaPrimitiveType} accepts the implementation types it must support:
 * JDK wrappers, String, UUID, enums, and custom classes that are parseable or serializable.
 * Fixes: added {@code hashCode()} to both nested fixture classes (they overrode
 * {@code equals} without it) and removed unused locals from three tests.
 */
public class MetaPrimitiveTypeTest {
	@Test
	public void testString() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(String.class);
	}
	@Test
	public void testInteger() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Integer.class);
	}
	@Test
	public void testShort() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Short.class);
	}
	@Test
	public void testLong() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Long.class);
	}
	@Test
	public void testFloat() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Float.class);
	}
	@Test
	public void testDouble() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Double.class);
	}
	@Test
	public void testBoolean() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Boolean.class);
	}
	@Test
	public void testByte() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Byte.class);
	}
	@Test
	public void testUUID() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(UUID.class);
	}
	enum TestEnum {
		A
	}
	@Test
	public void testEnum() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestEnum.class);
	}
	/** Fixture: a type made parseable via a static {@code parse(String)} factory. */
	public static class TestObjectWithParse {
		int value;
		public static TestObjectWithParse parse(String value) {
			TestObjectWithParse parser = new TestObjectWithParse();
			parser.value = Integer.parseInt(value);
			return parser;
		}
		@Override
		public int hashCode() {
			// Must be consistent with equals(): equal values => equal hash codes.
			return value;
		}
		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			TestObjectWithParse other = (TestObjectWithParse) obj;
			return value == other.value;
		}
	}
	/** Fixture: a serializable type constructible from a String. */
	public static class TestObjectWithConstructor implements Serializable {
		int value;
		public TestObjectWithConstructor() {
		}
		public TestObjectWithConstructor(String value) {
			this.value = Integer.parseInt(value);
		}
		@Override
		public int hashCode() {
			// Must be consistent with equals(): equal values => equal hash codes.
			return value;
		}
		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			TestObjectWithConstructor other = (TestObjectWithConstructor) obj;
			return value == other.value;
		}
	}
	@Test
	public void testParse() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithParse.class);
	}
	@Test
	public void testOther() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithConstructor.class);
	}
}
| apetrucci/katharsis-framework | katharsis-jpa/src/test/java/io/katharsis/jpa/meta/MetaPrimitiveTypeTest.java | Java | apache-2.0 | 3,187 |
package eu.atos.sla.dao.jpa;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityNotFoundException;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import eu.atos.sla.dao.ITemplateDAO;
import eu.atos.sla.datamodel.ITemplate;
import eu.atos.sla.datamodel.bean.Template;
/**
 * JPA-backed implementation of {@link ITemplateDAO} managing {@link Template} entities via
 * the "slarepositoryDB" persistence unit. Cleanup: parameterized SLF4J logging, removed dead
 * {@code new ArrayList<>()} initializations and the dead null-checks on
 * {@code getResultList()} (which never returns null per the JPA spec).
 */
@Repository("TemplateRepository")
public class TemplateDAOJpa implements ITemplateDAO {
	private static Logger logger = LoggerFactory.getLogger(TemplateDAOJpa.class);
	private EntityManager entityManager;
	@PersistenceContext(unitName = "slarepositoryDB")
	public void setEntityManager(EntityManager entityManager) {
		this.entityManager = entityManager;
	}
	public EntityManager getEntityManager() {
		return entityManager;
	}
	/**
	 * Loads a template by its database primary key.
	 *
	 * @return the template, or {@code null} when no row with that id exists
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public Template getById(Long id) {
		return entityManager.find(Template.class, id);
	}
	/**
	 * Loads a template by its business identifier.
	 *
	 * @return the template, or {@code null} when no template has that uuid
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public Template getByUuid(String uuid) {
		try {
			Query query = entityManager
					.createNamedQuery(Template.QUERY_FIND_BY_UUID);
			query.setParameter("uuid", uuid);
			return (Template) query.getSingleResult();
		} catch (NoResultException e) {
			// Absence is a normal outcome for this lookup; report null instead of failing.
			logger.debug("No Result found: {}", e.toString());
			return null;
		}
	}
	/**
	 * Searches templates by provider and, optionally, by service ids. A {@code null}
	 * serviceIds array disables the service filter (signalled to the named query through
	 * the "flagServiceIds" parameter).
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public List<ITemplate> search(String providerId, String []serviceIds) {
		TypedQuery<ITemplate> query = entityManager.createNamedQuery(
				Template.QUERY_SEARCH, ITemplate.class);
		query.setParameter("providerId", providerId);
		query.setParameter("serviceIds", (serviceIds!=null)?Arrays.asList(serviceIds):null);
		query.setParameter("flagServiceIds", (serviceIds!=null)?"flag":null);
		logger.debug("providerId:{} - serviceIds:{}" , providerId, (serviceIds!=null)?Arrays.asList(serviceIds):null);
		List<ITemplate> templates = query.getResultList();
		logger.debug("Number of templates:{}", templates.size());
		return templates;
	}
	/**
	 * Lists the templates associated with the given agreement.
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public List<ITemplate> getByAgreement(String agreement) {
		TypedQuery<ITemplate> query = entityManager.createNamedQuery(
				Template.QUERY_FIND_BY_AGREEMENT, ITemplate.class);
		query.setParameter("agreement", agreement);
		List<ITemplate> templates = query.getResultList();
		logger.debug("Number of templates:{}", templates.size());
		return templates;
	}
	/**
	 * Lists all stored templates.
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public List<ITemplate> getAll() {
		TypedQuery<ITemplate> query = entityManager.createNamedQuery(
				Template.QUERY_FIND_ALL, ITemplate.class);
		List<ITemplate> templates = query.getResultList();
		logger.debug("Number of templates:{}", templates.size());
		return templates;
	}
	/**
	 * Persists a new template and flushes immediately so constraint violations surface here.
	 *
	 * @return the persisted template
	 */
	@Override
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public ITemplate save(ITemplate template) {
		logger.info("template.getUuid() {}", template.getUuid());
		entityManager.persist(template);
		entityManager.flush();
		return template;
	}
	/**
	 * Merges {@code template} over the template stored under {@code uuid}.
	 *
	 * @return {@code true} when a template with that uuid existed and was updated
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public boolean update(String uuid, ITemplate template) {
		Template templateDB = null;
		try {
			Query query = entityManager.createNamedQuery(Template.QUERY_FIND_BY_UUID);
			query.setParameter("uuid", uuid);
			templateDB = (Template)query.getSingleResult();
		} catch (NoResultException e) {
			logger.debug("No Result found: {}", e.toString());
		}
		if (templateDB == null) {
			return false;
		}
		// Carry over the technical id so merge targets the existing row.
		template.setId(templateDB.getId());
		logger.info("template to update with id{}", template.getId());
		entityManager.merge(template);
		entityManager.flush();
		return true;
	}
	/**
	 * Deletes the given template.
	 *
	 * @return {@code true} when the row existed and was removed
	 */
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
	public boolean delete(ITemplate template) {
		try {
			Template templateDeleted = entityManager.getReference(Template.class, template.getId());
			entityManager.remove(templateDeleted);
			entityManager.flush();
			return true;
		} catch (EntityNotFoundException e) {
			logger.debug("Template[{}] not found", template.getId());
			return false;
		}
	}
}
| Atos-FiwareOps/sla-framework | sla-core/sla-repository/src/main/java/eu/atos/sla/dao/jpa/TemplateDAOJpa.java | Java | apache-2.0 | 5,164 |
package org.apereo.cas.ticket.code;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.ticket.ExpirationPolicy;
import org.apereo.cas.ticket.Ticket;
import org.apereo.cas.ticket.TicketFactory;
import org.apereo.cas.ticket.UniqueTicketIdGenerator;
import org.apereo.cas.util.DefaultUniqueTicketIdGenerator;
/**
* Default OAuth code factory.
*
* @author Jerome Leleu
* @since 5.0.0
*/
public class DefaultOAuthCodeFactory implements OAuthCodeFactory {
    /** Default instance for the ticket id generator. */
    protected final UniqueTicketIdGenerator oAuthCodeIdGenerator;
    /** ExpirationPolicy for refresh tokens. */
    protected final ExpirationPolicy expirationPolicy;
    /**
     * Creates a factory that generates code ids with a {@link DefaultUniqueTicketIdGenerator}.
     *
     * @param expirationPolicy expiration policy applied to produced codes
     */
    public DefaultOAuthCodeFactory(final ExpirationPolicy expirationPolicy) {
        this(new DefaultUniqueTicketIdGenerator(), expirationPolicy);
    }
    /**
     * Creates a factory with an explicit id generator.
     *
     * @param refreshTokenIdGenerator generator for new code ids
     * @param expirationPolicy expiration policy applied to produced codes
     */
    public DefaultOAuthCodeFactory(final UniqueTicketIdGenerator refreshTokenIdGenerator, final ExpirationPolicy expirationPolicy) {
        this.oAuthCodeIdGenerator = refreshTokenIdGenerator;
        this.expirationPolicy = expirationPolicy;
    }
    @Override
    public OAuthCode create(final Service service, final Authentication authentication) {
        final String codeId = this.oAuthCodeIdGenerator.getNewTicketId(OAuthCode.PREFIX);
        return new OAuthCodeImpl(codeId, service, authentication, this.expirationPolicy);
    }
    @Override
    @SuppressWarnings("unchecked")
    public <T extends TicketFactory> T get(final Class<? extends Ticket> clazz) {
        // The cast is unchecked by design: this factory registers itself as the handler for
        // the requested ticket class, so returning `this` satisfies the caller's contract.
        return (T) this;
    }
}
| gabedwrds/cas | support/cas-server-support-oauth/src/main/java/org/apereo/cas/ticket/code/DefaultOAuthCodeFactory.java | Java | apache-2.0 | 1,592 |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.EditorBoundHighlightingPass;
import com.intellij.codeHighlighting.HighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar;
import com.intellij.concurrency.Job;
import com.intellij.concurrency.JobLauncher;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.Functions;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashingStrategy;
import com.intellij.util.ui.UIUtil;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
final class PassExecutorService implements Disposable {
static final Logger LOG = Logger.getInstance(PassExecutorService.class);
private static final boolean CHECK_CONSISTENCY = ApplicationManager.getApplication().isUnitTestMode();
private final Map<ScheduledPass, Job<Void>> mySubmittedPasses = new ConcurrentHashMap<>();
private final Project myProject;
private volatile boolean isDisposed;
private final AtomicInteger nextAvailablePassId; // used to assign random id to a pass if not set
PassExecutorService(@NotNull Project project) {
  myProject = project;
  // Share the id counter with the pass registrar so ids assigned in assignUniqueId()
  // do not collide with ids of registered passes.
  nextAvailablePassId = ((TextEditorHighlightingPassRegistrarImpl)TextEditorHighlightingPassRegistrar.getInstance(myProject)).getNextAvailableId();
}
/**
 * Cancels all submitted passes, waits briefly for worker threads to quiesce, and marks the
 * service disposed so no further passes are submitted.
 */
@Override
public void dispose() {
  cancelAll(true);
  // some workers could, although idle, still retain some thread references for some time causing leak hunter to frown
  ForkJoinPool.commonPool().awaitQuiescence(1, TimeUnit.SECONDS);
  isDisposed = true;
}
/**
 * Cancels every submitted pass (both its progress indicator and its job) and, when
 * {@code waitForTermination} is set, polls until all jobs have terminated. The submitted-pass
 * map is cleared in all cases. (Removed the dead local {@code int i = 0;} that served as the
 * polling loop's body.)
 */
void cancelAll(boolean waitForTermination) {
  for (Map.Entry<ScheduledPass, Job<Void>> entry : mySubmittedPasses.entrySet()) {
    Job<Void> job = entry.getValue();
    ScheduledPass pass = entry.getKey();
    pass.myUpdateProgress.cancel();
    job.cancel();
  }
  try {
    if (waitForTermination) {
      // Poll in short intervals until all jobs report termination.
      while (!waitFor(50)) {
      }
    }
  }
  catch (ProcessCanceledException ignored) {
  }
  catch (Error | RuntimeException e) {
    // Programming errors propagate unchanged.
    throw e;
  }
  catch (Throwable throwable) {
    LOG.error(throwable);
  }
  finally {
    mySubmittedPasses.clear();
  }
}
/**
 * Builds the dependency graph of {@link ScheduledPass}es for all highlighting passes of all
 * file editors and submits every pass with no unfinished predecessors; dependent passes are
 * started later, when their predecessors complete.
 */
void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap,
                  // a list of opened FileEditors for each Document. The first FileEditor in the list is the preferred one
                  @NotNull Map<Document, List<FileEditor>> documentToEditors,
                  @NotNull DaemonProgressIndicator updateProgress) {
  if (isDisposed()) return;
  // Split incoming passes into three kinds: document-bound, editor-bound, and generic.
  Map<FileEditor, List<TextEditorHighlightingPass>> documentBoundPasses = new HashMap<>();
  Map<FileEditor, List<EditorBoundHighlightingPass>> editorBoundPasses = new HashMap<>();
  Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass = new HashMap<>();
  List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5);
  // Counts every scheduled pass; decremented as passes run (see ScheduledPass).
  AtomicInteger threadsToStartCountdown = new AtomicInteger(0);
  for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) {
    FileEditor fileEditor = entry.getKey();
    HighlightingPass[] passes = entry.getValue();
    for (HighlightingPass pass : passes) {
      Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(30));
      if (pass instanceof EditorBoundHighlightingPass) {
        EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass)pass;
        // have to make ids unique for this document
        assignUniqueId(editorPass, thisEditorId2Pass);
        editorBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(editorPass);
      }
      else if (pass instanceof TextEditorHighlightingPass) {
        TextEditorHighlightingPass tePass = (TextEditorHighlightingPass)pass;
        assignUniqueId(tePass, thisEditorId2Pass);
        documentBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(tePass);
      }
      else {
        // generic HighlightingPass, run all of them concurrently
        freePasses.add(new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown));
      }
    }
  }
  List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10);
  // fileEditor-> (passId -> created pass)
  Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted = new HashMap<>(passesMap.size());
  for (Map.Entry<Document, List<FileEditor>> entry : documentToEditors.entrySet()) {
    List<FileEditor> fileEditors = entry.getValue();
    FileEditor preferredFileEditor = fileEditors.get(0); // assumption: the preferred fileEditor is stored first
    List<TextEditorHighlightingPass> passes = documentBoundPasses.get(preferredFileEditor);
    if (passes == null || passes.isEmpty()) {
      continue;
    }
    // Schedule document-bound passes in id order.
    sortById(passes);
    for (TextEditorHighlightingPass pass : passes) {
      createScheduledPass(preferredFileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress,
                          threadsToStartCountdown);
    }
  }
  for (Map.Entry<FileEditor, List<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) {
    FileEditor fileEditor = entry.getKey();
    Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue();
    for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) {
      createScheduledPass(fileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
    }
  }
  // Debug-only validation of the scheduling graph (skipped in stress tests).
  if (CHECK_CONSISTENCY && !ApplicationManagerEx.isInStressTest()) {
    assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown);
  }
  if (LOG.isDebugEnabled()) {
    Set<VirtualFile> vFiles = ContainerUtil.map2Set(passesMap.keySet(), FileEditor::getFile);
    log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses);
  }
  // Track dependent passes with a placeholder job so cancelAll() sees them before submission.
  for (ScheduledPass dependentPass : dependentPasses) {
    mySubmittedPasses.put(dependentPass, Job.nullJob());
  }
  // Kick off every pass that has no predecessors.
  for (ScheduledPass freePass : freePasses) {
    submit(freePass);
  }
}
/**
 * Ensures {@code pass} carries a valid id that is unique within {@code id2Pass}: an unset id
 * (0 or -1) is replaced with a fresh one from the shared counter, and a collision with an
 * already-registered pass is reported as an error.
 */
private void assignUniqueId(@NotNull TextEditorHighlightingPass pass, @NotNull Int2ObjectMap<TextEditorHighlightingPass> id2Pass) {
  int passId = pass.getId();
  boolean idUnset = passId == -1 || passId == 0;
  if (idUnset) {
    passId = nextAvailablePassId.incrementAndGet();
    pass.setId(passId);
  }
  TextEditorHighlightingPass previous = id2Pass.put(passId, pass);
  if (previous != null) {
    LOG.error("Duplicate pass id found: "+passId+". Both passes returned the same getId(): "+previous+" ("+previous.getClass() +") and "+pass+" ("+pass.getClass()+")");
  }
}
/**
 * Debug-only sanity check of the scheduling graph: verifies the countdown matches the number
 * of scheduled passes and that, starting from the free passes, every pass's predecessor count
 * reaches exactly zero (i.e. each pass becomes runnable exactly once).
 */
private void assertConsistency(@NotNull List<ScheduledPass> freePasses,
                               @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                               @NotNull AtomicInteger threadsToStartCountdown) {
  assert threadsToStartCountdown.get() == toBeSubmitted.values().stream().mapToInt(m->m.size()).sum();
  // Key passes by (pass id, file editor) so the same logical pass maps to a single entry.
  Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits = CollectionFactory.createCustomHashingStrategyMap(new HashingStrategy<>() {
    @Override
    public int hashCode(@Nullable PassExecutorService.ScheduledPass sp) {
      if (sp == null) return 0;
      return ((TextEditorHighlightingPass)sp.myPass).getId() * 31 + sp.myFileEditor.hashCode();
    }
    @Override
    public boolean equals(@Nullable PassExecutorService.ScheduledPass sp1, @Nullable PassExecutorService.ScheduledPass sp2) {
      if (sp1 == null || sp2 == null) return sp1 == sp2;
      int id1 = ((TextEditorHighlightingPass)sp1.myPass).getId();
      int id2 = ((TextEditorHighlightingPass)sp2.myPass).getId();
      return id1 == id2 && sp1.myFileEditor == sp2.myFileEditor;
    }
  });
  // Walk the graph from each free pass, decrementing successors' predecessor counts.
  for (ScheduledPass freePass : freePasses) {
    HighlightingPass pass = freePass.myPass;
    if (pass instanceof TextEditorHighlightingPass) {
      id2Visits.put(freePass, Pair.create(freePass, 0));
      checkConsistency(freePass, id2Visits);
    }
  }
  // Every visited pass must end with exactly zero remaining predecessors.
  for (Map.Entry<ScheduledPass, Pair<ScheduledPass, Integer>> entry : id2Visits.entrySet()) {
    int count = entry.getValue().second;
    assert count == 0 : entry.getKey();
  }
  assert id2Visits.size() == threadsToStartCountdown.get() : "Expected "+threadsToStartCountdown+" but got "+id2Visits.size()+": "+id2Visits;
}
/**
 * Recursive helper for {@link #assertConsistency}: decrements the remaining-predecessor count
 * of each successor of {@code pass}; when a successor's count hits zero it is traversed in
 * turn, mirroring how passes become runnable at execution time.
 */
private void checkConsistency(@NotNull ScheduledPass pass, Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits) {
  for (ScheduledPass succ : ContainerUtil.concat(pass.mySuccessorsOnCompletion, pass.mySuccessorsOnSubmit)) {
    Pair<ScheduledPass, Integer> succPair = id2Visits.get(succ);
    if (succPair == null) {
      // First visit: seed with the successor's current predecessor count.
      succPair = Pair.create(succ, succ.myRunningPredecessorsCount.get());
      id2Visits.put(succ, succPair);
    }
    int newPred = succPair.second - 1;
    id2Visits.put(succ, Pair.create(succ, newPred));
    // Going negative would mean a pass is "completed" more often than it has predecessors.
    assert newPred >= 0;
    if (newPred == 0) {
      checkConsistency(succ, id2Visits);
    }
  }
}
/**
 * Creates (or returns the cached) {@link ScheduledPass} for {@code pass}, recursively wiring
 * up its completion- and submit-predecessors, and sorts it into either {@code freePasses}
 * (no predecessors) or {@code dependentPasses}. Also appends a {@code ShowIntentionsPass}
 * after passes that request it.
 */
@NotNull
private ScheduledPass createScheduledPass(@NotNull FileEditor fileEditor,
                                          @NotNull TextEditorHighlightingPass pass,
                                          @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                                          @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
                                          @NotNull List<ScheduledPass> freePasses,
                                          @NotNull List<ScheduledPass> dependentPasses,
                                          @NotNull DaemonProgressIndicator updateProgress,
                                          @NotNull AtomicInteger threadsToStartCountdown) {
  Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass = toBeSubmitted.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
  Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
  int passId = pass.getId();
  // Memoized: a pass already scheduled for this editor is returned as-is.
  ScheduledPass scheduledPass = thisEditorId2ScheduledPass.get(passId);
  if (scheduledPass != null) return scheduledPass;
  scheduledPass = new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown);
  threadsToStartCountdown.incrementAndGet();
  thisEditorId2ScheduledPass.put(passId, scheduledPass);
  // Wire predecessors that must COMPLETE before this pass may start.
  for (int predecessorId : pass.getCompletionPredecessorIds()) {
    ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses,
                                                            updateProgress, threadsToStartCountdown, predecessorId,
                                                            thisEditorId2ScheduledPass, thisEditorId2Pass);
    if (predecessor != null) {
      predecessor.addSuccessorOnCompletion(scheduledPass);
    }
  }
  // Wire predecessors that merely need to be SUBMITTED before this pass may start.
  for (int predecessorId : pass.getStartingPredecessorIds()) {
    ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses,
                                                            updateProgress, threadsToStartCountdown, predecessorId,
                                                            thisEditorId2ScheduledPass, thisEditorId2Pass);
    if (predecessor != null) {
      predecessor.addSuccessorOnSubmit(scheduledPass);
    }
  }
  if (scheduledPass.myRunningPredecessorsCount.get() == 0 && !freePasses.contains(scheduledPass)) {
    freePasses.add(scheduledPass);
  }
  else if (!dependentPasses.contains(scheduledPass)) {
    dependentPasses.add(scheduledPass);
  }
  // Optionally chain a ShowIntentionsPass that runs after this pass completes.
  if (pass.isRunIntentionPassAfter() && fileEditor instanceof TextEditor) {
    Editor editor = ((TextEditor)fileEditor).getEditor();
    VirtualFile virtualFile = fileEditor.getFile();
    PsiFile psiFile = virtualFile == null ? null : ReadAction.compute(() -> PsiManager.getInstance(myProject).findFile(virtualFile));
    if (psiFile != null) {
      ShowIntentionsPass ip = new ShowIntentionsPass(psiFile, editor, false);
      assignUniqueId(ip, thisEditorId2Pass);
      ip.setCompletionPredecessorIds(new int[]{passId});
      createScheduledPass(fileEditor, ip, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
    }
  }
  return scheduledPass;
}
private ScheduledPass findOrCreatePredecessorPass(@NotNull FileEditor fileEditor,
@NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
@NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
@NotNull List<ScheduledPass> freePasses,
@NotNull List<ScheduledPass> dependentPasses,
@NotNull DaemonProgressIndicator updateProgress,
@NotNull AtomicInteger myThreadsToStartCountdown,
int predecessorId,
@NotNull Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass,
@NotNull Int2ObjectMap<? extends TextEditorHighlightingPass> thisEditorId2Pass) {
ScheduledPass predecessor = thisEditorId2ScheduledPass.get(predecessorId);
if (predecessor == null) {
TextEditorHighlightingPass textEditorPass = thisEditorId2Pass.get(predecessorId);
predecessor = textEditorPass == null ? null : createScheduledPass(fileEditor, textEditorPass, toBeSubmitted,
id2Pass, freePasses,
dependentPasses, updateProgress, myThreadsToStartCountdown);
}
return predecessor;
}
  /**
   * Submits the pass to a background job thread (unless its progress indicator was already
   * canceled) and records the resulting job in {@link #mySubmittedPasses}. The completion
   * callback surfaces unexpected execution failures via {@code LOG.error}.
   */
  private void submit(@NotNull ScheduledPass pass) {
    if (!pass.myUpdateProgress.isCanceled()) {
      Job<Void> job = JobLauncher.getInstance().submitToJobThread(pass, future -> {
        try {
          if (!future.isCancelled()) { // for canceled task .get() generates CancellationException which is expensive
            future.get();
          }
        }
        catch (CancellationException | InterruptedException ignored) {
        }
        catch (ExecutionException e) {
          LOG.error(e.getCause());
        }
      });
      mySubmittedPasses.put(pass, job);
    }
  }
  /**
   * A highlighting pass scheduled for background execution together with its dependency edges:
   * {@link #mySuccessorsOnSubmit} become eligible as soon as this pass starts running,
   * {@link #mySuccessorsOnCompletion} only after this pass's results have been applied to the
   * editor. {@link #myRunningPredecessorsCount} counts predecessors that must run first; a
   * successor is submitted when its count drops to zero.
   */
  private final class ScheduledPass implements Runnable {
    private final FileEditor myFileEditor;
    private final HighlightingPass myPass;
    private final AtomicInteger myThreadsToStartCountdown;
    private final AtomicInteger myRunningPredecessorsCount = new AtomicInteger(0);
    private final List<ScheduledPass> mySuccessorsOnCompletion = new ArrayList<>();
    private final List<ScheduledPass> mySuccessorsOnSubmit = new ArrayList<>();
    @NotNull private final DaemonProgressIndicator myUpdateProgress;
    private ScheduledPass(@NotNull FileEditor fileEditor,
                          @NotNull HighlightingPass pass,
                          @NotNull DaemonProgressIndicator progressIndicator,
                          @NotNull AtomicInteger threadsToStartCountdown) {
      myFileEditor = fileEditor;
      myPass = pass;
      myThreadsToStartCountdown = threadsToStartCountdown;
      myUpdateProgress = progressIndicator;
    }
    @Override
    public void run() {
      ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(() -> {
        try {
          doRun();
        }
        catch (ApplicationUtil.CannotRunReadActionException e) {
          // The read action could not be taken (likely a pending write action) — cancel
          // this highlighting session instead of blocking.
          myUpdateProgress.cancel();
        }
        catch (RuntimeException | Error e) {
          // Remember the failure on the indicator (tests read it back) and rethrow.
          saveException(e, myUpdateProgress);
          throw e;
        }
      });
    }
    private void doRun() {
      if (myUpdateProgress.isCanceled()) return;
      log(myUpdateProgress, myPass, "Started. ");
      // "On submit" successors become runnable as soon as this pass starts, not when it ends.
      for (ScheduledPass successor : mySuccessorsOnSubmit) {
        int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
        if (predecessorsToRun == 0) {
          submit(successor);
        }
      }
      ProgressManager.getInstance().executeProcessUnderProgress(() -> {
        boolean success = ApplicationManagerEx.getApplicationEx().tryRunReadAction(() -> {
          try {
            // Skip passes that cannot run while indexing is in progress.
            if (DumbService.getInstance(myProject).isDumb() && !DumbService.isDumbAware(myPass)) {
              return;
            }
            if (!myUpdateProgress.isCanceled() && !myProject.isDisposed()) {
              myPass.collectInformation(myUpdateProgress);
            }
          }
          catch (ProcessCanceledException e) {
            log(myUpdateProgress, myPass, "Canceled ");
            if (!myUpdateProgress.isCanceled()) {
              myUpdateProgress.cancel(e); //in case when some smart asses throw PCE just for fun
            }
          }
          catch (RuntimeException | Error e) {
            myUpdateProgress.cancel(e);
            LOG.error(e);
            throw e;
          }
        });
        if (!success) {
          // Read lock could not be acquired; this run produced nothing — void the session.
          myUpdateProgress.cancel();
        }
      }, myUpdateProgress);
      log(myUpdateProgress, myPass, "Finished. ");
      if (!myUpdateProgress.isCanceled()) {
        applyInformationToEditorsLater(myFileEditor, myPass, myUpdateProgress, myThreadsToStartCountdown, ()->{
          // "On completion" successors may only start after this pass's results were applied.
          for (ScheduledPass successor : mySuccessorsOnCompletion) {
            int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
            if (predecessorsToRun == 0) {
              submit(successor);
            }
          }
        });
      }
    }
    @NonNls
    @Override
    public String toString() {
      return "SP: " + myPass;
    }
    private void addSuccessorOnCompletion(@NotNull ScheduledPass successor) {
      mySuccessorsOnCompletion.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }
    private void addSuccessorOnSubmit(@NotNull ScheduledPass successor) {
      mySuccessorsOnSubmit.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }
  }
  /**
   * On the EDT (respecting modality and the pass's expiration condition), applies the pass's
   * collected highlighting to the editor, marks the file up to date for that pass id, and —
   * when this was the last pass of the session — stops the progress indicator and clears
   * stale submitted-pass entries. {@code callbackOnApplied} always runs last on the EDT path
   * (it releases this pass's on-completion successors).
   */
  private void applyInformationToEditorsLater(@NotNull FileEditor fileEditor,
                                              @NotNull HighlightingPass pass,
                                              @NotNull DaemonProgressIndicator updateProgress,
                                              @NotNull AtomicInteger threadsToStartCountdown,
                                              @NotNull Runnable callbackOnApplied) {
    ApplicationManager.getApplication().invokeLater(() -> {
      if (isDisposed() || !fileEditor.isValid()) {
        updateProgress.cancel();
      }
      if (updateProgress.isCanceled()) {
        log(updateProgress, pass, " is canceled during apply, sorry");
        return;
      }
      try {
        // Only apply to editors that are actually showing on screen.
        if (UIUtil.isShowing(fileEditor.getComponent())) {
          pass.applyInformationToEditor();
          repaintErrorStripeAndIcon(fileEditor);
          if (pass instanceof TextEditorHighlightingPass) {
            FileStatusMap fileStatusMap = DaemonCodeAnalyzerEx.getInstanceEx(myProject).getFileStatusMap();
            Document document = ((TextEditorHighlightingPass)pass).getDocument();
            int passId = ((TextEditorHighlightingPass)pass).getId();
            fileStatusMap.markFileUpToDate(document, passId);
          }
          log(updateProgress, pass, " Applied");
        }
      }
      catch (ProcessCanceledException e) {
        log(updateProgress, pass, "Error " + e);
        throw e;
      }
      catch (RuntimeException e) {
        VirtualFile file = fileEditor.getFile();
        FileType fileType = file == null ? null : file.getFileType();
        String message = "Exception while applying information to " + fileEditor + "("+fileType+")";
        log(updateProgress, pass, message + e);
        throw new RuntimeException(message, e);
      }
      if (threadsToStartCountdown.decrementAndGet() == 0) {
        // Last pass of the session: finish up and stop the shared indicator.
        HighlightingSessionImpl.waitForAllSessionsHighlightInfosApplied(updateProgress);
        log(updateProgress, pass, "Stopping ");
        updateProgress.stopIfRunning();
        clearStaleEntries();
      }
      else {
        log(updateProgress, pass, "Finished but there are passes in the queue: " + threadsToStartCountdown.get());
      }
      callbackOnApplied.run();
    }, updateProgress.getModalityState(), pass.getExpiredCondition());
  }
  // Drops submitted passes whose progress indicator was canceled; they will never be applied.
  private void clearStaleEntries() {
    mySubmittedPasses.keySet().removeIf(pass -> pass.myUpdateProgress.isCanceled());
  }
  // Repaints the error stripe and status icon for text editors; other editor kinds are no-ops.
  private void repaintErrorStripeAndIcon(@NotNull FileEditor fileEditor) {
    if (fileEditor instanceof TextEditor) {
      DefaultHighlightInfoProcessor.repaintErrorStripeAndIcon(((TextEditor)fileEditor).getEditor(), myProject);
    }
  }
  // True when either this service or the whole project has been disposed.
  private boolean isDisposed() {
    return isDisposed || myProject.isDisposed();
  }
@NotNull
List<HighlightingPass> getAllSubmittedPasses() {
List<HighlightingPass> result = new ArrayList<>(mySubmittedPasses.size());
for (ScheduledPass scheduledPass : mySubmittedPasses.keySet()) {
if (!scheduledPass.myUpdateProgress.isCanceled()) {
result.add(scheduledPass.myPass);
}
}
return result;
}
  // Sorts passes in-place by their numeric id (NOTE(review): ContainerUtil.quickSort is
  // presumably not a stable sort — ties between equal ids may reorder; confirm if that matters).
  private static void sortById(@NotNull List<? extends TextEditorHighlightingPass> result) {
    ContainerUtil.quickSort(result, Comparator.comparingInt(TextEditorHighlightingPass::getId));
  }
private static int getThreadNum() {
Matcher matcher = Pattern.compile("JobScheduler FJ pool (\\d*)/(\\d*)").matcher(Thread.currentThread().getName());
String num = matcher.matches() ? matcher.group(1) : null;
return StringUtil.parseInt(num, 0);
}
  /**
   * Debug-logs a pass's progress, indented proportionally to the worker-thread number so
   * interleaved output from concurrent passes can be told apart. The class-level synchronized
   * block presumably keeps concurrent log lines ordered — the message itself is built and
   * emitted inside it.
   */
  static void log(ProgressIndicator progressIndicator, HighlightingPass pass, @NonNls Object @NotNull ... info) {
    if (LOG.isDebugEnabled()) {
      Document document = pass instanceof TextEditorHighlightingPass ? ((TextEditorHighlightingPass)pass).getDocument() : null;
      CharSequence docText = document == null ? "" : ": '" + StringUtil.first(document.getCharsSequence(), 10, true)+ "'";
      synchronized (PassExecutorService.class) {
        String infos = StringUtil.join(info, Functions.TO_STRING(), " ");
        String message = StringUtil.repeatSymbol(' ', getThreadNum() * 4)
                         + " " + pass + " "
                         + infos
                         + "; progress=" + (progressIndicator == null ? null : progressIndicator.hashCode())
                         + " " + (progressIndicator == null ? "?" : progressIndicator.isCanceled() ? "X" : "V")
                         + docText;
        LOG.debug(message);
        //System.out.println(message);
      }
    }
  }
  private static final Key<Throwable> THROWABLE_KEY = Key.create("THROWABLE_KEY");

  // Records the first failure of a pass on its progress indicator ("IfAbsent": later
  // exceptions do not overwrite it); read back via getSavedException() in tests.
  static void saveException(@NotNull Throwable e, @NotNull DaemonProgressIndicator indicator) {
    indicator.putUserDataIfAbsent(THROWABLE_KEY, e);
  }
  // Test-only accessor for the exception stored by saveException(), or null if none.
  @TestOnly
  static Throwable getSavedException(@NotNull DaemonProgressIndicator indicator) {
    return indicator.getUserData(THROWABLE_KEY);
  }
  /**
   * Waits for all submitted pass jobs to complete, allowing up to {@code millis} per job
   * (so the total wait can exceed {@code millis} when several jobs are pending).
   *
   * @return {@code true} if the jobs terminated (interruption also counts as terminated),
   *         {@code false} on timeout
   * @throws Throwable the original cause when a job failed with an exception
   */
  boolean waitFor(int millis) throws Throwable {
    try {
      for (Job<Void> job : mySubmittedPasses.values()) {
        job.waitForCompletion(millis);
      }
      return true;
    }
    catch (TimeoutException ignored) {
      return false;
    }
    catch (InterruptedException e) {
      return true;
    }
    catch (ExecutionException e) {
      throw e.getCause();
    }
  }
}
| jwren/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/PassExecutorService.java | Java | apache-2.0 | 25,837 |
/*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.runtime.subsystem.elytron.wizardpassword;
/**
 * Steps of the Elytron password wizard: first choose the password type,
 * then configure it, then review the result.
 */
public enum PasswordState {
    CHOOSE_PASSWORD_TYPE,
    CONFIGURATION,
    REVIEW
}
| hpehl/hal.next | app/src/main/java/org/jboss/hal/client/runtime/subsystem/elytron/wizardpassword/PasswordState.java | Java | apache-2.0 | 787 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rave.portal.repository.impl;
import org.apache.rave.exception.NotSupportedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.rave.exception.DataSerializationException;
import org.apache.rave.model.ApplicationData;
import org.apache.rave.portal.model.JpaApplicationData;
import org.apache.rave.portal.model.conversion.JpaApplicationDataConverter;
import org.apache.rave.portal.repository.ApplicationDataRepository;
import org.apache.rave.util.CollectionUtils;
import org.apache.rave.util.JsonUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Lob;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.rave.persistence.jpa.util.JpaUtil.getSingleResult;
import static org.apache.rave.persistence.jpa.util.JpaUtil.saveOrUpdate;
/**
 * JPA-backed {@link ApplicationDataRepository}. Rows are persisted as
 * {@link JpaSerializableApplicationData}, which snapshots the appdata map into a JSON string
 * stored in a LOB column; the map is re-inflated from that string on every load.
 */
@Repository
public class JpaApplicationDataRepository implements ApplicationDataRepository {

    @PersistenceContext
    private EntityManager manager;

    // Converts generic ApplicationData models into JPA entities before persisting.
    @Autowired
    private JpaApplicationDataConverter converter;

    @Override
    public Class<? extends ApplicationData> getType() {
        return JpaApplicationData.class;
    }

    @Override
    public ApplicationData get(String id) {
        // NOTE(review): the cast assumes every persisted row is a JpaSerializableApplicationData,
        // which holds as long as all writes go through save() below — confirm no other writers exist.
        JpaSerializableApplicationData applicationData = (JpaSerializableApplicationData) manager.find(JpaApplicationData.class, Long.parseLong(id));
        if (applicationData != null) {
            applicationData.deserializeData();
        }
        return applicationData;
    }

    @Override
    @Transactional
    public JpaApplicationData save(ApplicationData item) {
        JpaApplicationData jpaAppData = converter.convert(item);
        JpaSerializableApplicationData jpaSerializableApplicationData = getJpaSerializableApplicationData(jpaAppData);
        // Snapshot the data map into the serialized JSON column before flushing to the database.
        jpaSerializableApplicationData.serializeData();
        return saveOrUpdate(jpaSerializableApplicationData.getEntityId(), manager, jpaSerializableApplicationData);
    }

    @Override
    public void delete(ApplicationData item) {
        // Re-load through get() when handed a non-JPA model so the managed entity is removed.
        manager.remove(item instanceof JpaApplicationData ? item : get(item.getId()));
    }

    @Override
    public List<ApplicationData> getAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    @Override
    public List<ApplicationData> getLimitedList(int offset, int limit) {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    @Override
    public int getCountAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    @Override
    public List<ApplicationData> getApplicationData(List<String> userIds, String appId) {
        //if the call is only looking for data for a single user use the more efficient single user variant transparently
        if (userIds.size() == 1) {
            List<ApplicationData> data = new ArrayList<ApplicationData>();
            ApplicationData applicationData = getApplicationData(userIds.get(0), appId);
            if (applicationData != null) {
                data.add(applicationData);
            }
            return data;
        }

        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_IDS_AND_APP_ID,
                JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_IDS_PARAM, userIds);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        List<JpaSerializableApplicationData> results = query.getResultList();
        // Inflate the JSON column back into each entity's data map before returning.
        for (JpaSerializableApplicationData applicationData : results) {
            applicationData.deserializeData();
        }
        return CollectionUtils.<ApplicationData>toBaseTypedList(results);
    }

    @Override
    public JpaApplicationData getApplicationData(String personId, String appId) {
        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_ID_AND_APP_ID,
                JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_ID_PARAM, personId);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        JpaSerializableApplicationData applicationData = getSingleResult(query.getResultList());
        if (applicationData != null) {
            applicationData.deserializeData();
        }
        return applicationData;
    }

    // Wraps a plain JpaApplicationData into the serializable subclass used for persistence;
    // passes instances that are already serializable straight through.
    private JpaSerializableApplicationData getJpaSerializableApplicationData(JpaApplicationData applicationData) {
        if (applicationData instanceof JpaSerializableApplicationData) {
            return (JpaSerializableApplicationData) applicationData;
        }

        return new JpaSerializableApplicationData(applicationData.getEntityId(), applicationData.getUserId(),
                applicationData.getAppUrl(), applicationData.getData());
    }

    /**
     * This class is here so that the details of the persistence strategy in use for serializing the appdata map to a
     * JSON string doesnt end up being reflected in any public API of the ApplicationData object itself.
     * <p/>
     * This allows the public API of this repository to deal in clean ApplicationData models, but under the covers it
     * uses this model for the actual persistence to the database.
     */
    @Entity
    public static class JpaSerializableApplicationData extends JpaApplicationData {
        @Lob
        @Column(name = "serialized_data")
        private String serializedData;

        public JpaSerializableApplicationData() {
            super();
        }

        public JpaSerializableApplicationData(Long entityId, String userId, String appUrl, Map<String, Object> data) {
            super(entityId, userId, appUrl, data);
        }

        // Writes the in-memory data map into the serialized JSON column (no-op when null).
        public void serializeData() {
            Map<String, Object> data = this.getData();

            if (data != null) {
                serializedData = JsonUtils.stringify(data);
            }
        }

        // Restores the in-memory data map from the serialized JSON column (no-op when blank).
        @SuppressWarnings("unchecked")
        public void deserializeData() {
            if (serializedData != null && StringUtils.isNotBlank(serializedData)) {
                this.setData(JsonUtils.parse(serializedData, Map.class));
            }
        }
    }
}
| kidaa/rave | rave-components/rave-jpa/src/main/java/org/apache/rave/portal/repository/impl/JpaApplicationDataRepository.java | Java | apache-2.0 | 7,525 |
/*
* Copyright 2015 John Ahlroos
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.tabsheet;
import com.vaadin.shared.ui.tabsheet.TabsheetState;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DDLayoutState;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DragAndDropAwareState;
/**
 * Shared (server/client) state for the drag-and-drop aware tab sheet.
 * Field names are part of the Vaadin state wire format — do not rename.
 */
public class DDTabSheetState extends TabsheetState
        implements DragAndDropAwareState {

    public static final float DEFAULT_HORIZONTAL_DROP_RATIO = 0.2f;

    // Ratio (0..1) governing the left/right drop zones of a tab — presumably the fraction
    // of the tab's width at each edge counted as an edge drop; confirm against the client impl.
    public float tabLeftRightDropRatio = DEFAULT_HORIZONTAL_DROP_RATIO;

    public DDLayoutState ddState = new DDLayoutState();

    @Override
    public DDLayoutState getDragAndDropState() {
        return ddState;
    }
}
| dimone-kun/cuba | modules/web-widgets/src/com/haulmont/cuba/web/widgets/client/addons/dragdroplayouts/ui/tabsheet/DDTabSheetState.java | Java | apache-2.0 | 1,315 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fontbox.ttf;
import java.io.IOException;
/**
 * The 'hmtx' (horizontal metrics) table in a TrueType font: per-glyph advance widths
 * and left side bearings.
 *
 * @author Ben Litchfield
 */
public class HorizontalMetricsTable extends TTFTable
{
    /**
     * A tag that identifies this table type.
     */
    public static final String TAG = "hmtx";

    // Advance widths for the first numHMetrics glyphs.
    private int[] advanceWidth;
    // Left side bearings paired with the advance widths above.
    private short[] leftSideBearing;
    // Left side bearings for glyphs beyond numHMetrics (those share the last advance width).
    private short[] nonHorizontalLeftSideBearing;
    private int numHMetrics;

    HorizontalMetricsTable(TrueTypeFont font)
    {
        super(font);
    }

    /**
     * This will read the required data from the stream.
     *
     * @param ttf The font that is being read.
     * @param data The stream to read the data from.
     * @throws IOException If there is an error reading the data.
     */
    @Override
    void read(TrueTypeFont ttf, TTFDataStream data) throws IOException
    {
        HorizontalHeaderTable hHeader = ttf.getHorizontalHeader();
        if (hHeader == null)
        {
            throw new IOException("Could not get hmtx table");
        }
        numHMetrics = hHeader.getNumberOfHMetrics();
        int numGlyphs = ttf.getNumberOfGlyphs();
        int bytesRead = 0;
        advanceWidth = new int[ numHMetrics ];
        leftSideBearing = new short[ numHMetrics ];
        // Each of the first numHMetrics entries is a (uint16 advance, int16 lsb) pair = 4 bytes.
        for( int i=0; i<numHMetrics; i++ )
        {
            advanceWidth[i] = data.readUnsignedShort();
            leftSideBearing[i] = data.readSignedShort();
            bytesRead += 4;
        }
        int numberNonHorizontal = numGlyphs - numHMetrics;

        // handle bad fonts with too many hmetrics
        if (numberNonHorizontal < 0)
        {
            numberNonHorizontal = numGlyphs;
        }

        // make sure that table is never null and correct size, even with bad fonts that have no
        // "leftSideBearing" table although they should
        nonHorizontalLeftSideBearing = new short[numberNonHorizontal];

        // Read trailing lsb-only entries (int16 each), but never past the declared table length.
        if (bytesRead < getLength())
        {
            for( int i=0; i<numberNonHorizontal; i++ )
            {
                if (bytesRead < getLength())
                {
                    nonHorizontalLeftSideBearing[i] = data.readSignedShort();
                    bytesRead += 2;
                }
            }
        }
        initialized = true;
    }

    /**
     * Returns the advance width for the given GID.
     *
     * @param gid GID
     * @return the advance width; glyphs beyond numHMetrics share the last stored width,
     *         and an empty table yields the fallback value 250
     */
    public int getAdvanceWidth(int gid)
    {
        if (advanceWidth.length == 0)
        {
            // Font has no hmtx entries at all — fall back to an arbitrary default width.
            return 250;
        }
        if (gid < numHMetrics)
        {
            return advanceWidth[gid];
        }
        else
        {
            // monospaced fonts may not have a width for every glyph
            // the last one is for subsequent glyphs
            return advanceWidth[advanceWidth.length -1];
        }
    }

    /**
     * Returns the left side bearing for the given GID.
     *
     * @param gid GID
     * @return the left side bearing, or 0 when the table has no entries
     */
    public int getLeftSideBearing(int gid)
    {
        if (leftSideBearing.length == 0)
        {
            return 0;
        }
        if (gid < numHMetrics)
        {
            return leftSideBearing[gid];
        }
        else
        {
            return nonHorizontalLeftSideBearing[gid - numHMetrics];
        }
    }
}
| apache/pdfbox | fontbox/src/main/java/org/apache/fontbox/ttf/HorizontalMetricsTable.java | Java | apache-2.0 | 4,062 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.asterix.external.library.java.base;
import org.apache.asterix.external.api.IJObject;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.container.IObjectPool;
/**
 * Base class for complex (container-like) Java-side object wrappers. Holds the object pool
 * from which nested {@link IJObject} instances are presumably allocated by subclasses.
 */
public abstract class JComplexObject<T> implements IJObject<T> {

    protected IObjectPool<IJObject, IAType> pool;

    public void setPool(IObjectPool<IJObject, IAType> pool) {
        this.pool = pool;
    }

}
| apache/incubator-asterixdb | asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/base/JComplexObject.java | Java | apache-2.0 | 1,245 |
package com.cloudhopper.commons.charset.demo;
/*
* #%L
* ch-commons-charset
* %%
* Copyright (C) 2012 Cloudhopper by Twitter
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.cloudhopper.commons.charset.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Demo: normalizes a string containing the CJK character U+6025 and the euro sign U+20AC
 * against the UTF-8 charset via {@code CharsetUtil.normalize} and logs the source and
 * target strings for comparison.
 *
 * @author joelauer
 */
public class Charset5Main {
    private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class);

    static public void main(String[] args) throws Exception {
        String sourceString = "h\u6025\u20ACllo";
        // NOTE(review): normalize() presumably replaces characters unrepresentable in the
        // target charset — confirm against CharsetUtil's documentation.
        String targetString = CharsetUtil.normalize(sourceString, CharsetUtil.CHARSET_UTF_8);

        logger.debug("source string: " + sourceString);
        logger.debug("target string: " + targetString);
    }
}
| twitter/cloudhopper-commons | ch-commons-charset/src/test/java/com/cloudhopper/commons/charset/demo/Charset5Main.java | Java | apache-2.0 | 1,291 |
package org.anddev.andengine.opengl.texture;
import java.util.*;
import org.anddev.andengine.opengl.texture.source.*;
import org.anddev.andengine.util.*;
import org.anddev.andengine.opengl.texture.builder.*;
import android.graphics.*;
public class BuildableTexture extends Texture
{
private final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace;
public BuildableTexture(final int n, final int n2) {
super(n, n2, TextureOptions.DEFAULT, null);
this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
}
public BuildableTexture(final int n, final int n2, final ITextureStateListener textureStateListener) {
super(n, n2, TextureOptions.DEFAULT, textureStateListener);
this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
}
public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions) throws IllegalArgumentException {
super(n, n2, textureOptions, null);
this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
}
public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions, final ITextureStateListener textureStateListener) throws IllegalArgumentException {
super(n, n2, textureOptions, textureStateListener);
this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
}
@Deprecated
@Override
public TextureSourceWithLocation addTextureSource(final ITextureSource textureSource, final int n, final int n2) {
return super.addTextureSource(textureSource, n, n2);
}
public void addTextureSource(final ITextureSource textureSource, final Callback<TextureSourceWithLocation> callback) {
this.mTextureSourcesToPlace.add(new TextureSourceWithWithLocationCallback(textureSource, callback));
}
public void build(final ITextureBuilder textureBuilder) throws ITextureBuilder.TextureSourcePackingException {
textureBuilder.pack(this, this.mTextureSourcesToPlace);
this.mTextureSourcesToPlace.clear();
this.mUpdateOnHardwareNeeded = true;
}
@Override
public void clearTextureSources() {
super.clearTextureSources();
this.mTextureSourcesToPlace.clear();
}
public void removeTextureSource(final ITextureSource textureSource) {
final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace = this.mTextureSourcesToPlace;
for (int i = -1 + mTextureSourcesToPlace.size(); i >= 0; --i) {
if (mTextureSourcesToPlace.get(i).mTextureSource == textureSource) {
mTextureSourcesToPlace.remove(i);
this.mUpdateOnHardwareNeeded = true;
return;
}
}
}
public static class TextureSourceWithWithLocationCallback implements ITextureSource
{
private final Callback<TextureSourceWithLocation> mCallback;
private final ITextureSource mTextureSource;
public TextureSourceWithWithLocationCallback(final ITextureSource mTextureSource, final Callback<TextureSourceWithLocation> mCallback) {
super();
this.mTextureSource = mTextureSource;
this.mCallback = mCallback;
}
@Override
public TextureSourceWithWithLocationCallback clone() {
return null;
}
public Callback<TextureSourceWithLocation> getCallback() {
return this.mCallback;
}
@Override
public int getHeight() {
return this.mTextureSource.getHeight();
}
public ITextureSource getTextureSource() {
return this.mTextureSource;
}
@Override
public int getWidth() {
return this.mTextureSource.getWidth();
}
@Override
public Bitmap onLoadBitmap() {
return this.mTextureSource.onLoadBitmap();
}
@Override
public String toString() {
return this.mTextureSource.toString();
}
}
}
| rLadia/AttacknidPatch | decompiled_src/Procyon/org/anddev/andengine/opengl/texture/BuildableTexture.java | Java | apache-2.0 | 4,239 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.dmn.engine.impl.parser;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.activiti.dmn.engine.ActivitiDmnException;
import org.activiti.dmn.engine.DmnEngineConfiguration;
import org.activiti.dmn.engine.impl.context.Context;
import org.activiti.dmn.engine.impl.io.InputStreamSource;
import org.activiti.dmn.engine.impl.io.ResourceStreamSource;
import org.activiti.dmn.engine.impl.io.StreamSource;
import org.activiti.dmn.engine.impl.io.StringStreamSource;
import org.activiti.dmn.engine.impl.io.UrlStreamSource;
import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntity;
import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntity;
import org.activiti.dmn.model.Decision;
import org.activiti.dmn.model.DmnDefinition;
import org.activiti.dmn.xml.constants.DmnXMLConstants;
import org.activiti.dmn.xml.converter.DmnXMLConverter;
import org.activiti.dmn.xml.exception.DmnXMLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Specific parsing of one BPMN 2.0 XML file, created by the {@link DmnParse}.
*
* @author Tijs Rademakers
* @author Joram Barrez
*/
public class DmnParse implements DmnXMLConstants {
protected static final Logger LOGGER = LoggerFactory.getLogger(DmnParse.class);
  // Resource name of the XML being parsed; stored on each created decision table.
  protected String name;

  // Whether XSD schema validation is performed while converting the XML (on by default).
  protected boolean validateSchema = true;

  // The XML input to parse.
  protected StreamSource streamSource;
  protected String sourceSystemId;

  // Parse result: the full DMN model read from the XML.
  protected DmnDefinition dmnDefinition;
  protected String targetNamespace;

  /** The deployment to which the parsed decision tables will be added. */
  protected DmnDeploymentEntity deployment;

  /** The end result of the parsing: a list of decision tables. */
  protected List<DecisionTableEntity> decisionTables = new ArrayList<DecisionTableEntity>();
public DmnParse deployment(DmnDeploymentEntity deployment) {
this.deployment = deployment;
return this;
}
public DmnParse execute(DmnEngineConfiguration dmnEngineConfig) {
try {
DmnXMLConverter converter = new DmnXMLConverter();
boolean enableSafeDmnXml = dmnEngineConfig.isEnableSafeDmnXml();
String encoding = dmnEngineConfig.getXmlEncoding();
if (encoding != null) {
dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml, encoding);
} else {
dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml);
}
if (dmnDefinition != null && dmnDefinition.getDecisions() != null) {
for (Decision decision : dmnDefinition.getDecisions()) {
DecisionTableEntity decisionTableEntity = Context.getDmnEngineConfiguration().getDecisionTableEntityManager().create();
decisionTableEntity.setKey(decision.getId());
decisionTableEntity.setName(decision.getName());
decisionTableEntity.setResourceName(name);
decisionTableEntity.setDeploymentId(deployment.getId());
decisionTableEntity.setParentDeploymentId(deployment.getParentDeploymentId());
decisionTableEntity.setDescription(decision.getDescription());
decisionTables.add(decisionTableEntity);
}
}
} catch (Exception e) {
if (e instanceof ActivitiDmnException) {
throw (ActivitiDmnException) e;
} else if (e instanceof DmnXMLException) {
throw (DmnXMLException) e;
} else {
throw new ActivitiDmnException("Error parsing XML", e);
}
}
return this;
}
public DmnParse name(String name) {
this.name = name;
return this;
}
public DmnParse sourceInputStream(InputStream inputStream) {
if (name == null) {
name("inputStream");
}
setStreamSource(new InputStreamSource(inputStream));
return this;
}
public DmnParse sourceUrl(URL url) {
if (name == null) {
name(url.toString());
}
setStreamSource(new UrlStreamSource(url));
return this;
}
public DmnParse sourceUrl(String url) {
try {
return sourceUrl(new URL(url));
} catch (MalformedURLException e) {
throw new ActivitiDmnException("malformed url: " + url, e);
}
}
public DmnParse sourceResource(String resource) {
if (name == null) {
name(resource);
}
setStreamSource(new ResourceStreamSource(resource));
return this;
}
public DmnParse sourceString(String string) {
if (name == null) {
name("string");
}
setStreamSource(new StringStreamSource(string));
return this;
}
protected void setStreamSource(StreamSource streamSource) {
if (this.streamSource != null) {
throw new ActivitiDmnException("invalid: multiple sources " + this.streamSource + " and " + streamSource);
}
this.streamSource = streamSource;
}
public String getSourceSystemId() {
return sourceSystemId;
}
public DmnParse setSourceSystemId(String sourceSystemId) {
this.sourceSystemId = sourceSystemId;
return this;
}
/*
* ------------------- GETTERS AND SETTERS -------------------
*/
public boolean isValidateSchema() {
return validateSchema;
}
public void setValidateSchema(boolean validateSchema) {
this.validateSchema = validateSchema;
}
public List<DecisionTableEntity> getDecisionTables() {
return decisionTables;
}
public String getTargetNamespace() {
return targetNamespace;
}
public DmnDeploymentEntity getDeployment() {
return deployment;
}
public void setDeployment(DmnDeploymentEntity deployment) {
this.deployment = deployment;
}
public DmnDefinition getDmnDefinition() {
return dmnDefinition;
}
public void setDmnDefinition(DmnDefinition dmnDefinition) {
this.dmnDefinition = dmnDefinition;
}
}
| stefan-ziel/Activiti | modules/activiti-dmn-engine/src/main/java/org/activiti/dmn/engine/impl/parser/DmnParse.java | Java | apache-2.0 | 6,382 |
/*
Derby - Class org.apache.derbyBuild.MessageVetter
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyBuild;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
/**
 * Class that checks the message files for common problems, in particular
 * single-quote usage that would break {@code java.text.MessageFormat}
 * patterns, and patterns that do not compile at all.
 */
public class MessageVetter {
    /**
     * <p>
     * Check all the message translations in the specified directories for
     * common problems. Assume that all properties files in the directories
     * are message translations.
     * </p>
     *
     * <p>
     * If a problem is found, an error will be raised.
     * </p>
     *
     * @param args names of the directories to check
     */
    public static void main(String[] args) throws IOException {
        FileFilter filter = new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().endsWith(".properties");
            }
        };
        for (String directory : args) {
            for (File file : new File(directory).listFiles(filter)) {
                new MessageVetter(file).vet();
            }
        }
    }

    /**
     * A regular expression that matches a single-quote character that is
     * neither preceeded nor followed by another single-quote character. Used
     * by {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that messages contain two single-quotes in order to produce a
     * single apostrophe (dictated by {@code java.text.MessageFormat}).
     */
    private static final Pattern LONE_QUOTE_PATTERN =
            Pattern.compile("^'[^']|[^']'[^']|[^']'$");

    /**
     * A regular expression that matches a single-quote character that have
     * no adjacent single-quote or curly brace character. Used by
     * {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that all single-quotes are either correctly formatted apostrophes
     * or used for quoting curly braces, as required by
     * {@code java.text.MessageFormat}.
     */
    private static final Pattern LONE_QUOTE_ALLOWED_PATTERN =
            Pattern.compile("^'[^'{}]|[^'{}]'[^'{}]|[^'{}]'$");

    /**
     * A set of message identifiers in whose messages single-quotes may legally
     * appear with no adjacent single-quote character. This will be messages
     * where the single-quotes are needed to quote curly braces that should
     * appear literally in the message text.
     */
    private static final Set<String> LONE_QUOTE_ALLOWED = new HashSet<String>();
    static {
        // The IJ help text contains curly braces that need quoting.
        LONE_QUOTE_ALLOWED.add("IJ_HelpText");
        // Some of the DRDA usage messages contain the text {on|off}, which
        // needs quoting.
        LONE_QUOTE_ALLOWED.add("DRDA_Usage8.I");
        LONE_QUOTE_ALLOWED.add("DRDA_Usage11.I");
        LONE_QUOTE_ALLOWED.add("PE_HelpText");
    }

    /** The message file to check. */
    private final File file;

    /** The properties found in the message file. */
    private final Properties properties;

    /**
     * Create a new {@code MessageVetter} instance.
     *
     * @param file the file with the messages to check
     * @throws IOException if the file cannot be loaded
     */
    private MessageVetter(File file) throws IOException {
        this.file = file;
        properties = new Properties();
        // try-with-resources guarantees the stream is closed even if load() fails.
        try (FileInputStream in = new FileInputStream(file)) {
            properties.load(in);
        }
    }

    /**
     * Vet the messages in this file. An error will be raised if an
     * ill-formatted message is found.
     */
    private void vet() {
        // stringPropertyNames() avoids the raw Enumeration and the cast.
        for (String key : properties.stringPropertyNames()) {
            vetMessage(key, properties.getProperty(key));
        }
    }

    /**
     * Vet a specific message. Raise an error if it is not well-formed.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void vetMessage(String key, String message) {
        checkSingleQuotes(key, message);
        checkValidMessageFormat(key, message);
    }

    /**
     * Check that single-quote characters are doubled, as required by
     * {@code java.text.MessageFormat}. Raise an error otherwise.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkSingleQuotes(String key, String message) {
        Pattern p;
        if (LONE_QUOTE_ALLOWED.contains(key)) {
            // In some messages we allow lone single-quote characters, but
            // only if they are used to quote curly braces. Use a regular
            // expression that finds all single-quotes that aren't adjacent to
            // another single-quote or a curly brace character.
            p = LONE_QUOTE_ALLOWED_PATTERN;
        } else {
            // Otherwise, we don't allow lone single-quote characters at all.
            p = LONE_QUOTE_PATTERN;
        }
        if (p.matcher(message).find()) {
            throw new AssertionError("Lone single-quote in message " + key +
                    " in " + file + ".\nThis is OK if it is used for quoting " +
                    "special characters in the message. If this is what the " +
                    "character is used for, add an exception in " +
                    getClass().getName() + ".LONE_QUOTE_ALLOWED.");
        }
    }

    /**
     * Check that a message format specifier is valid. Raise an error if it
     * is not.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkValidMessageFormat(String key, String message) {
        try {
            // See if a MessageFormat instance can be produced based on this
            // message format specifier.
            new MessageFormat(message);
        } catch (Exception e) {
            AssertionError ae = new AssertionError(
                    "Message " + key + " in " + file + " isn't a valid " +
                    "java.text.MessageFormat pattern.");
            ae.initCause(e);
            throw ae;
        }
    }
}
| apache/derby | java/build/org/apache/derbyBuild/MessageVetter.java | Java | apache-2.0 | 7,276 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.parser;
import java.io.IOException;
import java.io.StringReader;
import org.apache.tomcat.util.collections.ConcurrentCache;
/**
 * Caches the results of parsing content-type header values so that repeated
 * headers do not have to be re-parsed.
 */
public class MediaTypeCache {

    private final ConcurrentCache<String,String[]> cache;

    public MediaTypeCache(int size) {
        cache = new ConcurrentCache<>(size);
    }

    /**
     * Returns the cached parse result for the given content-type header value,
     * parsing and caching it on a miss.
     *
     * @param input The content-type header value to parse
     * @return a two element array: element 0 is the media type without the
     *         charset, element 1 is the charset; {@code null} if the input
     *         cannot be parsed
     */
    public String[] parse(String input) {
        String[] cached = cache.get(input);
        if (cached != null) {
            return cached;
        }

        MediaType mediaType;
        try {
            mediaType = MediaType.parseMediaType(new StringReader(input));
        } catch (IOException e) {
            // Unparseable input - treat it the same as a failed parse.
            mediaType = null;
        }

        if (mediaType == null) {
            return null;
        }

        String[] parsed = new String[] { mediaType.toStringNoCharset(), mediaType.getCharset() };
        cache.put(input, parsed);
        return parsed;
    }
}
| IAMTJW/Tomcat-8.5.20 | tomcat-8.5.20/java/org/apache/tomcat/util/http/parser/MediaTypeCache.java | Java | apache-2.0 | 2,331 |
package io.agrest.it.fixture.cayenne;
import io.agrest.it.fixture.cayenne.auto._E15E1;
// Cayenne entity class; the generated superclass _E15E1 holds the mapped
// properties, and any custom business logic belongs here.
public class E15E1 extends _E15E1 {

    // Required for Serializable entities; bump if the class shape changes.
    private static final long serialVersionUID = 1L;

}
| AbleOne/link-rest | agrest/src/test/java/io/agrest/it/fixture/cayenne/E15E1.java | Java | apache-2.0 | 183 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.error;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields;
import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields;
import static org.assertj.core.util.Sets.newLinkedHashSet;
import java.util.LinkedHashSet;
import org.assertj.core.description.Description;
import org.assertj.core.description.TextDescription;
import org.assertj.core.presentation.Representation;
import org.assertj.core.presentation.StandardRepresentation;
import org.assertj.core.test.Player;
import org.assertj.core.util.Sets;
import org.junit.jupiter.api.Test;
/**
 * Tests for
 * <code>{@link ShouldOnlyHaveFields#create(Description, Representation)}</code>,
 * covering both the public-field and declared-field variants, with and without
 * missing/unexpected fields.
 *
 * @author Filip Hrisafov
 */
public class ShouldOnlyHaveFields_create_Test {

    private static final LinkedHashSet<String> EMPTY_STRING_SET = Sets.<String> newLinkedHashSet();

    @Test
    public void should_create_error_message_for_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                newLinkedHashSet("name", "team"),
                newLinkedHashSet("nickname"),
                newLinkedHashSet("address"));

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following public accessible fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "fields not found:%n" +
                "  <[\"nickname\"]>%n" +
                "and fields not expected:%n" +
                "  <[\"address\"]>"));
    }

    @Test
    public void should_not_display_unexpected_fields_when_there_are_none_for_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                newLinkedHashSet("name", "team"),
                newLinkedHashSet("nickname"),
                EMPTY_STRING_SET);

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following public accessible fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "but could not find the following fields:%n" +
                "  <[\"nickname\"]>"));
    }

    @Test
    public void should_not_display_fields_not_found_when_there_are_none_for_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                newLinkedHashSet("name", "team"),
                EMPTY_STRING_SET,
                newLinkedHashSet("address"));

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following public accessible fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "but the following fields were unexpected:%n" +
                "  <[\"address\"]>"));
    }

    @Test
    public void should_create_error_message_for_declared_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                newLinkedHashSet("name", "team"),
                newLinkedHashSet("nickname"),
                newLinkedHashSet("address"));

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following declared fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "fields not found:%n" +
                "  <[\"nickname\"]>%n" +
                "and fields not expected:%n" +
                "  <[\"address\"]>"));
    }

    @Test
    public void should_not_display_unexpected_fields_when_there_are_none_for_declared_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                newLinkedHashSet("name", "team"),
                newLinkedHashSet("nickname"),
                EMPTY_STRING_SET);

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following declared fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "but could not find the following fields:%n" +
                "  <[\"nickname\"]>"));
    }

    @Test
    public void should_not_display_fields_not_found_when_there_are_none_for_declared_fields() {
        ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                newLinkedHashSet("name", "team"),
                EMPTY_STRING_SET,
                newLinkedHashSet("address"));

        String message = factory.create(new TextDescription("Test"), new StandardRepresentation());

        // Use the statically imported format(...) for consistency with the
        // other tests in this class (was String.format).
        assertThat(message).isEqualTo(format("[Test] %n" +
                "Expecting%n" +
                "  <org.assertj.core.test.Player>%n" +
                "to only have the following declared fields:%n" +
                "  <[\"name\", \"team\"]>%n" +
                "but the following fields were unexpected:%n" +
                "  <[\"address\"]>"));
    }
}
| xasx/assertj-core | src/test/java/org/assertj/core/error/ShouldOnlyHaveFields_create_Test.java | Java | apache-2.0 | 8,163 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.settings;
import com.intellij.icons.AllIcons;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.editor.Editor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
 * The "gear" action allowing to configure merge tool visual preferences, such as
 * displaying whitespaces, line numbers, indent guides and soft wraps. Each child
 * action toggles one {@link DiffMergeEditorSetting}, persists the choice in
 * {@link DiffMergeSettings} and applies it to all participating editors.
 *
 * @see DiffMergeSettings
 */
public class DiffMergeSettingsAction extends ActionGroup {

  @NotNull private final Collection<Editor> myEditors;
  @NotNull private final DiffMergeSettings mySettings;

  public DiffMergeSettingsAction(@NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) {
    super("Settings", null, AllIcons.General.GearPlain);
    setPopup(true);
    myEditors = editors;
    mySettings = settings;
  }

  @NotNull
  @Override
  public AnAction[] getChildren(@Nullable AnActionEvent e) {
    // One toggle per supported editor preference; order defines menu order.
    AnAction[] toggles = {
      new DiffMergeToggleAction("EditorToggleShowWhitespaces", DiffMergeEditorSetting.WHITESPACES, myEditors, mySettings),
      new DiffMergeToggleAction("EditorToggleShowLineNumbers", DiffMergeEditorSetting.LINE_NUMBERS, myEditors, mySettings),
      new DiffMergeToggleAction("EditorToggleShowIndentLines", DiffMergeEditorSetting.INDENT_LINES, myEditors, mySettings),
      new DiffMergeToggleAction("EditorToggleUseSoftWraps", DiffMergeEditorSetting.SOFT_WRAPS, myEditors, mySettings)
    };
    return toggles;
  }

  /** Toggle for a single editor preference, persisted in DiffMergeSettings. */
  private static class DiffMergeToggleAction extends ToggleAction {

    @NotNull private final DiffMergeEditorSetting mySetting;
    @NotNull private final Collection<Editor> myEditors;
    @NotNull private final DiffMergeSettings mySettings;

    private DiffMergeToggleAction(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting, @NotNull Collection<Editor> editors,
                                  @NotNull DiffMergeSettings settings) {
      super(ActionsBundle.actionText(actionId), ActionsBundle.actionDescription(actionId), null);
      mySetting = setting;
      myEditors = editors;
      mySettings = settings;
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return mySettings.getPreference(mySetting);
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      // Persist first, then push the new state into every open editor.
      mySettings.setPreference(mySetting, state);
      for (Editor editor : myEditors) {
        mySetting.apply(editor, state);
      }
    }
  }
}
| goodwinnk/intellij-community | platform/platform-impl/src/com/intellij/openapi/diff/impl/settings/DiffMergeSettingsAction.java | Java | apache-2.0 | 3,098 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2019 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.test.response.cookies;
import java.util.Arrays;
import java.util.Comparator;
import javax.servlet.ServletException;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.test.util.DeploymentUtils;
import io.undertow.testutils.DefaultServer;
import io.undertow.testutils.HttpClientUtils;
import io.undertow.testutils.TestHttpClient;
import io.undertow.util.StatusCodes;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Test for response.addCookie: verifies the Set-Cookie headers produced for
 * plain, duplicate, overwritten and JSESSIONID cookies.
 *
 * @author Flavia Rainone
 */
@RunWith(DefaultServer.class)
public class ResponseCookiesTestCase {

    @BeforeClass
    public static void setup() throws ServletException {
        DeploymentUtils.setupServlet(
                new ServletInfo("add-cookies", AddCookiesServlet.class)
                        .addMapping("/add-cookies"),
                new ServletInfo("duplicate-cookies", DuplicateCookiesServlet.class)
                        .addMapping("/duplicate-cookies"),
                new ServletInfo("overwrite-cookies", OverwriteCookiesServlet.class)
                        .addMapping("/overwrite-cookies"),
                new ServletInfo("jsessionid-cookies", JSessionIDCookiesServlet.class)
                        .addMapping("/jsessionid-cookies"));
    }

    @Test
    public void addCookies() throws Exception {
        final TestHttpClient client = new TestHttpClient();
        try {
            final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/add-cookies");
            final HttpResponse result = client.execute(get);
            assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            final String response = HttpClientUtils.readResponse(result);
            assertEquals("Served at: /servletContext", response);

            final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
            assertEquals(2, setCookieHeaders.length);
            assertEquals("test1=test1; path=/test", setCookieHeaders[0].getValue());
            assertEquals("test2=test2", setCookieHeaders[1].getValue());
        } finally {
            client.getConnectionManager().shutdown();
        }
    }

    @Test
    public void duplicateCookies() throws Exception {
        final TestHttpClient client = new TestHttpClient();
        try {
            final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/duplicate-cookies");
            final HttpResponse result = client.execute(get);
            assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            final String response = HttpClientUtils.readResponse(result);
            assertEquals("Served at: /servletContext", response);

            final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
            assertEquals(7, setCookieHeaders.length);
            // Header order is not guaranteed; sort for deterministic assertions.
            Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
            assertEquals("test1=test1; path=/test1_1", setCookieHeaders[0].getValue());
            assertEquals("test1=test1; path=/test1_2", setCookieHeaders[1].getValue());
            assertEquals("test2=test2; path=/test2", setCookieHeaders[2].getValue());
            assertEquals("test2=test2; path=/test2; domain=www.domain2.com", setCookieHeaders[3].getValue());
            assertEquals("test3=test3", setCookieHeaders[4].getValue());
            assertEquals("test3=test3; domain=www.domain3-1.com", setCookieHeaders[5].getValue());
            assertEquals("test3=test3; domain=www.domain3-2.com", setCookieHeaders[6].getValue());
        } finally {
            client.getConnectionManager().shutdown();
        }
    }

    @Test
    public void overwriteCookies() throws Exception {
        final TestHttpClient client = new TestHttpClient();
        try {
            final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/overwrite-cookies");
            final HttpResponse result = client.execute(get);
            assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            final String response = HttpClientUtils.readResponse(result);
            assertEquals("Served at: /servletContext", response);

            final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
            assertEquals(5, setCookieHeaders.length);
            Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
            // JSESSIONID value is generated, so match against a pattern.
            // (Fixed assertion messages: there was a missing space before "didn't".)
            assertTrue("Header " + setCookieHeaders[0] + " didn't match expected regex",
                    setCookieHeaders[0].getValue().matches("JSESSIONID=.*; path=/servletContext"));
            assertEquals("test=test10; domain=www.domain.com", setCookieHeaders[1].getValue());
            assertEquals("test=test2; path=/test", setCookieHeaders[2].getValue());
            assertEquals("test=test5", setCookieHeaders[3].getValue());
            assertEquals("test=test8; path=/test; domain=www.domain.com", setCookieHeaders[4].getValue());
        } finally {
            client.getConnectionManager().shutdown();
        }
    }

    @Test
    public void jsessionIdCookies() throws Exception {
        final TestHttpClient client = new TestHttpClient();
        try {
            final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/jsessionid-cookies");
            final HttpResponse result = client.execute(get);
            assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            final String response = HttpClientUtils.readResponse(result);
            assertEquals("Served at: /servletContext", response);

            final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
            assertEquals(3, setCookieHeaders.length);
            // Expires is computed from Max-Age at response time, so only check the prefix.
            assertTrue("Header " + setCookieHeaders[0] + " didn't start with expected prefix",
                    setCookieHeaders[0].getValue().startsWith("JSESSIONID=_bug_fix; path=/path3; Max-Age=500; Expires="));
            assertTrue("Header " + setCookieHeaders[1] + " didn't start with expected prefix",
                    setCookieHeaders[1].getValue().startsWith("JSESSIONID=_bug_fix; path=/path4; Max-Age=1000; Expires="));
            assertTrue("Header " + setCookieHeaders[2] + " didn't match expected regex",
                    setCookieHeaders[2].getValue().matches("JSESSIONID=.*; path=/servletContext"));
        } finally {
            client.getConnectionManager().shutdown();
        }
    }
}
| stuartwdouglas/undertow | servlet/src/test/java/io/undertow/servlet/test/response/cookies/ResponseCookiesTestCase.java | Java | apache-2.0 | 7,421 |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.grid.node.local;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Test;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.events.local.GuavaEventBus;
import org.openqa.selenium.grid.data.CreateSessionRequest;
import org.openqa.selenium.grid.data.CreateSessionResponse;
import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.node.Node;
import org.openqa.selenium.grid.testing.TestSessionFactory;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.tracing.DefaultTestTracer;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import static org.openqa.selenium.remote.Dialect.OSS;
import static org.openqa.selenium.remote.Dialect.W3C;
import static org.openqa.selenium.remote.http.Contents.utf8String;
import static org.openqa.selenium.remote.http.HttpMethod.POST;
public class CreateSessionTest {
private final Json json = new Json();
private final Capabilities stereotype = new ImmutableCapabilities("cheese", "brie");
@Test
public void shouldAcceptAW3CPayload() throws URISyntaxException {
String payload = json.toJson(ImmutableMap.of(
"capabilities", ImmutableMap.of(
"alwaysMatch", ImmutableMap.of("cheese", "brie"))));
HttpRequest request = new HttpRequest(POST, "/session");
request.setContent(utf8String(payload));
URI uri = new URI("http://example.com");
Node node = LocalNode.builder(
DefaultTestTracer.createTracer(),
new GuavaEventBus(),
uri,
uri,
null)
.add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
.build();
CreateSessionResponse sessionResponse = node.newSession(
new CreateSessionRequest(
ImmutableSet.of(W3C),
stereotype,
ImmutableMap.of()))
.orElseThrow(() -> new AssertionError("Unable to create session"));
Map<String, Object> all = json.toType(
new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
MAP_TYPE);
// Ensure that there's no status field (as this is used by the protocol handshake to determine
// whether the session is using the JWP or the W3C dialect.
assertThat(all.containsKey("status")).isFalse();
// Now check the fields required by the spec
Map<?, ?> value = (Map<?, ?>) all.get("value");
assertThat(value.get("sessionId")).isInstanceOf(String.class);
assertThat(value.get("capabilities")).isInstanceOf(Map.class);
}
@Test
public void shouldOnlyAcceptAJWPPayloadIfConfiguredTo() {
// TODO: implement shouldOnlyAcceptAJWPPayloadIfConfiguredTo test
}
@Test
public void ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured() {
// TODO: implement ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured test
}
    @Test
    public void ifOnlyJWPPayloadSentResponseShouldBeJWPOnlyIfJWPConfigured()
            throws URISyntaxException {
        // A JSON Wire Protocol (JWP) style new-session payload uses "desiredCapabilities".
        String payload = json.toJson(ImmutableMap.of(
                "desiredCapabilities", ImmutableMap.of("cheese", "brie")));
        HttpRequest request = new HttpRequest(POST, "/session");
        request.setContent(utf8String(payload));
        // NOTE(review): `request` (and therefore `payload`) is never used after this
        // point — the session is created directly through node.newSession below.
        // Confirm whether the HTTP request was meant to be exercised or is a leftover.
        URI uri = new URI("http://example.com");
        // Local node with a single session factory registered for the test stereotype.
        Node node = LocalNode.builder(
                DefaultTestTracer.createTracer(),
                new GuavaEventBus(),
                uri,
                uri,
                null)
                .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
                .build();
        // Request a session that advertises only the OSS (JWP) dialect downstream.
        CreateSessionResponse sessionResponse = node.newSession(
                new CreateSessionRequest(
                        ImmutableSet.of(OSS),
                        stereotype,
                        ImmutableMap.of()))
                .orElseThrow(() -> new AssertionError("Unable to create session"));
        Map<String, Object> all = json.toType(
                new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
                MAP_TYPE);
        // The status field is used by local ends to determine whether or not the session is a JWP one.
        assertThat(all.get("status")).matches(obj -> ((Number) obj).intValue() == ErrorCodes.SUCCESS);
        // The session id is a top level field
        assertThat(all.get("sessionId")).isInstanceOf(String.class);
        // And the value should contain the capabilities.
        assertThat(all.get("value")).isInstanceOf(Map.class);
    }
    @Test
    public void shouldPreferUsingTheW3CProtocol() throws URISyntaxException {
        // Payload carries BOTH a JWP section ("desiredCapabilities") and a W3C
        // section ("capabilities"); the node is expected to answer in W3C form.
        String payload = json.toJson(ImmutableMap.of(
                "desiredCapabilities", ImmutableMap.of(
                        "cheese", "brie"),
                "capabilities", ImmutableMap.of(
                        "alwaysMatch", ImmutableMap.of("cheese", "brie"))));
        HttpRequest request = new HttpRequest(POST, "/session");
        request.setContent(utf8String(payload));
        // NOTE(review): `request` (and therefore `payload`) is never used after this
        // point — confirm it was meant to be sent or remove the dead locals.
        URI uri = new URI("http://example.com");
        // Local node with a single session factory registered for the test stereotype.
        Node node = LocalNode.builder(
                DefaultTestTracer.createTracer(),
                new GuavaEventBus(),
                uri,
                uri,
                null)
                .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
                .build();
        // Only the W3C dialect is offered downstream.
        CreateSessionResponse sessionResponse = node.newSession(
                new CreateSessionRequest(
                        ImmutableSet.of(W3C),
                        stereotype,
                        ImmutableMap.of()))
                .orElseThrow(() -> new AssertionError("Unable to create session"));
        Map<String, Object> all = json.toType(
                new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
                MAP_TYPE);
        // Ensure that there's no status field (as this is used by the protocol handshake to determine
        // whether the session is using the JWP or the W3C dialect.
        assertThat(all.containsKey("status")).isFalse();
        // Now check the fields required by the spec
        Map<?, ?> value = (Map<?, ?>) all.get("value");
        assertThat(value.get("sessionId")).isInstanceOf(String.class);
        assertThat(value.get("capabilities")).isInstanceOf(Map.class);
    }
    @Test
    public void sessionDataShouldBeCorrectRegardlessOfPayloadProtocol() {
        // TODO: implement sessionDataShouldBeCorrectRegardlessOfPayloadProtocol test
        // Intentionally empty placeholder: the scenario is tracked but not yet
        // covered, so this test currently always passes.
    }
    @Test
    public void shouldSupportProtocolConversion() {
        // TODO: implement shouldSupportProtocolConversion test
        // Intentionally empty placeholder: the scenario is tracked but not yet
        // covered, so this test currently always passes.
    }
}
| asolntsev/selenium | java/server/test/org/openqa/selenium/grid/node/local/CreateSessionTest.java | Java | apache-2.0 | 7,428 |
package frc.team5333.lib;
import java.util.HashMap;
/**
* A static class that contains all kinds of Launch data for the robot,
* such as network ports, current state and more
*
* @author Jaci
*/
public class RobotData {

    /**
     * Shared blackboard of objects used throughout the robot program,
     * each stored under its String identifier.
     */
    public static HashMap<String, Object> blackboard = new HashMap<>();
}
| FRC-Team5333/2015-RecycleRush | FRC2015/src/main/java/frc/team5333/lib/RobotData.java | Java | apache-2.0 | 455 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openshift.build_configs;
import java.util.Map;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.dsl.FilterWatchListMultiDeletable;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.api.model.DoneableBuildConfig;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.dsl.BuildConfigResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OpenshiftBuildConfigsProducer extends DefaultProducer {

    private static final Logger LOG = LoggerFactory.getLogger(OpenshiftBuildConfigsProducer.class);

    public OpenshiftBuildConfigsProducer(AbstractKubernetesEndpoint endpoint) {
        super(endpoint);
    }

    @Override
    public AbstractKubernetesEndpoint getEndpoint() {
        // Narrow the return type to the Kubernetes-specific endpoint.
        return (AbstractKubernetesEndpoint) super.getEndpoint();
    }

    /**
     * Dispatches the exchange to the handler for the configured operation.
     * The operation is taken from the endpoint configuration when set,
     * otherwise from the KUBERNETES_OPERATION message header.
     */
    @Override
    public void process(Exchange exchange) throws Exception {
        String operation;
        if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration().getOperation())) {
            operation = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class);
        } else {
            operation = getEndpoint().getKubernetesConfiguration().getOperation();
        }
        switch (operation) {

            case KubernetesOperations.LIST_BUILD_CONFIGS:
                doList(exchange, operation);
                break;

            case KubernetesOperations.LIST_BUILD_CONFIGS_BY_LABELS_OPERATION:
                doListBuildConfigsByLabels(exchange, operation);
                break;

            case KubernetesOperations.GET_BUILD_CONFIG_OPERATION:
                doGetBuildConfig(exchange, operation);
                break;

            default:
                throw new IllegalArgumentException("Unsupported operation " + operation);
        }
    }

    /** Lists all build configs across every namespace. */
    protected void doList(Exchange exchange, String operation) throws Exception {
        BuildConfigList buildConfigsList
                = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace().list();
        // NOTE(review): unlike the other operations in this class, no
        // MessageHelper.copyHeaders(...) call is made here, so inbound headers
        // are not propagated to the out message — confirm whether intentional.
        exchange.getOut().setBody(buildConfigsList.getItems());
    }

    /**
     * Lists build configs filtered by the labels supplied in the
     * KUBERNETES_BUILD_CONFIGS_LABELS header, scoped to a single namespace when
     * the KUBERNETES_NAMESPACE_NAME header is present, otherwise cluster-wide.
     */
    protected void doListBuildConfigsByLabels(Exchange exchange, String operation) throws Exception {
        BuildConfigList buildConfigsList = null;
        Map<String, String> labels = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIGS_LABELS, Map.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (!ObjectHelper.isEmpty(namespaceName)) {
            NonNamespaceOperation<BuildConfig, BuildConfigList, DoneableBuildConfig, BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> buildConfigs;
            buildConfigs = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs()
                    .inNamespace(namespaceName);
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): the return value of withLabel(...) is discarded; in
                // the fabric8 fluent DSL this call typically returns a new filtered
                // operation, so the label filter may never be applied to the list()
                // below — verify against the fabric8 client version in use.
                buildConfigs.withLabel(entry.getKey(), entry.getValue());
            }
            buildConfigsList = buildConfigs.list();
        } else {
            FilterWatchListMultiDeletable<BuildConfig, BuildConfigList, Boolean, Watch> buildConfigs
                    = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace();
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): same discarded-return concern as above.
                buildConfigs.withLabel(entry.getKey(), entry.getValue());
            }
            buildConfigsList = buildConfigs.list();
        }
        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfigsList.getItems());
    }

    /**
     * Fetches a single build config by name. Both the build config name and the
     * namespace name headers are mandatory.
     *
     * @throws IllegalArgumentException if either required header is missing
     */
    protected void doGetBuildConfig(Exchange exchange, String operation) throws Exception {
        BuildConfig buildConfig = null;
        String buildConfigName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIG_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (ObjectHelper.isEmpty(buildConfigName)) {
            LOG.error("Get a specific Build Config require specify a Build Config name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a Build Config name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Get a specific Build Config require specify a namespace name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a namespace name");
        }
        buildConfig = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inNamespace(namespaceName)
                .withName(buildConfigName).get();
        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfig);
    }
}
| nicolaferraro/camel | components/camel-kubernetes/src/main/java/org/apache/camel/component/openshift/build_configs/OpenshiftBuildConfigsProducer.java | Java | apache-2.0 | 6,392 |
/*******************************************************************************
* Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package CloudWave;
public enum CloudWaveJNI {

    // Single-element enum: the standard Java idiom for a singleton.
    instance;

    // Base name of the native library passed to System.loadLibrary.
    public static final String CLOUDWAVE_LIB = "cloudwavejni";

    // Loads the native library when the enum constant is initialised.
    // NOTE(review): a missing library surfaces as UnsatisfiedLinkError at
    // class-initialisation time, not from init().
    CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);}

    public static CloudWaveJNI getInstance(){return instance;}

    /**
     * Initialises the native side.
     *
     * @throws CloudWaveException if the native initJNI call returns a negative
     *                            (error) code
     */
    public void init() throws CloudWaveException{
        int r = initJNI();
        if (r<0) {
            System.err.println("initJNI returned " + r);
            throw new CloudWaveException();
        }
    }

    /** Releases the native-side resources. */
    public void free(){
        freeJNI();
    }

    // Handler notified whenever the native layer delivers an event.
    protected IEventHandler eventHandler;

    public IEventHandler getEventHandler() {
        return eventHandler;
    }

    public void setEventHandler(IEventHandler eh) {
        // Synchronised so the handler cannot change in the middle of doEvent.
        synchronized(this){ eventHandler = eh;}
    }

    /** Dispatches an event string to the registered handler, if any. */
    public void doEvent(String event){
        synchronized(this) {
            if (eventHandler!=null)
                eventHandler.doEvent(event);
        }
    }

    // Entry point invoked from native code (JNI) to deliver an event to Java.
    // NOTE(review): this method synchronises on the CloudWaveJNI class, while
    // doEvent/setEventHandler synchronise on the instance — two different
    // monitors; confirm that is intentional.
    protected synchronized static void callback(String event){
        instance.doEvent(event);
    }

    //#: Init/Free — global native lifecycle.
    public native int initJNI();
    protected native int freeJNI();
    //:#

    //#: Log — native logging facility (negative return codes signal errors).
    protected native int initLog();
    protected native int freeLog();
    protected native int setLogId(String id);
    protected native String getLogId();
    protected native int recordLog(int level, String message);
    protected native int recordLogL(int level, String message, long id);
    //:#

    //#: Metric — record metric/event values (long/double/String variants).
    protected native int initMetric();
    protected native int freeMetric();
    protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value);
    protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value);
    //:#

    //#: Events — pub/sub on the native event channel.
    protected native int initEvent();
    protected native int freeEvent();
    protected native int postEvent(String event_json);
    protected native long subscribe(String event_id);
    protected native int unsubscribe(long id);
    //:#
}
| harinigunabalan/PerformanceHat | cw-feedback-handler/src/main/java/CloudWave/CloudWaveJNI.java | Java | apache-2.0 | 3,270 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.filter.callback;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException;
import javax.servlet.http.HttpServletRequest;
public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler {

    private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class);

    /**
     * Extracts the username from the HTTP Basic {@code Authorization} header of
     * the given request and stores it via {@code setUserName(String)}.
     *
     * <p>Expected header form: {@code "Basic <base64(username:password)>"}.
     *
     * @param request servlet request expected to carry a Basic Authorization header
     * @throws EntitlementFilterException if the header is absent or a username
     *                                    cannot be parsed from it
     */
    public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException {
        String authHeader = request.getHeader("Authorization");
        if (authHeader != null && !"null".equals(authHeader)) {
            String[] schemeAndToken = authHeader.split(" ");
            if (schemeAndToken.length == 2) {
                // Token is base64("username:password"); note that a password
                // containing ':' makes the split produce more than two parts and
                // is treated as a parse failure (same behaviour as before).
                String decoded = new String(Base64.decodeBase64(schemeAndToken[1].getBytes()));
                String[] credentials = decoded.split(":");
                if (credentials.length == 2) {
                    setUserName(credentials[0]);
                    return;
                }
            }
        }
        // Bug fix: previously this exception was thrown unconditionally whenever
        // the header was PRESENT (even after the username had been successfully
        // parsed), and never when the header was missing. It is now raised only
        // when a username cannot be extracted.
        throw new EntitlementFilterException("Unable to retrieve username from Authorization header");
    }
}
| wattale/carbon-identity | components/identity/org.wso2.carbon.identity.entitlement.filter/src/main/java/org/wso2/carbon/identity/entitlement/filter/callback/BasicAuthCallBackHandler.java | Java | apache-2.0 | 1,973 |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.logging;
import org.apache.commons.logging.Log;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* Tests for {@link DeferredLog}.
*
* @author Phillip Webb
*/
public class DeferredLogTests {

    private final DeferredLog deferred = new DeferredLog();

    private final Object payload = "Message";

    private final Throwable failure = new IllegalStateException();

    private final Log target = mock(Log.class);

    @Test
    public void isTraceEnabled() throws Exception {
        // A deferred log cannot know the eventual level, so every level reports enabled.
        assertThat(deferred.isTraceEnabled()).isTrue();
    }

    @Test
    public void isDebugEnabled() throws Exception {
        assertThat(deferred.isDebugEnabled()).isTrue();
    }

    @Test
    public void isInfoEnabled() throws Exception {
        assertThat(deferred.isInfoEnabled()).isTrue();
    }

    @Test
    public void isWarnEnabled() throws Exception {
        assertThat(deferred.isWarnEnabled()).isTrue();
    }

    @Test
    public void isErrorEnabled() throws Exception {
        assertThat(deferred.isErrorEnabled()).isTrue();
    }

    @Test
    public void isFatalEnabled() throws Exception {
        assertThat(deferred.isFatalEnabled()).isTrue();
    }

    @Test
    public void trace() throws Exception {
        // Messages logged without a throwable replay with a null throwable argument.
        deferred.trace(payload);
        deferred.replayTo(target);
        verify(target).trace(payload, null);
    }

    @Test
    public void traceWithThrowable() throws Exception {
        deferred.trace(payload, failure);
        deferred.replayTo(target);
        verify(target).trace(payload, failure);
    }

    @Test
    public void debug() throws Exception {
        deferred.debug(payload);
        deferred.replayTo(target);
        verify(target).debug(payload, null);
    }

    @Test
    public void debugWithThrowable() throws Exception {
        deferred.debug(payload, failure);
        deferred.replayTo(target);
        verify(target).debug(payload, failure);
    }

    @Test
    public void info() throws Exception {
        deferred.info(payload);
        deferred.replayTo(target);
        verify(target).info(payload, null);
    }

    @Test
    public void infoWithThrowable() throws Exception {
        deferred.info(payload, failure);
        deferred.replayTo(target);
        verify(target).info(payload, failure);
    }

    @Test
    public void warn() throws Exception {
        deferred.warn(payload);
        deferred.replayTo(target);
        verify(target).warn(payload, null);
    }

    @Test
    public void warnWithThrowable() throws Exception {
        deferred.warn(payload, failure);
        deferred.replayTo(target);
        verify(target).warn(payload, failure);
    }

    @Test
    public void error() throws Exception {
        deferred.error(payload);
        deferred.replayTo(target);
        verify(target).error(payload, null);
    }

    @Test
    public void errorWithThrowable() throws Exception {
        deferred.error(payload, failure);
        deferred.replayTo(target);
        verify(target).error(payload, failure);
    }

    @Test
    public void fatal() throws Exception {
        deferred.fatal(payload);
        deferred.replayTo(target);
        verify(target).fatal(payload, null);
    }

    @Test
    public void fatalWithThrowable() throws Exception {
        deferred.fatal(payload, failure);
        deferred.replayTo(target);
        verify(target).fatal(payload, failure);
    }

    @Test
    public void clearsOnReplayTo() throws Exception {
        // The buffered messages must be flushed by the first replay and not
        // delivered again to any subsequent destination.
        deferred.info("1");
        deferred.fatal("2");
        Log secondTarget = mock(Log.class);
        deferred.replayTo(target);
        deferred.replayTo(secondTarget);
        verify(target).info("1", null);
        verify(target).fatal("2", null);
        verifyNoMoreInteractions(target);
        verifyZeroInteractions(secondTarget);
    }
}
| vakninr/spring-boot | spring-boot-project/spring-boot/src/test/java/org/springframework/boot/logging/DeferredLogTests.java | Java | apache-2.0 | 4,690 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.runtime.pipeline;
/**
 * A pipeline stage that both receives and emits data: a Transformer is
 * simultaneously an {@code Emitter}, a {@code Receiver} and a {@code Stage},
 * with no additional methods of its own.
 *
 * <p>This api is experimental and thus the classes and the interfaces returned are subject to change.</p>
 */
public interface Transformer
    extends
    Emitter,
    Receiver,
    Stage {
    // Marker interface: combines the three pipeline roles into one type.
}
| mariofusco/droolsjbpm-integration | drools-pipeline/src/main/java/org/drools/runtime/pipeline/Transformer.java | Java | apache-2.0 | 836 |
/*******************************************************************************
* Copyright (c) 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.ibm.ws.lars.rest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;
/**
*
*/
public class TestUtils {

    /**
     * Drains the given InputStream and returns everything read as a byte array.
     */
    public static byte[] slurp(InputStream is) throws IOException {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[1024];
        for (int read = is.read(chunk); read != -1; read = is.read(chunk)) {
            collected.write(chunk, 0, read);
        }
        return collected.toByteArray();
    }

    /**
     * Asserts that the AssetList contains exactly the given assets.
     * <p>
     * Every expected asset must carry an ID and the expectations must be free of
     * duplicates; any surplus, missing or duplicated asset fails the assertion.
     */
    public static void assertAssetList(AssetList list, Asset... assets) {
        // Index the expected assets by ID, rejecting duplicate expectations.
        Map<String, Asset> expectedById = new HashMap<>();
        for (Asset expected : assets) {
            Asset previous = expectedById.put(expected.get_id(), expected);
            if (previous != null) {
                throw new AssertionError("Duplicate found in list of expected assets:\n" + expected.toJson());
            }
        }
        // Tick off each listed asset; anything unknown is a failure.
        for (Asset actual : list) {
            if (expectedById.remove(actual.get_id()) == null) {
                throw new AssertionError("Unexpected asset found in the asset list:\n" + actual.toJson());
            }
        }
        // Whatever is left was expected but never seen.
        if (expectedById.isEmpty()) {
            return;
        }
        StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
        for (Asset missing : expectedById.values()) {
            message.append(missing.toJson()).append("\n");
        }
        throw new AssertionError(message.toString());
    }
}
| antelder/tool.lars | server/src/test/java/com/ibm/ws/lars/rest/TestUtils.java | Java | apache-2.0 | 2,655 |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.topic.impl.reliable;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.partition.MigrationState;
import com.hazelcast.partition.MigrationListener;
import com.hazelcast.partition.ReplicaMigrationEvent;
import com.hazelcast.ringbuffer.impl.RingbufferService;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.OverridePropertyRule;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertTrue;
@Category({QuickTest.class, ParallelJVMTest.class})
@RunWith(HazelcastParallelClassRunner.class)
public class SubscriptionMigrationTest extends HazelcastTestSupport {

    // Shrink the partition space to 2 so that each reliable topic's backing
    // ringbuffer can be pinned to a known partition (0 or 1) below.
    @Rule
    public OverridePropertyRule overridePropertyRule = OverridePropertyRule.set("hazelcast.partition.count", "2");

    // gh issue: https://github.com/hazelcast/hazelcast/issues/13602
    @Test
    public void testListenerReceivesMessagesAfterPartitionIsMigratedBack() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance instance1 = factory.newHazelcastInstance();
        // One reliable topic per partition, so both partitions carry subscriptions.
        final String rtNameOnPartition0 = generateReliableTopicNameForPartition(instance1, 0);
        final String rtNameOnPartition1 = generateReliableTopicNameForPartition(instance1, 1);
        ITopic<String> topic0 = instance1.getReliableTopic(rtNameOnPartition0);
        ITopic<String> topic1 = instance1.getReliableTopic(rtNameOnPartition1);
        final CountingMigrationListener migrationListener = new CountingMigrationListener();
        instance1.getPartitionService().addMigrationListener(migrationListener);
        final PayloadMessageListener<String> listener0 = new PayloadMessageListener<String>();
        final PayloadMessageListener<String> listener1 = new PayloadMessageListener<String>();
        topic0.addMessageListener(listener0);
        topic1.addMessageListener(listener1);
        // Publish before any migration has happened.
        topic0.publish("itemA");
        topic1.publish("item1");
        // Starting a second member triggers partition migration away from instance1.
        HazelcastInstance instance2 = factory.newHazelcastInstance();
        // 1 primary, 1 backup migration
        assertEqualsEventually(2, migrationListener.partitionMigrationCount);
        // Shutting the second member down migrates the partition back again
        // (the counter is cumulative, hence 3 below).
        instance2.shutdown();
        assertEqualsEventually(3, migrationListener.partitionMigrationCount);
        // Publish after the partition has been migrated back; the listeners must
        // receive both the pre- and post-migration messages.
        topic0.publish("itemB");
        topic1.publish("item2");
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertTrue(listener0.isReceived("itemA"));
                assertTrue(listener0.isReceived("itemB"));
                assertTrue(listener1.isReceived("item1"));
                assertTrue(listener1.isReceived("item2"));
            }
        });
    }

    /** Message listener that remembers every payload it has received. */
    public class PayloadMessageListener<V> implements MessageListener<V> {

        private Collection<V> receivedMessages = new HashSet<V>();

        @Override
        public void onMessage(Message<V> message) {
            receivedMessages.add(message.getMessageObject());
        }

        boolean isReceived(V message) {
            return receivedMessages.contains(message);
        }
    }

    /** Migration listener that counts completed replica migrations. */
    public class CountingMigrationListener implements MigrationListener {

        AtomicInteger partitionMigrationCount = new AtomicInteger();

        @Override
        public void migrationStarted(MigrationState state) {
        }

        @Override
        public void migrationFinished(MigrationState state) {
        }

        @Override
        public void replicaMigrationCompleted(ReplicaMigrationEvent event) {
            partitionMigrationCount.incrementAndGet();
        }

        @Override
        public void replicaMigrationFailed(ReplicaMigrationEvent event) {
        }
    }

    // Builds a reliable topic name whose backing ringbuffer is owned by the
    // requested partition.
    private String generateReliableTopicNameForPartition(HazelcastInstance instance, int partitionId) {
        return generateKeyForPartition(instance, RingbufferService.TOPIC_RB_PREFIX, partitionId);
    }
}
| mdogan/hazelcast | hazelcast/src/test/java/com/hazelcast/topic/impl/reliable/SubscriptionMigrationTest.java | Java | apache-2.0 | 5,092 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import java.util.List;
import java.util.Set;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.domain.SubscriptionQueueSizeResult;
import org.apache.geode.management.internal.cli.functions.GetSubscriptionQueueSizeFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
public class CountDurableCQEventsCommand extends InternalGfshCommand {

    // Formats the per-member function results into the tabular gfsh output.
    DurableClientCommandsResultBuilder builder = new DurableClientCommandsResultBuilder();

    /**
     * gfsh {@code count durable-cq-events} command: reports the subscription
     * queue size for a durable client — optionally narrowed to a single durable
     * CQ — across the targeted members and/or groups.
     */
    @CliCommand(value = CliStrings.COUNT_DURABLE_CQ_EVENTS,
        help = CliStrings.COUNT_DURABLE_CQ_EVENTS__HELP)
    @CliMetaData()
    @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
        operation = ResourcePermission.Operation.READ)
    public Result countDurableCqEvents(
        @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID, mandatory = true,
            help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID__HELP) final String durableClientId,
        @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME,
            help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME__HELP) final String cqName,
        @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
            help = CliStrings.COUNT_DURABLE_CQ_EVENTS__MEMBER__HELP,
            optionContext = ConverterHint.MEMBERIDNAME) final String[] memberNameOrId,
        @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
            help = CliStrings.COUNT_DURABLE_CQ_EVENTS__GROUP__HELP,
            optionContext = ConverterHint.MEMBERGROUP) final String[] group) {
        Result result;
        try {
            Set<DistributedMember> targetMembers = findMembers(group, memberNameOrId);

            if (targetMembers.isEmpty()) {
                return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
            }

            // Function arguments: [0] durable client id, [1] optional CQ name.
            String[] params = new String[2];
            params[0] = durableClientId;
            params[1] = cqName;
            final ResultCollector<?, ?> rc =
                CliUtil.executeFunction(new GetSubscriptionQueueSizeFunction(), params, targetMembers);
            // NOTE(review): unchecked cast — assumes every member returns a
            // SubscriptionQueueSizeResult; a mismatch would only fail at runtime.
            final List<SubscriptionQueueSizeResult> funcResults =
                (List<SubscriptionQueueSizeResult>) rc.getResult();

            // Column title names the CQ when one was given, else the client id.
            String queueSizeColumnName;
            if (cqName != null && !cqName.isEmpty()) {
                queueSizeColumnName = CliStrings
                    .format(CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, cqName);
            } else {
                queueSizeColumnName = CliStrings.format(
                    CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, durableClientId);
            }
            result = builder.buildTableResultForQueueSize(funcResults, queueSizeColumnName);
        } catch (Exception e) {
            // NOTE(review): broad catch; e.getMessage() may be null, yielding an
            // empty error message — consider including the exception class name.
            result = ResultBuilder.createGemFireErrorResult(e.getMessage());
        }

        return result;
    }
}
| pdxrunner/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CountDurableCQEventsCommand.java | Java | apache-2.0 | 4,255 |
/**
* @author Oleksandr Prunyak (987456987p@gmail.com)
* @version $Id$
* @since 0.1
*/
package ru.job4j.loop; | Alex0889/oprunyak | chapter_001/src/test/java/ru/job4j/loop/package-info.java | Java | apache-2.0 | 110 |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;
import org.junit.Test;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomElement;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.SLADueDate;
import static junit.framework.TestCase.assertTrue;
import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.Factories.bpmn2;
public class EmbeddedSubprocessPropertyWriterTest {

    // NOTE(review): despite the class name, the subject under test is
    // SubProcessPropertyWriter (the writer used for embedded subprocesses) —
    // confirm the naming is intentional.
    private SubProcessPropertyWriter tested = new SubProcessPropertyWriter(bpmn2.createSubProcess(),
                                                                           new FlatVariableScope());

    @Test
    public void testSetIsAsync() {
        // Setting async must be persisted as the custom "async" element on the flow element.
        tested.setAsync(Boolean.TRUE);
        assertTrue(CustomElement.async.of(tested.getFlowElement()).get());
    }

    @Test
    public void testSetSlaDueDate() {
        String slaDueDate = "12/25/1983";
        tested.setSlaDueDate(new SLADueDate(slaDueDate));
        // The stored value wraps the raw date string, hence contains() rather than equals().
        assertTrue(CustomElement.slaDueDate.of(tested.getFlowElement()).get().contains(slaDueDate));
    }
}
| Rikkola/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/test/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/EmbeddedSubprocessPropertyWriterTest.java | Java | apache-2.0 | 1,772 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.exprtree;
/**
* Container of nodes representing operators.
*
* <p> Important: Do not use outside of Soy code (treat as superpackage-private).
*
* @author Kai Huang
*/
public class OperatorNodes {

  // Not instantiable: this class is only a namespace for the nested node types.
  private OperatorNodes() {}

  /**
   * Node representing the unary {@code -} (negative) operator.
   */
  public static class NegativeOpNode extends AbstractOperatorNode {
    public NegativeOpNode() { super(Operator.NEGATIVE); }
    protected NegativeOpNode(NegativeOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NEGATIVE_OP_NODE; }
    @Override public NegativeOpNode clone() { return new NegativeOpNode(this); }
  }

  /**
   * Node representing the {@code not} operator.
   */
  public static class NotOpNode extends AbstractOperatorNode {
    public NotOpNode() { super(Operator.NOT); }
    protected NotOpNode(NotOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NOT_OP_NODE; }
    @Override public NotOpNode clone() { return new NotOpNode(this); }
  }

  /**
   * Node representing the {@code *} (times) operator.
   */
  public static class TimesOpNode extends AbstractOperatorNode {
    public TimesOpNode() { super(Operator.TIMES); }
    protected TimesOpNode(TimesOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.TIMES_OP_NODE; }
    @Override public TimesOpNode clone() { return new TimesOpNode(this); }
  }

  /**
   * Node representing the {@code /} (divide by) operator.
   */
  public static class DivideByOpNode extends AbstractOperatorNode {
    public DivideByOpNode() { super(Operator.DIVIDE_BY); }
    protected DivideByOpNode(DivideByOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.DIVIDE_BY_OP_NODE; }
    @Override public DivideByOpNode clone() { return new DivideByOpNode(this); }
  }

  /**
   * Node representing the {@code %} (mod) operator.
   */
  public static class ModOpNode extends AbstractOperatorNode {
    public ModOpNode() { super(Operator.MOD); }
    protected ModOpNode(ModOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.MOD_OP_NODE; }
    @Override public ModOpNode clone() { return new ModOpNode(this); }
  }

  /**
   * Node representing the {@code +} (plus) operator.
   */
  public static class PlusOpNode extends AbstractOperatorNode {
    public PlusOpNode() { super(Operator.PLUS); }
    protected PlusOpNode(PlusOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.PLUS_OP_NODE; }
    @Override public PlusOpNode clone() { return new PlusOpNode(this); }
  }

  /**
   * Node representing the binary {@code -} (minus) operator.
   */
  public static class MinusOpNode extends AbstractOperatorNode {
    public MinusOpNode() { super(Operator.MINUS); }
    protected MinusOpNode(MinusOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.MINUS_OP_NODE; }
    @Override public MinusOpNode clone() { return new MinusOpNode(this); }
  }

  /**
   * Node representing the {@code <} (less than) operator.
   */
  public static class LessThanOpNode extends AbstractOperatorNode {
    public LessThanOpNode() { super(Operator.LESS_THAN); }
    protected LessThanOpNode(LessThanOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.LESS_THAN_OP_NODE; }
    @Override public LessThanOpNode clone() { return new LessThanOpNode(this); }
  }

  /**
   * Node representing the {@code >} (greater than) operator.
   */
  public static class GreaterThanOpNode extends AbstractOperatorNode {
    public GreaterThanOpNode() { super(Operator.GREATER_THAN); }
    protected GreaterThanOpNode(GreaterThanOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.GREATER_THAN_OP_NODE; }
    @Override public GreaterThanOpNode clone() { return new GreaterThanOpNode(this); }
  }

  /**
   * Node representing the {@code <=} (less than or equal) operator.
   */
  public static class LessThanOrEqualOpNode extends AbstractOperatorNode {
    public LessThanOrEqualOpNode() { super(Operator.LESS_THAN_OR_EQUAL); }
    protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.LESS_THAN_OR_EQUAL_OP_NODE; }
    @Override public LessThanOrEqualOpNode clone() { return new LessThanOrEqualOpNode(this); }
  }

  /**
   * Node representing the {@code >=} (greater than or equal) operator.
   */
  public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode {
    public GreaterThanOrEqualOpNode() { super(Operator.GREATER_THAN_OR_EQUAL); }
    protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.GREATER_THAN_OR_EQUAL_OP_NODE; }
    @Override public GreaterThanOrEqualOpNode clone() { return new GreaterThanOrEqualOpNode(this); }
  }

  /**
   * Node representing the {@code ==} (equal) operator.
   */
  public static class EqualOpNode extends AbstractOperatorNode {
    public EqualOpNode() { super(Operator.EQUAL); }
    protected EqualOpNode(EqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.EQUAL_OP_NODE; }
    @Override public EqualOpNode clone() { return new EqualOpNode(this); }
  }

  /**
   * Node representing the {@code !=} (not equal) operator.
   */
  public static class NotEqualOpNode extends AbstractOperatorNode {
    public NotEqualOpNode() { super(Operator.NOT_EQUAL); }
    protected NotEqualOpNode(NotEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NOT_EQUAL_OP_NODE; }
    @Override public NotEqualOpNode clone() { return new NotEqualOpNode(this); }
  }

  /**
   * Node representing the {@code and} operator.
   */
  public static class AndOpNode extends AbstractOperatorNode {
    public AndOpNode() { super(Operator.AND); }
    protected AndOpNode(AndOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.AND_OP_NODE; }
    @Override public AndOpNode clone() { return new AndOpNode(this); }
  }

  /**
   * Node representing the {@code or} operator.
   */
  public static class OrOpNode extends AbstractOperatorNode {
    public OrOpNode() { super(Operator.OR); }
    protected OrOpNode(OrOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.OR_OP_NODE; }
    @Override public OrOpNode clone() { return new OrOpNode(this); }
  }

  /**
   * Node representing the ternary {@code ? :} (conditional) operator.
   */
  public static class ConditionalOpNode extends AbstractOperatorNode {
    public ConditionalOpNode() { super(Operator.CONDITIONAL); }
    protected ConditionalOpNode(ConditionalOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.CONDITIONAL_OP_NODE; }
    @Override public ConditionalOpNode clone() { return new ConditionalOpNode(this); }
  }
}
| Digaku/closure-template | java/src/com/google/template/soy/exprtree/OperatorNodes.java | Java | apache-2.0 | 7,510 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work.artifact;
import com.thoughtworks.go.plugin.access.artifact.ArtifactExtensionConstants;
import com.thoughtworks.go.plugin.api.request.GoApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.infra.GoPluginApiRequestProcessor;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.remote.work.artifact.ConsoleLogMessage.LogLevel;
import com.thoughtworks.go.util.command.*;
import com.thoughtworks.go.work.GoPublisher;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static java.lang.String.format;
public class ArtifactRequestProcessor implements GoPluginApiRequestProcessor {
private static final List<String> goSupportedVersions = ArtifactExtensionConstants.SUPPORTED_VERSIONS;
private final SafeOutputStreamConsumer safeOutputStreamConsumer;
private final ProcessType processType;
private enum ProcessType {
FETCH, PUBLISH
}
private static final Map<LogLevel, String> FETCH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.OUT);
put(LogLevel.ERROR, TaggedStreamConsumer.ERR);
}};
private static final Map<LogLevel, String> PUBLISH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.PUBLISH);
put(LogLevel.ERROR, TaggedStreamConsumer.PUBLISH_ERR);
}};
private ArtifactRequestProcessor(GoPublisher publisher, ProcessType processType, EnvironmentVariableContext environmentVariableContext) {
CompositeConsumer errorStreamConsumer = new CompositeConsumer(CompositeConsumer.ERR, publisher);
CompositeConsumer outputStreamConsumer = new CompositeConsumer(CompositeConsumer.OUT, publisher);
this.safeOutputStreamConsumer = new SafeOutputStreamConsumer(new ProcessOutputStreamConsumer(errorStreamConsumer, outputStreamConsumer));
safeOutputStreamConsumer.addSecrets(environmentVariableContext.secrets());
this.processType = processType;
}
public static ArtifactRequestProcessor forFetchArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.FETCH, environmentVariableContext);
}
public static ArtifactRequestProcessor forPublishArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.PUBLISH, environmentVariableContext);
}
@Override
public GoApiResponse process(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
validatePluginRequest(request);
switch (Request.fromString(request.api())) {
case CONSOLE_LOG:
return processConsoleLogRequest(pluginDescriptor, request);
default:
return DefaultGoApiResponse.error("Illegal api request");
}
}
private GoApiResponse processConsoleLogRequest(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
final ConsoleLogMessage consoleLogMessage = ConsoleLogMessage.fromJSON(request.requestBody());
final String message = format("[%s] %s", pluginDescriptor.id(), consoleLogMessage.getMessage());
Optional<String> parsedTag = parseTag(processType, consoleLogMessage.getLogLevel());
if (parsedTag.isPresent()) {
safeOutputStreamConsumer.taggedStdOutput(parsedTag.get(), message);
return DefaultGoApiResponse.success(null);
}
return DefaultGoApiResponse.error(format("Unsupported log level `%s`.", consoleLogMessage.getLogLevel()));
}
private Optional<String> parseTag(ProcessType requestType, LogLevel logLevel) {
switch (requestType) {
case FETCH:
return Optional.ofNullable(FETCH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
case PUBLISH:
return Optional.ofNullable(PUBLISH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
}
return Optional.empty();
}
private void validatePluginRequest(GoApiRequest goPluginApiRequest) {
if (!goSupportedVersions.contains(goPluginApiRequest.apiVersion())) {
throw new RuntimeException(format("Unsupported '%s' API version: %s. Supported versions: %s", goPluginApiRequest.api(), goPluginApiRequest.apiVersion(), goSupportedVersions));
}
}
public enum Request {
CONSOLE_LOG("go.processor.artifact.console-log");
private final String requestName;
Request(String requestName) {
this.requestName = requestName;
}
public static Request fromString(String requestName) {
if (requestName != null) {
for (Request request : Request.values()) {
if (requestName.equalsIgnoreCase(request.requestName)) {
return request;
}
}
}
return null;
}
public String requestName() {
return requestName;
}
}
}
| gocd/gocd | common/src/main/java/com/thoughtworks/go/remote/work/artifact/ArtifactRequestProcessor.java | Java | apache-2.0 | 5,906 |
package com.senseidb.search.node.impl;
import org.json.JSONObject;
import com.senseidb.search.node.SenseiQueryBuilder;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.SenseiQuery;
import com.senseidb.util.JSONUtil.FastJSONObject;
public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory {

  /**
   * Decodes the raw query bytes as UTF-8 JSON and delegates to
   * {@link #buildQueryBuilder(JSONObject)}; a null query is forwarded as null.
   */
  @Override
  public SenseiQueryBuilder getQueryBuilder(SenseiQuery query) throws Exception {
    if (query == null) {
      return buildQueryBuilder(null);
    }
    String rawJson = new String(query.toBytes(), SenseiQuery.utf8Charset);
    return buildQueryBuilder(new FastJSONObject(rawJson));
  }

  public abstract SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery);
}
| javasoze/sensei | sensei-core/src/main/java/com/senseidb/search/node/impl/AbstractJsonQueryBuilderFactory.java | Java | apache-2.0 | 774 |
package org.vertexium.util;
import org.vertexium.Authorizations;
import org.vertexium.Direction;
import org.vertexium.Vertex;
import java.util.Iterator;
/**
 * Lazily flattens a set of vertices into the ids of every edge (both
 * directions) attached to them, visible under the given authorizations.
 */
public class VerticesToEdgeIdsIterable implements Iterable<String> {
    private final Iterable<? extends Vertex> sourceVertices;
    private final Authorizations auths;

    public VerticesToEdgeIdsIterable(Iterable<? extends Vertex> vertices, Authorizations authorizations) {
        this.sourceVertices = vertices;
        this.auths = authorizations;
    }

    @Override
    public Iterator<String> iterator() {
        SelectManyIterable<Vertex, String> edgeIds = new SelectManyIterable<Vertex, String>(sourceVertices) {
            @Override
            public Iterable<String> getIterable(Vertex vertex) {
                // Edge ids in BOTH directions, filtered by the captured authorizations.
                return vertex.getEdgeIds(Direction.BOTH, auths);
            }
        };
        return edgeIds.iterator();
    }
}
| visallo/vertexium | core/src/main/java/org/vertexium/util/VerticesToEdgeIdsIterable.java | Java | apache-2.0 | 855 |
package com.kit.db;
/**
 * Empty placeholder type: declares no state or behavior.
 * NOTE(review): purpose is not evident from this file -- presumably reserved
 * for future use by the db package; confirm and remove if unused.
 */
public class Obj {
}
| BigAppOS/BigApp_Discuz_Android | libs/ZUtils/src/com/kit/db/Obj.java | Java | apache-2.0 | 43 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.functions.recompile;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import org.apache.sysds.conf.CompilerConfig;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import org.apache.sysds.utils.Statistics;
/**
 * Verifies compilation and execution behavior of a DML script with a function
 * call under the four combinations of dynamic recompilation and
 * inter-procedural analysis (IPA), asserting the expected number of
 * compiled/executed instructions and comparing DML results against R.
 */
public class FunctionRecompileTest extends AutomatedTestBase
{
	private final static String TEST_NAME1 = "funct_recompile";
	private final static String TEST_DIR = "functions/recompile/";
	private final static String TEST_CLASS_DIR = TEST_DIR + FunctionRecompileTest.class.getSimpleName() + "/";

	// Tolerance for the DML-vs-R matrix comparison.
	private final static double eps = 1e-10;

	// Dimensions and density of the random input matrix V.
	private final static int rows = 20;
	private final static int cols = 10;
	private final static double sparsity = 1.0;

	@Override
	public void setUp() {
		TestUtils.clearAssertionInformation();
		addTestConfiguration(TEST_NAME1,
			new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "Rout" }) );
	}

	@Test
	public void testFunctionWithoutRecompileWithoutIPA() {
		runFunctionTest(false, false);
	}

	@Test
	public void testFunctionWithoutRecompileWithIPA() {
		runFunctionTest(false, true);
	}

	@Test
	public void testFunctionWithRecompileWithoutIPA() {
		runFunctionTest(true, false);
	}

	@Test
	public void testFunctionWithRecompileWithIPA() {
		runFunctionTest(true, true);
	}

	/**
	 * Runs the DML script and the reference R script with the given compiler
	 * flags, then checks instruction counts and result equivalence.
	 *
	 * @param recompile whether dynamic recompilation is enabled
	 * @param IPA whether inter-procedural analysis is enabled
	 */
	private void runFunctionTest( boolean recompile, boolean IPA )
	{
		// Save the global compiler flags so they can be restored afterwards;
		// they are static and would otherwise leak into subsequent tests.
		boolean oldFlagRecompile = CompilerConfig.FLAG_DYN_RECOMPILE;
		boolean oldFlagIPA = OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS;

		try
		{
			TestConfiguration config = getTestConfiguration(TEST_NAME1);
			config.addVariable("rows", rows);
			config.addVariable("cols", cols);
			loadTestConfiguration(config);

			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
			programArgs = new String[]{"-args", input("V"),
				Integer.toString(rows), Integer.toString(cols), output("R") };

			fullRScriptName = HOME + TEST_NAME1 + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();

			// Fresh random input per run; the same matrix feeds both DML and R.
			long seed = System.nanoTime();
			double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, seed);
			writeInputMatrix("V", V, true);

			CompilerConfig.FLAG_DYN_RECOMPILE = recompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = IPA;

			boolean exceptionExpected = false;
			runTest(true, exceptionExpected, null, -1);
			runRScript(true);

			// NOTE(review): the comments below refer to "MR jobs" but the
			// assertions count Spark instructions (getNoOfCompiledSPInst /
			// getNoOfExecutedSPInst) -- likely a leftover from the MR-to-Spark
			// migration; confirm the expected counts still match that intent.
			//note: change from previous version due to fix in op selection (unknown size XtX and mapmult)

			//CHECK compiled MR jobs
			int expectNumCompiled = -1;
			if( IPA ) expectNumCompiled = 1; //reblock
			else expectNumCompiled = 5; //reblock, GMR,GMR,GMR,GMR (last two should piggybacked)
			Assert.assertEquals("Unexpected number of compiled MR jobs.",
				expectNumCompiled, Statistics.getNoOfCompiledSPInst());

			//CHECK executed MR jobs
			int expectNumExecuted = -1;
			if( recompile ) expectNumExecuted = 0;
			else if( IPA ) expectNumExecuted = 1; //reblock
			else expectNumExecuted = 41; //reblock, 10*(GMR,GMR,GMR, GMR) (last two should piggybacked)
			Assert.assertEquals("Unexpected number of executed MR jobs.",
				expectNumExecuted, Statistics.getNoOfExecutedSPInst());

			//compare matrices
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromOutputDir("R");
			HashMap<CellIndex, Double> rfile = readRMatrixFromExpectedDir("Rout");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R");
		}
		finally {
			// Always restore the global flags, even on assertion failure.
			CompilerConfig.FLAG_DYN_RECOMPILE = oldFlagRecompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = oldFlagIPA;
		}
	}
}
| apache/incubator-systemml | src/test/java/org/apache/sysds/test/functions/recompile/FunctionRecompileTest.java | Java | apache-2.0 | 4,614 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager;
import akka.actor.ActorSystem;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.akka.ListeningBehaviour;
import org.apache.flink.runtime.blob.BlobClient;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.util.LeaderRetrievalUtils;
import org.apache.flink.util.NetUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.Tuple2;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests that the JobManager handles Jobs correctly that fail in
* the initialization during the submit phase.
*/
public class JobSubmitTest {

	// Deadline applied to every actor ask/await in this test.
	private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS);

	// Shared across all tests: started once in setupJobManager().
	private static ActorSystem jobManagerSystem;
	private static ActorGateway jmGateway;
	private static Configuration jmConfig;

	/**
	 * Starts a local actor system hosting only a JobManager (no
	 * ResourceManager / TaskManagers) and resolves its leader gateway.
	 */
	@BeforeClass
	public static void setupJobManager() {
		jmConfig = new Configuration();

		int port = NetUtils.getAvailablePort();
		jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
		jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);

		scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port));
		jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress);

		// only start JobManager (no ResourceManager)
		JobManager.startJobManagerActors(
			jmConfig,
			jobManagerSystem,
			TestingUtils.defaultExecutor(),
			TestingUtils.defaultExecutor(),
			JobManager.class,
			MemoryArchivist.class)._1();

		try {
			LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig);

			jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(
					lrs,
					jobManagerSystem,
					timeout
			);
		} catch (Exception e) {
			fail("Could not retrieve the JobManager gateway. " + e.getMessage());
		}
	}

	/** Shuts down the shared actor system after all tests have run. */
	@AfterClass
	public static void teardownJobmanager() {
		if (jobManagerSystem != null) {
			jobManagerSystem.shutdown();
		}
	}

	/**
	 * Submits a job that references a BLOB which was deleted from the blob
	 * store before submission; the submission must fail with a
	 * {@link JobExecutionException} caused by an {@link IOException}.
	 */
	@Test
	public void testFailureWhenJarBlobsMissing() {
		try {
			// create a simple job graph
			JobVertex jobVertex = new JobVertex("Test Vertex");
			jobVertex.setInvokableClass(NoOpInvokable.class);
			JobGraph jg = new JobGraph("test job", jobVertex);

			// request the blob port from the job manager
			Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout);
			int blobPort = (Integer) Await.result(future, timeout);

			// upload two dummy bytes and add their keys to the job graph as dependencies
			BlobKey key1, key2;
			BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig);
			try {
				key1 = bc.put(new byte[10]);
				key2 = bc.put(new byte[10]);

				// delete one of the blobs to make sure that the startup failed
				bc.delete(key2);
			}
			finally {
				bc.close();
			}

			jg.addBlob(key1);
			jg.addBlob(key2);

			// submit the job
			Future<Object> submitFuture = jmGateway.ask(
				new JobManagerMessages.SubmitJob(
					jg,
					ListeningBehaviour.EXECUTION_RESULT),
				timeout);
			try {
				Await.result(submitFuture, timeout);
			}
			catch (JobExecutionException e) {
				// that is what we expect
				assertTrue(e.getCause() instanceof IOException);
			}
			catch (Exception e) {
				fail("Wrong exception type");
			}
		}
		catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		}
	}

	/**
	 * Verifies a correct error message when vertices with master initialization
	 * (input formats / output formats) fail.
	 */
	@Test
	public void testFailureWhenInitializeOnMasterFails() {
		try {
			// create a simple job graph whose only vertex throws during master-side init
			JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") {

				private static final long serialVersionUID = -3540303593784587652L;

				@Override
				public void initializeOnMaster(ClassLoader loader) throws Exception {
					throw new RuntimeException("test exception");
				}
			};

			jobVertex.setInvokableClass(NoOpInvokable.class);
			JobGraph jg = new JobGraph("test job", jobVertex);

			// submit the job
			Future<Object> submitFuture = jmGateway.ask(
				new JobManagerMessages.SubmitJob(
					jg,
					ListeningBehaviour.EXECUTION_RESULT),
				timeout);
			try {
				Await.result(submitFuture, timeout);
			}
			catch (JobExecutionException e) {
				// that is what we expect
				// test that the exception nesting is not too deep
				assertTrue(e.getCause() instanceof RuntimeException);
			}
			catch (Exception e) {
				fail("Wrong exception type");
			}
		}
		catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		}
	}

	/**
	 * Submitting a job with a restore path that does not exist must answer
	 * with a JobResultFailure message rather than hanging or succeeding.
	 */
	@Test
	public void testAnswerFailureWhenSavepointReadFails() throws Exception {
		// create a simple job graph
		JobGraph jg = createSimpleJobGraph();
		jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist..."));

		// submit the job
		Future<Object> submitFuture = jmGateway.ask(
				new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout);
		Object result = Await.result(submitFuture, timeout);
		assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass());
	}

	// Builds a one-vertex no-op job graph with checkpointing enabled on that vertex.
	private JobGraph createSimpleJobGraph() {
		JobVertex jobVertex = new JobVertex("Vertex");

		jobVertex.setInvokableClass(NoOpInvokable.class);
		List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID());

		JobGraph jg = new JobGraph("test job", jobVertex);
		jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList,
				5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true));
		return jg;
	}
}
| hwstreaming/flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/JobSubmitTest.java | Java | apache-2.0 | 7,730 |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.base.accumulators;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
/**
* An implementation of an accumulator capable of counting occurences
*/
/**
 * An implementation of an accumulate function capable of counting occurrences.
 * The function itself is stateless; the running count lives in the
 * {@link CountData} context created per accumulation.
 */
public class CountAccumulateFunction extends AbstractAccumulateFunction<CountAccumulateFunction.CountData> {

    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // Stateless function: nothing to read.
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        // Stateless function: nothing to write.
    }

    /** Serializable accumulation context holding the running count. */
    protected static class CountData implements Externalizable {
        // Number of values accumulated so far.
        public long count = 0;

        public CountData() {}

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            count = in.readLong();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeLong(count);
        }
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#createContext()
     */
    public CountData createContext() {
        return new CountData();
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#init(java.lang.Object)
     */
    public void init(CountData data) {
        data.count = 0;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#accumulate(java.lang.Object, java.lang.Object)
     */
    public void accumulate(CountData data,
                           Object value) {
        data.count++;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#reverse(java.lang.Object, java.lang.Object)
     */
    public void reverse(CountData data,
                        Object value) {
        data.count--;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#getResult(java.lang.Object)
     */
    public Object getResult(CountData data) {
        // Long.valueOf avoids the deprecated Long(long) constructor and may
        // reuse cached instances for small counts.
        return Long.valueOf( data.count );
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#supportsReverse()
     */
    public boolean supportsReverse() {
        // Counting is trivially reversible (decrement on retraction).
        return true;
    }

    /**
     * {@inheritDoc}
     */
    public Class< ? > getResultType() {
        return Long.class;
    }
}
| ngs-mtech/drools | drools-core/src/main/java/org/drools/core/base/accumulators/CountAccumulateFunction.java | Java | apache-2.0 | 2,915 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.type.IpAddressType.IPADDRESS;
import static com.facebook.presto.type.IpPrefixType.IPPREFIX;
import static java.lang.System.arraycopy;
/**
 * Tests for the IPPREFIX type: casts to and from VARCHAR and IPADDRESS,
 * equality/ordering operators, IS DISTINCT FROM, INDETERMINATE and HASH_CODE.
 */
public class TestIpPrefixOperators
        extends AbstractTestFunctions
{
    // Casting a VARCHAR to IPPREFIX must canonicalize the address: host bits
    // below the prefix length are zeroed, IPv4-mapped IPv6 renders as IPv4,
    // and invalid addresses or prefix lengths are rejected.
    @Test
    public void testVarcharToIpPrefixCast()
    {
        assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24");
        assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0");
        assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1");
        assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2");
        assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4");
        assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, "1.0.0.0/8");
        assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16");
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25");
        assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26");
        assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28");
        assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30");
        assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32");
        assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff::/48");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128");
        assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32");
        assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128");
        assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32");
        assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32");
        assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128");
        assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128");
        assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32");
        assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1");
        assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128");
    }

    // IPPREFIX renders back to VARCHAR in canonical (shortest) form.
    @Test
    public void testIpPrefixToVarcharCast()
    {
        assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128");
    }

    // Casting IPPREFIX to IPADDRESS yields the network (first) address of the prefix.
    @Test
    public void testIpPrefixToIpAddressCast()
    {
        assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4");
        assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0");
        assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::");
    }

    // Casting IPADDRESS to IPPREFIX produces a full-length (host) prefix:
    // /32 for IPv4 and /128 for IPv6.
    @Test
    public void testIpAddressToIpPrefixCast()
    {
        assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128");
        assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
    }

    // Equality is over the canonical prefix: same network and same length.
    @Test
    public void testEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false);
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false);
    }

    // IS DISTINCT FROM follows SQL semantics: NULL is not distinct from NULL.
    @Test
    public void testDistinctFrom()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
    }

    // Both inequality spellings (!= and <>) are exercised.
    @Test
    public void testNotEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
    }

    // Ordering: IPv4 (as IPv4-mapped IPv6) sorts below native IPv6; for equal
    // networks a shorter prefix sorts before a longer one.
    @Test
    public void testOrderOperators()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false);
    }

    // Only NULL is indeterminate.
    @Test
    public void testIndeterminate()
    {
        assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
        assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
    }

    // HASH_CODE of NULL is NULL; otherwise it must match the type's own hash
    // of the underlying fixed-size representation (see hashFromType below).
    @Test
    public void testHash()
    {
        assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null);
        assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128"));
    }

    /**
     * Computes the expected hash by building the IPPREFIX storage layout by
     * hand (16 address bytes followed by the prefix length in the final byte)
     * and asking the type itself to hash it.
     *
     * NOTE(review): the fixed arraycopy length of 16 assumes an IPv6-format
     * address string; the only caller passes "::2222/128", which satisfies that.
     */
    private static long hashFromType(String address)
    {
        BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1);
        String[] parts = address.split("/");
        byte[] bytes = new byte[IPPREFIX.getFixedSize()];
        byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress();
        arraycopy(addressBytes, 0, bytes, 0, 16);
        // Last byte of the fixed-size slice stores the prefix length.
        bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]);
        IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes));
        Block block = blockBuilder.build();
        return IPPREFIX.hash(block, 0);
    }
}
| ptkool/presto | presto-main/src/test/java/com/facebook/presto/type/TestIpPrefixOperators.java | Java | apache-2.0 | 13,240 |
package org.plasma.provisioning.rdb.mysql.v5_5.query;
import org.plasma.provisioning.rdb.mysql.v5_5.TableColumnConstraint;
import org.plasma.query.DataProperty;
import org.plasma.query.Expression;
import org.plasma.query.dsl.DataNode;
import org.plasma.query.dsl.DomainRoot;
import org.plasma.query.dsl.PathNode;
import org.plasma.sdo.helper.PlasmaTypeHelper;
/**
* Generated Domain Specific Language (DSL) implementation class representing
* the domain model entity <b>TableColumnConstraint</b>.
*
* <p>
* </p>
* <b>Data Store Mapping:</b> Corresponds to the physical data store entity
* <b>REFERENTIAL_CONSTRAINTS</b>.
*
*/
public class QTableColumnConstraint extends DomainRoot {

    /**
     * Private constructor; clients obtain instances through {@link #newQuery()}.
     * Binds this query root to the SDO type for {@link TableColumnConstraint}.
     */
    private QTableColumnConstraint() {
        super(PlasmaTypeHelper.INSTANCE.getType(TableColumnConstraint.class));
    }

    /**
     * Constructor which instantiates a domain query path node. A path may span
     * multiple namespaces and therefore Java implementation packages based on the
     * <a href=
     * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
     * >Configuration</a>. Note: while this constructor is public, it is not for
     * application use!
     *
     * @param source
     *          the source path node
     * @param sourceProperty
     *          the source property logical name
     */
    public QTableColumnConstraint(PathNode source, String sourceProperty) {
        super(source, sourceProperty);
    }

    /**
     * Constructor which instantiates a domain query path node. A path may span
     * multiple namespaces and therefore Java implementation packages based on the
     * <a href=
     * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
     * >Configuration</a>. Note: while this constructor is public, it is not for
     * application use!
     *
     * @param source
     *          the source path node
     * @param sourceProperty
     *          the source property logical name
     * @param expr
     *          the path predicate expression
     */
    public QTableColumnConstraint(PathNode source, String sourceProperty, Expression expr) {
        super(source, sourceProperty, expr);
    }

    /**
     * Returns a new DSL query for <a
     * href="http://docs.plasma-sdo.org/api/org/plasma/sdo/PlasmaType.html"
     * >Type</a> <b>TableColumnConstraint</b> which can be used either as a query
     * root or as the start (entry point) for a new path predicate expression.
     *
     * @return a new DSL query
     */
    public static QTableColumnConstraint newQuery() {
        return new QTableColumnConstraint();
    }

    /**
     * Returns a DSL data element for property, <b>name</b>.
     *
     * @return a DSL data element for property, <b>name</b>.
     */
    public DataProperty name() {
        return new DataNode(this, TableColumnConstraint.PROPERTY.name.name());
    }

    /**
     * Returns a DSL data element for property, <b>owner</b>.
     *
     * @return a DSL data element for property, <b>owner</b>.
     */
    public DataProperty owner() {
        return new DataNode(this, TableColumnConstraint.PROPERTY.owner.name());
    }

    /**
     * Returns a DSL query element for reference property, <b>table</b>.
     *
     * @return a DSL query element for reference property, <b>table</b>.
     */
    public QTable table() {
        return new QTable(this, TableColumnConstraint.PROPERTY.table.name());
    }
}
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.problems;
import org.apache.flex.compiler.common.ISourceLocation;
/**
* This problem gets created when a variable has two Embed meta data tags
* associated with it.
*/
public final class EmbedMultipleMetaTagsProblem extends CompilerProblem
{
    // Diagnostic text shown to the user; ${EMBED} is substituted with the
    // (non-localized) tag name below. Fixed duplicated word "only only".
    public static final String DESCRIPTION =
        "A variable can only have one [${EMBED}] metadata tag";

    public static final int errorCode = 1344;

    /**
     * @param site the source location at which the duplicate [Embed] metadata was found
     */
    public EmbedMultipleMetaTagsProblem(ISourceLocation site)
    {
        super(site);
    }

    // Prevent these from being localized.
    public final String EMBED = "Embed";
}
| adufilie/flex-falcon | compiler/src/org/apache/flex/compiler/problems/EmbedMultipleMetaTagsProblem.java | Java | apache-2.0 | 1,453 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.materials;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.packagerepository.PackageDefinition;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.listener.EntityConfigChangedListener;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.MaterialConfigConverter;
import com.thoughtworks.go.util.SystemEnvironment;
import org.slf4j.Logger;
import org.joda.time.DateTimeUtils;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Provides a list of unique SCMMaterials to be updated which will be consumed by MaterialUpdateService
*/
@Component
public class SCMMaterialSource extends EntityConfigChangedListener<ConfigRepoConfig> implements ConfigChangedListener, MaterialSource, MaterialUpdateCompleteListener {
    private static final Logger LOGGER = LoggerFactory.getLogger(SCMMaterialSource.class);

    private final GoConfigService goConfigService;
    // Last successful update time (epoch millis) per material; concurrent map
    // because updates arrive from material-update callbacks.
    private ConcurrentMap<Material, Long> materialLastUpdateTimeMap = new ConcurrentHashMap<>();
    private final MaterialConfigConverter materialConfigConverter;
    private final MaterialUpdateService materialUpdateService;
    // Minimum idle interval (millis) between two updates of the same material.
    private final long materialUpdateInterval;
    // Cached set of schedulable SCM materials; rebuilt lazily or on config change.
    private Set<Material> schedulableMaterials;

    @Autowired
    public SCMMaterialSource(GoConfigService goConfigService, SystemEnvironment systemEnvironment,
                             MaterialConfigConverter materialConfigConverter, MaterialUpdateService materialUpdateService) {
        this.goConfigService = goConfigService;
        this.materialConfigConverter = materialConfigConverter;
        this.materialUpdateService = materialUpdateService;
        this.materialUpdateInterval = systemEnvironment.getMaterialUpdateIdleInterval();
    }

    /**
     * Registers this source with the config service (for config-change
     * notifications) and with the material update service (as a material
     * source and an update-complete listener). Must be called before use.
     */
    public void initialize() {
        goConfigService.register(this);
        goConfigService.register(new InternalConfigChangeListener() {
            @Override
            public void onEntityConfigChange(Object entity) {
                // Any relevant entity change invalidates the cached material set.
                updateSchedulableMaterials(true);
            }
        });

        materialUpdateService.registerMaterialSources(this);
        materialUpdateService.registerMaterialUpdateCompleteListener(this);
    }

    /**
     * Returns the SCM materials whose idle interval has elapsed and which are
     * therefore due for an update.
     */
    @Override
    public Set<Material> materialsForUpdate() {
        updateSchedulableMaterials(false);

        return materialsWithUpdateIntervalElapsed();
    }

    /**
     * Records the completion time for an SCM material update; dependency
     * materials are not tracked here.
     */
    @Override
    public void onMaterialUpdate(Material material) {
        if (!(material instanceof DependencyMaterial)) {
            updateLastUpdateTimeForScmMaterial(material);
        }
    }

    // Full config change: force a rebuild of the schedulable material cache.
    @Override
    public void onConfigChange(CruiseConfig newCruiseConfig) {
        updateSchedulableMaterials(true);
    }

    // Config-repo change: also forces a cache rebuild.
    @Override
    public void onEntityConfigChange(ConfigRepoConfig entity) {
        updateSchedulableMaterials(true);
    }

    /**
     * Listener that treats any pipeline-config change like a full config change.
     */
    protected EntityConfigChangedListener<PipelineConfig> pipelineConfigChangedListener() {
        final SCMMaterialSource self = this;
        return new EntityConfigChangedListener<PipelineConfig>() {
            @Override
            public void onEntityConfigChange(PipelineConfig pipelineConfig) {
                self.onConfigChange(null);
            }
        };
    }

    // Filters the cached schedulable materials down to those due for update.
    private Set<Material> materialsWithUpdateIntervalElapsed() {
        Set<Material> materialsForUpdate = new HashSet<>();
        for (Material material : schedulableMaterials) {
            if (hasUpdateIntervalElapsedForScmMaterial(material)) {
                materialsForUpdate.add(material);
            }
        }

        return materialsForUpdate;
    }

    /**
     * Returns true if the material has never been updated, or if at least
     * {@code materialUpdateInterval} millis have passed since its last update.
     */
    boolean hasUpdateIntervalElapsedForScmMaterial(Material material) {
        Long lastMaterialUpdateTime = materialLastUpdateTimeMap.get(material);
        if (lastMaterialUpdateTime != null) {
            boolean shouldUpdateMaterial = (DateTimeUtils.currentTimeMillis() - lastMaterialUpdateTime) >= materialUpdateInterval;
            if (LOGGER.isDebugEnabled() && !shouldUpdateMaterial) {
                LOGGER.debug("[Material Update] Skipping update of material {} which has been last updated at {}", material, new Date(lastMaterialUpdateTime));
            }
            return shouldUpdateMaterial;
        }
        return true;
    }

    // Stamps the material with "now" (via joda DateTimeUtils, so tests can fake time).
    private void updateLastUpdateTimeForScmMaterial(Material material) {
        materialLastUpdateTimeMap.put(material, DateTimeUtils.currentTimeMillis());
    }

    // Rebuilds the cached material set when forced or not yet populated.
    private void updateSchedulableMaterials(boolean forceLoad) {
        if (forceLoad || schedulableMaterials == null) {
            schedulableMaterials = materialConfigConverter.toMaterials(goConfigService.getSchedulableSCMMaterials());
        }
    }

    /**
     * Base listener that only reacts to entity types which can affect the set
     * of schedulable SCM materials.
     */
    private abstract class InternalConfigChangeListener extends EntityConfigChangedListener<Object> {
        private final List<Class<?>> securityConfigClasses = Arrays.asList(
                PipelineConfig.class,
                PackageDefinition.class,
                PackageRepository.class,
                SCM.class
        );

        @Override
        public boolean shouldCareAbout(Object entity) {
            return securityConfigClasses.stream().anyMatch(aClass -> aClass.isAssignableFrom(entity.getClass()));
        }
    }
}
| Skarlso/gocd | server/src/main/java/com/thoughtworks/go/server/materials/SCMMaterialSource.java | Java | apache-2.0 | 6,425 |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.score.stream.drools.quad;
import java.util.function.Function;
import java.util.function.Supplier;
import org.optaplanner.core.api.function.PentaFunction;
import org.optaplanner.core.api.function.QuadFunction;
import org.optaplanner.core.api.score.stream.quad.QuadConstraintCollector;
import org.optaplanner.core.impl.score.stream.drools.common.BiTuple;
import org.optaplanner.core.impl.score.stream.drools.common.DroolsAbstractUniCollectingGroupByCollectorProcessor;
import org.optaplanner.core.impl.score.stream.drools.common.QuadTuple;
import org.optaplanner.core.impl.score.stream.drools.common.TriTuple;
/**
 * Groups quad tuples (A, B, C, D) by a two-part key (NewA, NewB) and collects
 * each group into a NewC result, producing (NewA, NewB, NewC) tri tuples.
 */
final class DroolsQuadToTriGroupByCollectorProcessor<A, B, C, D, ResultContainer, NewA, NewB, NewC> extends
        DroolsAbstractUniCollectingGroupByCollectorProcessor<ResultContainer, QuadTuple<A, B, C, D>, BiTuple<NewA, NewB>, TriTuple<NewA, NewB, NewC>> {

    private final QuadFunction<A, B, C, D, NewA> groupKeyAMapping;
    private final QuadFunction<A, B, C, D, NewB> groupKeyBMapping;
    private final Supplier<ResultContainer> supplier;
    private final PentaFunction<ResultContainer, A, B, C, D, Runnable> accumulator;
    private final Function<ResultContainer, NewC> finisher;

    public DroolsQuadToTriGroupByCollectorProcessor(QuadFunction<A, B, C, D, NewA> groupKeyAMapping,
            QuadFunction<A, B, C, D, NewB> groupKeyBMapping,
            QuadConstraintCollector<A, B, C, D, ResultContainer, NewC> collector) {
        this.groupKeyAMapping = groupKeyAMapping;
        this.groupKeyBMapping = groupKeyBMapping;
        this.supplier = collector.supplier();
        this.accumulator = collector.accumulator();
        this.finisher = collector.finisher();
    }

    /** Derives the composite (NewA, NewB) group key from the incoming tuple. */
    @Override
    protected BiTuple<NewA, NewB> toKey(QuadTuple<A, B, C, D> tuple) {
        NewA keyA = groupKeyAMapping.apply(tuple.a, tuple.b, tuple.c, tuple.d);
        NewB keyB = groupKeyBMapping.apply(tuple.a, tuple.b, tuple.c, tuple.d);
        return new BiTuple<>(keyA, keyB);
    }

    /** Creates a fresh accumulation container for a new group. */
    @Override
    protected ResultContainer newContainer() {
        return supplier.get();
    }

    /**
     * Folds the tuple into the group's container; the returned Runnable is the
     * collector-provided undo operation.
     */
    @Override
    protected Runnable process(QuadTuple<A, B, C, D> tuple, ResultContainer container) {
        return accumulator.apply(container, tuple.a, tuple.b, tuple.c, tuple.d);
    }

    /** Finishes the container and pairs the result with its group key. */
    @Override
    protected TriTuple<NewA, NewB, NewC> toResult(BiTuple<NewA, NewB> key, ResultContainer container) {
        NewC collected = finisher.apply(container);
        return new TriTuple<>(key.a, key.b, collected);
    }
}
| ge0ffrey/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/score/stream/drools/quad/DroolsQuadToTriGroupByCollectorProcessor.java | Java | apache-2.0 | 3,217 |
/**
* Copyright (C) 2013-2016 The Rythm Engine project
* for LICENSE and other details see:
* https://github.com/rythmengine/rythmengine
*/
package org.rythmengine.cache;
/*-
* #%L
* Rythm Template Engine
* %%
* Copyright (C) 2017 - 2021 OSGL (Open Source General Library)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.rythmengine.extension.ICacheService;
import org.rythmengine.extension.ICacheServiceFactory;
/**
 * Factory that supplies the shared {@link EhCacheService} singleton as the
 * engine's {@link ICacheService} implementation.
 */
class EhCacheServiceFactory implements ICacheServiceFactory {

    /**
     * {@inheritDoc}
     *
     * @return the singleton Ehcache-backed cache service
     */
    @Override
    public ICacheService get() {
        return EhCacheService.INSTANCE;
    }
}
| rythmengine/rythmengine | src/main/java/org/rythmengine/cache/EhCacheServiceFactory.java | Java | apache-2.0 | 1,271 |
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.core.api.security;
/**
* Serves as baseclass for all Security-related Exceptions in the OpenEngSB (similar to
* {@link java.security.GeneralSecurityException}
*
*/
public abstract class OpenEngSBSecurityException extends Exception {

    private static final long serialVersionUID = -2939758040088724227L;

    /** Creates a security exception with no detail message or cause. */
    public OpenEngSBSecurityException() {
    }

    /**
     * @param message detail message describing the security failure
     * @param cause   underlying cause of the failure
     */
    public OpenEngSBSecurityException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * @param message detail message describing the security failure
     */
    public OpenEngSBSecurityException(String message) {
        super(message);
    }

    /**
     * @param cause underlying cause of the failure
     */
    public OpenEngSBSecurityException(Throwable cause) {
        super(cause);
    }
}
| openengsb-attic/openengsb-api | src/main/java/org/openengsb/core/api/security/OpenEngSBSecurityException.java | Java | apache-2.0 | 1,499 |
package org.andidev.applicationname.format.custom;
import java.util.Locale;
import org.andidev.applicationname.format.annotation.CustomFormat;
import org.apache.commons.lang3.StringUtils;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.SpelParseException;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.format.Printer;
/**
 * {@link Printer} that formats a value by evaluating a SpEL expression
 * against it and returning the result's string form.
 */
public class CustomPrinter implements Printer<Object> {

    // SpelExpressionParser is thread-safe per the Spring Expression docs, so a
    // single shared instance avoids allocating a new parser on every print() call.
    private static final ExpressionParser PARSER = new SpelExpressionParser();

    // SpEL expression used to format the value; null when none was supplied.
    private final String spelExpression;
    private final EvaluationContext evaluationContext;

    /**
     * @param spelExpression    SpEL expression to evaluate against the printed
     *                          object; blank/empty is treated as absent
     * @param evaluationContext context the expression is evaluated in
     */
    public CustomPrinter(String spelExpression, EvaluationContext evaluationContext) {
        this.spelExpression = StringUtils.defaultIfBlank(spelExpression, null);
        this.evaluationContext = evaluationContext;
    }

    /**
     * Evaluates the configured expression against {@code object}.
     *
     * @return the evaluation result as a string, or {@code null} if no
     *         expression was configured or the expression evaluated to null
     * @throws CustomFormatException if the expression cannot be parsed
     */
    @Override
    public String print(Object object, Locale locale) {
        if (spelExpression == null) {
            return null;
        }
        try {
            Object result = PARSER.parseExpression(spelExpression).getValue(evaluationContext, object);
            // Guard against expressions that legitimately evaluate to null; the
            // previous code would have thrown a NullPointerException here.
            return result == null ? null : result.toString();
        } catch (SpelParseException e) {
            throw new CustomFormatException("Could not parse spel expression = \"" + spelExpression + "\" in " + CustomFormat.class.getSimpleName() + " annotation: " + e.getMessage());
        }
    }
}
| andidev/spring-bootstrap-enterprise | src/main/java/org/andidev/applicationname/format/custom/CustomPrinter.java | Java | apache-2.0 | 1,468 |
package com.capitalone.dashboard.model;
import com.capitalone.dashboard.util.FeatureCollectorConstants;
import org.springframework.stereotype.Component;
/**
* Collector implementation for Feature that stores system configuration
* settings required for source system data connection (e.g., API tokens, etc.)
*/
@Component
public class TestResultCollector extends Collector {
/**
* Creates a static prototype of the Feature Collector, which includes any
* specific settings or configuration required for the use of this
* collector, including settings for connecting to any source systems.
*
* @return A configured TestResult Collector prototype
*/
public static TestResultCollector prototype() {
TestResultCollector protoType = new TestResultCollector();
protoType.setName(FeatureCollectorConstants.JIRA_XRAY);
protoType.setOnline(true);
protoType.setEnabled(true);
protoType.setCollectorType(CollectorType.Test);
protoType.setLastExecuted(System.currentTimeMillis());
return protoType;
}
} | tabladrum/Hygieia | collectors/test-results/jira-xray/src/main/java/com/capitalone/dashboard/model/TestResultCollector.java | Java | apache-2.0 | 1,094 |
/*
* Copyright 2015 - 2017 Atlarge Research Team,
* operating at Technische Universiteit Delft
* and Vrije Universiteit Amsterdam, the Netherlands.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package science.atlarge.granula.modeller.rule.derivation.time;
import science.atlarge.granula.modeller.platform.info.BasicInfo;
import science.atlarge.granula.modeller.platform.info.InfoSource;
import science.atlarge.granula.modeller.platform.operation.Operation;
import science.atlarge.granula.modeller.rule.derivation.DerivationRule;
import science.atlarge.granula.modeller.platform.info.Info;
import science.atlarge.granula.modeller.platform.info.Source;
import java.util.ArrayList;
import java.util.List;
/**
 * Derivation rule that copies the parent operation's [EndTime] info onto the
 * current operation, recording the parent info as the source of the value.
 */
public class ParentalEndTimeDerivation extends DerivationRule {

    public ParentalEndTimeDerivation(int level) {
        super(level);
    }

    @Override
    public boolean execute() {
        Operation op = (Operation) entity;
        Info parentEndTime = op.getParent().getInfo("EndTime");
        // Parse to validate/normalize the numeric value before re-emitting it.
        long endTime = Long.parseLong(parentEndTime.getValue());

        List<Source> sources = new ArrayList<>();
        sources.add(new InfoSource("ParentalEndTime", parentEndTime));

        BasicInfo derived = new BasicInfo("EndTime");
        derived.setDescription("The [EndTime] of an (abstract) operation is derived from the largest value of [FilialEndTimes], which are [EndTime]s of all child operations.");
        derived.addInfo(String.valueOf(endTime), sources);
        op.addInfo(derived);
        return true;
    }
}
| tudelft-atlarge/granula | granula-modeller/src/main/java/science/atlarge/granula/modeller/rule/derivation/time/ParentalEndTimeDerivation.java | Java | apache-2.0 | 2,098 |
/*
* Copyright (C) 2015 Apptik Project
* Copyright (C) 2014 Kalin Maldzhanski
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apptik.comm.jus.error;
import io.apptik.comm.jus.NetworkResponse;
/**
* Error indicating that there was an authentication failure when performing a Request.
*/
@SuppressWarnings("serial")
public class AuthError extends RequestError {

    /**
     * Creates an AuthError for the given network response.
     *
     * @param response the response that signalled the authentication failure
     */
    public AuthError(NetworkResponse response) {
        super(response);
    }

    /**
     * Creates an AuthError with a detail message.
     *
     * @param response         the response that signalled the authentication failure
     * @param exceptionMessage detail message describing the failure
     */
    public AuthError(NetworkResponse response, String exceptionMessage) {
        super(response, exceptionMessage);
    }

    /**
     * Creates an AuthError with a detail message and an underlying cause.
     *
     * @param response         the response that signalled the authentication failure
     * @param exceptionMessage detail message describing the failure
     * @param reason           the underlying cause of this error
     */
    public AuthError(NetworkResponse response, String exceptionMessage, Throwable reason) {
        super(response, exceptionMessage, reason);
    }

    /**
     * Creates an AuthError with an underlying cause.
     *
     * @param response the response that signalled the authentication failure
     * @param reason   the underlying cause of this error
     */
    public AuthError(NetworkResponse response, Throwable reason) {
        super(response, reason);
    }
}
| djodjoni/jus | jus-java/src/main/java/io/apptik/comm/jus/error/AuthError.java | Java | apache-2.0 | 1,412 |
/*
* Copyright 2006-2008 Kazuyuki Shudo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dhtaccess.tools;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import dhtaccess.core.DHTAccessor;
/**
 * Command-line tool that removes a key/value pair from the DHT via an
 * OpenDHT-style gateway. Usage: rm [-h] [-g gateway] [-t ttl] key value secret
 */
public class Remove {
	private static final String COMMAND = "rm";
	private static final String ENCODE = "UTF-8";
	private static final String DEFAULT_GATEWAY = "http://opendht.nyuld.net:5851/";

	private static void usage(String command) {
		System.out.println("usage: " + command
				+ " [-h] [-g <gateway>] [-t <ttl (sec)>] <key> <value> <secret>");
	}

	public static void main(String[] args) {
		// Gateway precedence: -g option > dhtaccess.gateway property > default.
		String gateway = System.getProperty("dhtaccess.gateway");
		if (gateway == null || gateway.length() <= 0) {
			gateway = DEFAULT_GATEWAY;
		}

		// Declare the supported command-line options.
		Options options = new Options();
		options.addOption("h", "help", false, "print help");
		options.addOption("g", "gateway", true, "gateway URI, list at http://opendht.org/servers.txt");
		options.addOption("t", "ttl", true, "how long (in seconds) to store the value");

		CommandLineParser parser = new PosixParser();
		CommandLine cmd = null;
		try {
			cmd = parser.parse(options, args);
		} catch (ParseException e) {
			System.out.println("There is an invalid option.");
			e.printStackTrace();
			System.exit(1);
		}

		if (cmd.hasOption('h')) {
			usage(COMMAND);
			System.exit(1);
		}

		int ttl = 3600;
		String gatewayOption = cmd.getOptionValue('g');
		if (gatewayOption != null) {
			gateway = gatewayOption;
		}
		String ttlOption = cmd.getOptionValue('t');
		if (ttlOption != null) {
			ttl = Integer.parseInt(ttlOption);
		}

		// Positional arguments: <key> <value> <secret>.
		String[] positional = cmd.getArgs();
		if (positional.length < 3) {
			usage(COMMAND);
			System.exit(1);
		}

		byte[] key = null;
		byte[] value = null;
		byte[] secret = null;
		try {
			key = positional[0].getBytes(ENCODE);
			value = positional[1].getBytes(ENCODE);
			secret = positional[2].getBytes(ENCODE);
		} catch (UnsupportedEncodingException e) {
			// NOTREACHED: UTF-8 is always supported by the JVM.
		}

		// Connect to the gateway.
		DHTAccessor accessor = null;
		try {
			accessor = new DHTAccessor(gateway);
		} catch (MalformedURLException e) {
			e.printStackTrace();
			System.exit(1);
		}

		// Perform the remove RPC and report the gateway's status code.
		int res = accessor.remove(key, value, ttl, secret);
		String resultString;
		if (res == 0) {
			resultString = "Success";
		} else if (res == 1) {
			resultString = "Capacity";
		} else if (res == 2) {
			resultString = "Again";
		} else {
			resultString = "???";
		}
		System.out.println(resultString);
	}
}
| shudo/dht-access | src/dhtaccess/tools/Remove.java | Java | apache-2.0 | 3,313 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.problems;
import org.apache.flex.compiler.tree.as.IASNode;
/**
* Diagnostic emitted when the code generator detects
* a definition that conflicts with an inherited definition
* from a superclass.
*/
public final class ConflictingInheritedNameInNamespaceProblem extends SemanticProblem
{
    // Message template; ${declName} and ${nsName} are substituted from the
    // public fields below when the problem is reported.
    public static final String DESCRIPTION =
        "A conflict exists with inherited definition ${declName} in namespace ${nsName}.";

    // Compiler error code assigned to this diagnostic.
    public static final int errorCode = 1152;

    /**
     * @param site the AST node at which the conflict was detected
     * @param declName name of the conflicting inherited definition
     * @param nsName name of the namespace in which the conflict occurs
     */
    public ConflictingInheritedNameInNamespaceProblem(IASNode site, String declName, String nsName)
    {
        super(site);
        this.declName = declName;
        this.nsName = nsName;
    }

    // Values substituted into DESCRIPTION.
    public final String declName;
    public final String nsName;
}
| adufilie/flex-falcon | compiler/src/org/apache/flex/compiler/problems/ConflictingInheritedNameInNamespaceProblem.java | Java | apache-2.0 | 1,604 |
package org.elasticsearch.painless;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */
/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */
public class FeatureTestObject2 {

    public FeatureTestObject2() {
        super();
    }

    /** Returns the product of the injected value and the user-supplied argument. */
    public static int staticNumberArgument(int injected, int userArgument) {
        return userArgument * injected;
    }

    /** Returns the product of the two user-supplied arguments. */
    public static int staticNumberArgument2(int userArgument1, int userArgument2) {
        return userArgument2 * userArgument1;
    }
}
| nknize/elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/FeatureTestObject2.java | Java | apache-2.0 | 1,258 |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.rest.provider;
import org.jboss.pnc.model.ProductMilestone;
import org.jboss.pnc.model.ProductMilestoneRelease;
import org.jboss.pnc.rest.restmodel.ProductMilestoneReleaseRest;
import org.jboss.pnc.spi.datastore.repositories.PageInfoProducer;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneReleaseRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneRepository;
import org.jboss.pnc.spi.datastore.repositories.SortInfoProducer;
import org.jboss.pnc.spi.datastore.repositories.api.RSQLPredicateProducer;
import javax.ejb.Stateless;
import javax.inject.Inject;
import java.util.function.Function;
@Stateless
public class ProductMilestoneReleaseProvider extends AbstractProvider<ProductMilestoneRelease, ProductMilestoneReleaseRest> {

    private ProductMilestoneReleaseRepository releaseRepository;
    private ProductMilestoneRepository milestoneRepository;

    @Inject
    public ProductMilestoneReleaseProvider(ProductMilestoneReleaseRepository releaseRepository,
            ProductMilestoneRepository milestoneRepository,
            RSQLPredicateProducer rsqlPredicateProducer,
            SortInfoProducer sortInfoProducer, PageInfoProducer pageInfoProducer) {
        super(releaseRepository, rsqlPredicateProducer, sortInfoProducer, pageInfoProducer);
        this.releaseRepository = releaseRepository;
        this.milestoneRepository = milestoneRepository;
    }

    // Required no-arg constructor for EJB/CDI proxying; not for application use.
    @Deprecated
    public ProductMilestoneReleaseProvider() {
    }

    @Override
    protected Function<? super ProductMilestoneRelease, ? extends ProductMilestoneReleaseRest> toRESTModel() {
        return ProductMilestoneReleaseRest::new;
    }

    @Override
    protected Function<? super ProductMilestoneReleaseRest, ? extends ProductMilestoneRelease> toDBModel() {
        throw new IllegalStateException("ProductMilestoneRelease entity is not to be created via REST");
    }

    /**
     * Finds the most recent release for the given milestone id.
     * Returns {@code null} when the milestone does not exist or has no release.
     */
    public ProductMilestoneReleaseRest latestForMilestone(Integer milestoneId) {
        ProductMilestone milestone = milestoneRepository.queryById(milestoneId);
        if (milestone == null) {
            return null;
        }
        ProductMilestoneRelease release = releaseRepository.findLatestByMilestone(milestone);
        if (release == null) {
            return null;
        }
        return toRESTModel().apply(release);
    }
}
| dans123456/pnc | rest/src/main/java/org/jboss/pnc/rest/provider/ProductMilestoneReleaseProvider.java | Java | apache-2.0 | 3,113 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.TestSupport;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.DefaultMessage;
import org.apache.camel.spi.DataFormat;
public class UnmarshalProcessorTest extends TestSupport {

    // When the DataFormat returns the very Exchange it was given, the
    // processor must copy the IN body to the OUT message.
    public void testDataFormatReturnsSameExchange() throws Exception {
        Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
        Processor processor = new UnmarshalProcessor(new MyDataFormat(exchange));

        processor.process(exchange);

        assertEquals("UnmarshalProcessor did not copy OUT from IN message", "body", exchange.getOut().getBody());
    }

    // When the DataFormat returns a *different* Exchange, the processor must
    // reject it with a RuntimeCamelException rather than silently using it.
    public void testDataFormatReturnsAnotherExchange() throws Exception {
        CamelContext context = new DefaultCamelContext();
        Exchange exchange = createExchangeWithBody(context, "body");
        Exchange exchange2 = createExchangeWithBody(context, "body2");
        Processor processor = new UnmarshalProcessor(new MyDataFormat(exchange2));

        try {
            processor.process(exchange);
            fail("Should have thrown exception");
        } catch (RuntimeCamelException e) {
            assertEquals("The returned exchange " + exchange2 + " is not the same as " + exchange + " provided to the DataFormat", e.getMessage());
        }
    }

    // When the DataFormat returns a Message, that exact Message (and its body)
    // must become the OUT message of the exchange.
    public void testDataFormatReturnsMessage() throws Exception {
        Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
        Message out = new DefaultMessage();
        out.setBody(new Object());
        Processor processor = new UnmarshalProcessor(new MyDataFormat(out));

        processor.process(exchange);

        assertSame("UnmarshalProcessor did not make use of the returned OUT message", out, exchange.getOut());
        assertSame("UnmarshalProcessor did change the body bound to the OUT message", out.getBody(), exchange.getOut().getBody());
    }

    // When the DataFormat returns a plain object, it must be bound as the body
    // of the OUT message.
    public void testDataFormatReturnsBody() throws Exception {
        Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
        Object unmarshalled = new Object();
        Processor processor = new UnmarshalProcessor(new MyDataFormat(unmarshalled));

        processor.process(exchange);

        assertSame("UnmarshalProcessor did not make use of the returned object being returned while unmarshalling", unmarshalled, exchange.getOut().getBody());
    }

    // Stub DataFormat whose unmarshal() always returns the object captured at
    // construction time (an Exchange, Message, or arbitrary body object),
    // letting each test drive a specific UnmarshalProcessor code path.
    private static class MyDataFormat implements DataFormat {
        private final Object object;

        MyDataFormat(Exchange exchange) {
            object = exchange;
        }

        MyDataFormat(Message message) {
            object = message;
        }

        MyDataFormat(Object unmarshalled) {
            object = unmarshalled;
        }

        @Override
        public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception {
            // Tests only exercise unmarshalling; marshal must never be called.
            throw new IllegalAccessException("This method is not expected to be used by UnmarshalProcessor");
        }

        @Override
        public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
            return object;
        }
    }
}
| aaronwalker/camel | camel-core/src/test/java/org/apache/camel/processor/UnmarshalProcessorTest.java | Java | apache-2.0 | 4,219 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.java.util.common;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
 * Utility methods for composing {@link CloseableIterator}s. Closing a returned
 * iterator closes every underlying iterator it was built from.
 */
public class CloseableIterators
{
  public static <T> CloseableIterator<T> concat(List<? extends CloseableIterator<? extends T>> iterators)
  {
    final Closer closer = Closer.create();
    for (CloseableIterator<? extends T> iterator : iterators) {
      closer.register(iterator);
    }
    return wrap(Iterators.concat(iterators.iterator()), closer);
  }

  public static <T> CloseableIterator<T> mergeSorted(
      List<? extends CloseableIterator<? extends T>> iterators,
      Comparator<T> comparator
  )
  {
    Preconditions.checkNotNull(comparator);
    final Closer closer = Closer.create();
    for (CloseableIterator<? extends T> iterator : iterators) {
      closer.register(iterator);
    }
    return wrap(Iterators.mergeSorted(iterators, comparator), closer);
  }

  public static <T> CloseableIterator<T> wrap(Iterator<T> innerIterator, @Nullable Closeable closeable)
  {
    return new CloseableIterator<T>()
    {
      // Guards against closing the delegate more than once.
      private boolean closed;

      @Override
      public boolean hasNext()
      {
        return innerIterator.hasNext();
      }

      @Override
      public T next()
      {
        return innerIterator.next();
      }

      @Override
      public void close() throws IOException
      {
        if (closed) {
          return;
        }
        if (closeable != null) {
          closeable.close();
        }
        closed = true;
      }
    };
  }

  public static <T> CloseableIterator<T> withEmptyBaggage(Iterator<T> innerIterator)
  {
    return wrap(innerIterator, null);
  }

  private CloseableIterators() {}
}
| dkhwangbo/druid | java-util/src/main/java/org/apache/druid/java/util/common/CloseableIterators.java | Java | apache-2.0 | 2,771 |
package com.code.constant;
/**
* Created by niu on 2017/8/17.
*/
public class StringEvent {
    // Event identifier fired when network connectivity changes.
    // Declared final so other classes cannot reassign this shared constant.
    public static final String NET_STATE_CHANGE = "net_state_change";
}
| niuzhijun66/NiuStudyDemo | app/src/main/java/com/code/constant/StringEvent.java | Java | apache-2.0 | 187 |
/*
* Copyright 2013-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.util;
import org.cloudfoundry.client.v2.ClientV2Exception;
import reactor.core.publisher.Mono;
import java.util.Arrays;
import java.util.function.Predicate;
/**
* Utilities for dealing with {@link Exception}s
*/
public final class ExceptionUtils {
private ExceptionUtils() {
}
/**
* Returns a {@link Mono} containing an {@link IllegalArgumentException} with the configured message
*
* @param format A <a href="../util/Formatter.html#syntax">format string</a>
* @param args Arguments referenced by the format specifiers in the format string. If there are more arguments than format specifiers, the extra arguments are ignored. The number of arguments
* is variable and may be zero. The maximum number of arguments is limited by the maximum dimension of a Java array as defined by <cite>The Java™ Virtual Machine
* Specification</cite>. The behaviour on a {@code null} argument depends on the <a href="../util/Formatter.html#syntax">conversion</a>.
* @param <T> the type of the {@link Mono} being converted
* @return a {@link Mono} containing the error
*/
public static <T> Mono<T> illegalArgument(String format, Object... args) {
String message = String.format(format, args);
return Mono.error(new IllegalArgumentException(message));
}
/**
* Returns a {@link Mono} containing an {@link IllegalStateException} with the configured message
*
* @param format A <a href="../util/Formatter.html#syntax">format string</a>
* @param args Arguments referenced by the format specifiers in the format string. If there are more arguments than format specifiers, the extra arguments are ignored. The number of arguments
* is variable and may be zero. The maximum number of arguments is limited by the maximum dimension of a Java array as defined by <cite>The Java™ Virtual Machine
* Specification</cite>. The behaviour on a {@code null} argument depends on the <a href="../util/Formatter.html#syntax">conversion</a>.
* @param <T> the type of the {@link Mono} being converted
* @return a {@link Mono} containing the error
*/
public static <T> Mono<T> illegalState(String format, Object... args) {
String message = String.format(format, args);
return Mono.error(new IllegalStateException(message));
}
/**
* A predicate that returns {@code true} if the exception is a {@link ClientV2Exception} and its code matches expectation
*
* @param codes the codes to match
* @return {@code true} if the exception is a {@link ClientV2Exception} and its code matches
*/
public static Predicate<? super Throwable> statusCode(int... codes) {
return t -> t instanceof ClientV2Exception &&
Arrays.stream(codes).anyMatch(candidate -> ((ClientV2Exception) t).getCode().equals(candidate));
}
}
| Orange-OpenSource/cf-java-client | cloudfoundry-util/src/main/java/org/cloudfoundry/util/ExceptionUtils.java | Java | apache-2.0 | 3,609 |
/*
* Copyright 2012-2014 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.samplestack.web.security;
import java.io.IOException;
import java.io.Writer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import org.apache.http.HttpStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.stereotype.Component;
import com.marklogic.samplestack.web.JsonHttpResponse;
/**
 * Class to customize the default Login handling. Rather than redirection
 * to a login form, Samplestack simply denies access
 * (where authentication is required).
 */
@Component
public class SamplestackAuthenticationEntryPoint implements
		AuthenticationEntryPoint {

	// Helper that serializes an error status/message pair as a JSON body.
	@Autowired
	private JsonHttpResponse errors;

	/**
	 * Override handler that returns 401 for any unauthenticated
	 * request to a secured endpoint, writing a JSON error body
	 * instead of redirecting to a login page.
	 */
	@Override
	public void commence(HttpServletRequest request,
			HttpServletResponse response, AuthenticationException authException)
			throws IOException {
		HttpServletResponseWrapper responseWrapper = new HttpServletResponseWrapper(
				response);
		responseWrapper.setStatus(HttpStatus.SC_UNAUTHORIZED);
		Writer out = responseWrapper.getWriter();
		errors.writeJsonResponse(out, HttpStatus.SC_UNAUTHORIZED, "Unauthorized");
		out.close();
	}
}
| laurelnaiad/marklogic-samplestack-old | appserver/java-spring/src/main/java/com/marklogic/samplestack/web/security/SamplestackAuthenticationEntryPoint.java | Java | apache-2.0 | 2,075 |
/*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.common;
import static java.util.Objects.requireNonNull;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.Locale;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;
/**
* HTTP/2 data. Helpers in this class create {@link HttpData} objects that leave the stream open.
* To create a {@link HttpData} that closes the stream, directly instantiate {@link DefaultHttpData}.
*
* <p>Implementations should generally extend {@link AbstractHttpData} to interact with other {@link HttpData}
* implementations.
*/
public interface HttpData extends HttpObject {
    /**
     * Empty HTTP/2 data.
     */
    HttpData EMPTY_DATA = new DefaultHttpData(new byte[0], 0, 0, false);

    /**
     * Creates a new instance from the specified byte array. The array is not copied; any changes made in the
     * array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of the specified array is 0.
     */
    static HttpData of(byte[] data) {
        requireNonNull(data, "data");
        if (data.length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, 0, data.length, false);
    }

    /**
     * Creates a new instance from the specified byte array, {@code offset} and {@code length}.
     * The array is not copied; any changes made in the array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code length} is 0.
     *
     * @throws ArrayIndexOutOfBoundsException if {@code offset} and {@code length} are out of bounds
     */
    static HttpData of(byte[] data, int offset, int length) {
        // Include the parameter name in the NPE message, consistent with the
        // other factory methods in this interface.
        requireNonNull(data, "data");
        // 'data.length - length' form avoids integer overflow of 'offset + length'.
        if (offset < 0 || length < 0 || offset > data.length - length) {
            throw new ArrayIndexOutOfBoundsException(
                    "offset: " + offset + ", length: " + length + ", data.length: " + data.length);
        }
        if (length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, offset, length, false);
    }

    /**
     * Converts the specified {@code text} into an {@link HttpData}.
     *
     * @param charset the {@link Charset} to use for encoding {@code text}
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData of(Charset charset, String text) {
        requireNonNull(charset, "charset");
        requireNonNull(text, "text");
        if (text.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(text.getBytes(charset));
    }

    /**
     * Converts the specified Netty {@link ByteBuf} into an {@link HttpData}. Unlike {@link #of(byte[])}, this
     * method makes a copy of the {@link ByteBuf}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the readable bytes of {@code buf} is 0.
     */
    static HttpData of(ByteBuf buf) {
        requireNonNull(buf, "buf");
        if (!buf.isReadable()) {
            return EMPTY_DATA;
        }
        return of(ByteBufUtil.getBytes(buf));
    }

    /**
     * Converts the specified formatted string into an {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param charset the {@link Charset} to use for encoding string
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData of(Charset charset, String format, Object... args) {
        requireNonNull(charset, "charset");
        requireNonNull(format, "format");
        requireNonNull(args, "args");
        if (format.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(String.format(Locale.ENGLISH, format, args).getBytes(charset));
    }

    /**
     * Converts the specified {@code text} into a UTF-8 {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofUtf8(String text) {
        return of(StandardCharsets.UTF_8, text);
    }

    /**
     * Converts the specified formatted string into a UTF-8 {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofUtf8(String format, Object... args) {
        return of(StandardCharsets.UTF_8, format, args);
    }

    /**
     * Converts the specified {@code text} into a US-ASCII {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofAscii(String text) {
        return of(StandardCharsets.US_ASCII, text);
    }

    /**
     * Converts the specified formatted string into a US-ASCII {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofAscii(String format, Object... args) {
        return of(StandardCharsets.US_ASCII, format, args);
    }

    /**
     * Returns the underlying byte array of this data.
     */
    byte[] array();

    /**
     * Returns the start offset of the {@link #array()}.
     */
    int offset();

    /**
     * Returns the length of this data.
     */
    int length();

    /**
     * Returns whether the {@link #length()} is 0.
     */
    default boolean isEmpty() {
        return length() == 0;
    }

    /**
     * Decodes this data into a {@link String}.
     *
     * @param charset the {@link Charset} to use for decoding this data
     *
     * @return the decoded {@link String}
     */
    default String toString(Charset charset) {
        requireNonNull(charset, "charset");
        return new String(array(), offset(), length(), charset);
    }

    /**
     * Decodes this data into a {@link String} using UTF-8 encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringUtf8() {
        return toString(StandardCharsets.UTF_8);
    }

    /**
     * Decodes this data into a {@link String} using US-ASCII encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringAscii() {
        return toString(StandardCharsets.US_ASCII);
    }

    /**
     * Returns a new {@link InputStream} that is sourced from this data.
     */
    default InputStream toInputStream() {
        return new FastByteArrayInputStream(array(), offset(), length());
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using the specified
     * {@link Charset}.
     */
    default Reader toReader(Charset charset) {
        requireNonNull(charset, "charset");
        return new InputStreamReader(toInputStream(), charset);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#UTF_8}.
     */
    default Reader toReaderUtf8() {
        return toReader(StandardCharsets.UTF_8);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#US_ASCII}.
     */
    default Reader toReaderAscii() {
        return toReader(StandardCharsets.US_ASCII);
    }
}
| jmostella/armeria | core/src/main/java/com/linecorp/armeria/common/HttpData.java | Java | apache-2.0 | 9,114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.openstack.nova.v2_0;
import static org.jclouds.Constants.PROPERTY_ENDPOINT;
import static org.testng.Assert.assertEquals;
import java.util.Properties;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpResponse;
import org.jclouds.openstack.nova.v2_0.internal.BaseNovaApiExpectTest;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableSet;
/**
 * Verifies that when a service exposes exactly one endpoint, that endpoint is
 * picked even though its id is random.
 */
@Test(groups = "unit", testName = "EndpointIdIsRandomExpectTest")
public class EndpointIdIsRandomExpectTest extends BaseNovaApiExpectTest {

    public EndpointIdIsRandomExpectTest() {
        // Authenticate with tenant-qualified demo credentials.
        identity = "demo:demo";
        credential = "password";
    }

    @Override
    protected Properties setupProperties() {
        Properties props = super.setupProperties();
        // Point the provider at the keystone endpoint used by the canned responses.
        props.setProperty(PROPERTY_ENDPOINT, "http://10.10.10.10:5000/v2.0/");
        return props;
    }

    public void testVersionMatchOnConfiguredRegionsWhenResponseIs2xx() {
        HttpRequest authRequest = HttpRequest.builder()
                .method("POST")
                .endpoint("http://10.10.10.10:5000/v2.0/tokens")
                .addHeader("Accept", "application/json")
                .payload(payloadFromStringWithContentType(
                        "{\"auth\":{\"passwordCredentials\":{\"username\":\"demo\",\"password\":\"password\"},\"tenantName\":\"demo\"}}",
                        "application/json"))
                .build();

        HttpResponse authResponse = HttpResponse.builder()
                .statusCode(200)
                .payload(payloadFromResourceWithContentType("/access_version_uids.json", "application/json"))
                .build();

        NovaApi api = requestSendsResponse(authRequest, authResponse);
        assertEquals(api.getConfiguredRegions(), ImmutableSet.of("RegionOne"));
    }
}
| yanzhijun/jclouds-aliyun | apis/openstack-nova/src/test/java/org/jclouds/openstack/nova/v2_0/EndpointIdIsRandomExpectTest.java | Java | apache-2.0 | 2,716 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.registry.dubbo;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.Version;
import org.apache.dubbo.common.utils.ExecutorUtil;
import org.apache.dubbo.common.utils.NamedThreadFactory;
import org.apache.dubbo.common.utils.NetUtils;
import org.apache.dubbo.registry.NotifyListener;
import org.apache.dubbo.registry.RegistryService;
import org.apache.dubbo.registry.support.FailbackRegistry;
import org.apache.dubbo.remoting.Constants;
import org.apache.dubbo.rpc.Invoker;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.dubbo.registry.Constants.REGISTRY_RECONNECT_PERIOD_KEY;
/**
 * DubboRegistry: a {@link FailbackRegistry} that delegates every registry
 * operation (register/unregister/subscribe/unsubscribe/lookup) to a remote
 * {@link RegistryService} reached through a Dubbo {@link Invoker}. A background
 * timer periodically re-establishes the connection when it becomes unavailable.
 */
public class DubboRegistry extends FailbackRegistry {

    // Reconnecting detection cycle: 3 seconds (unit:millisecond)
    private static final int RECONNECT_PERIOD_DEFAULT = 3 * 1000;

    // Single-threaded scheduled executor that drives the reconnection checks.
    private final ScheduledExecutorService reconnectTimer = Executors.newScheduledThreadPool(1, new NamedThreadFactory("DubboRegistryReconnectTimer", true));

    // Reconnection timer, regular check connection is available. If unavailable, unlimited reconnection.
    private final ScheduledFuture<?> reconnectFuture;

    // The lock for client acquisition process, lock the creation process of the client instance to prevent repeated clients
    private final ReentrantLock clientLock = new ReentrantLock();

    // Invoker used both to talk to the remote registry and to report availability.
    private final Invoker<RegistryService> registryInvoker;

    // Proxy of the remote registry that all operations are delegated to.
    private final RegistryService registryService;

    /**
     * The time in milliseconds the reconnectTimer will wait
     */
    private final int reconnectPeriod;

    /**
     * Creates the registry and starts the periodic reconnection check.
     *
     * @param registryInvoker invoker pointing at the remote registry service
     * @param registryService client proxy of the remote registry service
     */
    public DubboRegistry(Invoker<RegistryService> registryInvoker, RegistryService registryService) {
        super(registryInvoker.getUrl());
        this.registryInvoker = registryInvoker;
        this.registryService = registryService;
        // Start reconnection timer
        this.reconnectPeriod = registryInvoker.getUrl().getParameter(REGISTRY_RECONNECT_PERIOD_KEY, RECONNECT_PERIOD_DEFAULT);
        // NOTE(review): the lambda captures 'this' before the constructor completes;
        // the initial delay of reconnectPeriod makes an early callback unlikely — confirm.
        reconnectFuture = reconnectTimer.scheduleWithFixedDelay(() -> {
            // Check and connect to the registry
            try {
                connect();
            } catch (Throwable t) { // Defensive fault tolerance
                logger.error("Unexpected error occur at reconnect, cause: " + t.getMessage(), t);
            }
        }, reconnectPeriod, reconnectPeriod, TimeUnit.MILLISECONDS);
    }

    /**
     * Re-establishes the connection to the registry if it is currently
     * unavailable. Guards recovery with {@link #clientLock} plus a double check
     * so only one thread performs it. When the url's "check" parameter is true,
     * failures are rethrown as {@link RuntimeException}; otherwise they are
     * logged and the next timer tick retries.
     */
    protected final void connect() {
        try {
            // Check whether or not it is connected
            if (isAvailable()) {
                return;
            }
            if (logger.isInfoEnabled()) {
                logger.info("Reconnect to registry " + getUrl());
            }
            clientLock.lock();
            try {
                // Double check whether or not it is connected
                if (isAvailable()) {
                    return;
                }
                recover();
            } finally {
                clientLock.unlock();
            }
        } catch (Throwable t) { // Ignore all the exceptions and wait for the next retry
            if (getUrl().getParameter(Constants.CHECK_KEY, true)) {
                if (t instanceof RuntimeException) {
                    throw (RuntimeException) t;
                }
                throw new RuntimeException(t.getMessage(), t);
            }
            logger.error("Failed to connect to registry " + getUrl().getAddress() + " from provider/consumer " + NetUtils.getLocalHost() + " use dubbo " + Version.getVersion() + ", cause: " + t.getMessage(), t);
        }
    }

    /** A registry is available exactly when its underlying invoker is. */
    @Override
    public boolean isAvailable() {
        if (registryInvoker == null) {
            return false;
        }
        return registryInvoker.isAvailable();
    }

    /** Cancels the reconnection timer, destroys the invoker, then shuts down the executor. */
    @Override
    public void destroy() {
        super.destroy();
        try {
            // Cancel the reconnection timer
            ExecutorUtil.cancelScheduledFuture(reconnectFuture);
        } catch (Throwable t) {
            logger.warn("Failed to cancel reconnect timer", t);
        }
        registryInvoker.destroy();
        ExecutorUtil.gracefulShutdown(reconnectTimer, reconnectPeriod);
    }

    /** Delegates registration to the remote registry service. */
    @Override
    public void doRegister(URL url) {
        registryService.register(url);
    }

    /** Delegates unregistration to the remote registry service. */
    @Override
    public void doUnregister(URL url) {
        registryService.unregister(url);
    }

    /** Delegates subscription to the remote registry service. */
    @Override
    public void doSubscribe(URL url, NotifyListener listener) {
        registryService.subscribe(url, listener);
    }

    /** Delegates unsubscription to the remote registry service. */
    @Override
    public void doUnsubscribe(URL url, NotifyListener listener) {
        registryService.unsubscribe(url, listener);
    }

    /** Looks up URLs directly from the remote registry service. */
    @Override
    public List<URL> lookup(URL url) {
        return registryService.lookup(url);
    }
}
| lovepoem/dubbo | dubbo-registry/dubbo-registry-default/src/main/java/org/apache/dubbo/registry/dubbo/DubboRegistry.java | Java | apache-2.0 | 5,956 |
package org.ovirt.engine.core.itests;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import org.ovirt.engine.core.common.queries.*;
import org.ovirt.engine.core.common.action.LoginUserParameters;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.RunVmParams;
import org.ovirt.engine.core.compat.Guid;
/**
 * Exercises the client/backend handshake sequence: public domain-list query,
 * version lookup, admin login and a RunVm action.
 */
@Ignore
public class ClientHandshakeSequenceTest extends AbstractBackendTest {

    @Test
    public void getDomainList() {
        VdcQueryReturnValue result =
                backend.RunPublicQuery(VdcQueryType.GetDomainList, new VdcQueryParametersBase());
        assertTrue(result.getSucceeded());
        assertNotNull(result.getReturnValue());
        System.out.println(result.getReturnValue());
    }

    @Test
    public void getVersion() {
        GetConfigurationValueParameters params =
                new GetConfigurationValueParameters(ConfigurationValues.VdcVersion);
        VdcQueryReturnValue result = backend.RunPublicQuery(VdcQueryType.GetConfigurationValue, params);
        assertNotNull(result);
        assertNotNull(result.getReturnValue());
        System.out.println("Version: " + result.getReturnValue());
    }

    @Test
    public void loginAdmin() {
        LoginUserParameters credentials =
                new LoginUserParameters("admin", "admin", "domain", "os", "browser", "client_type");
        VdcReturnValueBase result = backend.Login(credentials);
        assertTrue(result.getSucceeded());
        assertNotNull(result.getActionReturnValue());
    }

    @Test
    public void testRunVm() {
        RunVmParams params = new RunVmParams(Guid.NewGuid());
        backend.runInternalAction(VdcActionType.RunVm, params);
    }
}
| jbeecham/ovirt-engine | backend/manager/modules/bll/src/test/java/org/ovirt/engine/core/itests/ClientHandshakeSequenceTest.java | Java | apache-2.0 | 1,920 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.reverse;
import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
/**
 * Unit tests for {@link ReverseStringFilter}: per-token reversal, the optional
 * marker character, the static reverse helpers, and correct handling of
 * supplementary characters (surrogate pairs must stay in order).
 */
public class TestReverseStringFilter extends BaseTokenStreamTestCase {

  // Each whitespace-delimited token is reversed independently.
  public void testFilter() throws Exception {
    TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false); // 1-4 length string
    ((Tokenizer)stream).setReader(new StringReader("Do have a nice day"));
    ReverseStringFilter filter = new ReverseStringFilter(stream);
    assertTokenStreamContents(filter, new String[] { "oD", "evah", "a", "ecin", "yad" });
  }

  // When a marker char is supplied, it is prepended to every reversed token.
  public void testFilterWithMark() throws Exception {
    TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false); // 1-4 length string
    ((Tokenizer)stream).setReader(new StringReader("Do have a nice day"));
    ReverseStringFilter filter = new ReverseStringFilter(stream, '\u0001');
    assertTokenStreamContents(filter,
        new String[] { "\u0001oD", "\u0001evah", "\u0001a", "\u0001ecin", "\u0001yad" });
  }

  // Static String helper reverses whole strings.
  public void testReverseString() throws Exception {
    assertEquals( "A", ReverseStringFilter.reverse( "A" ) );
    assertEquals( "BA", ReverseStringFilter.reverse( "AB" ) );
    assertEquals( "CBA", ReverseStringFilter.reverse( "ABC" ) );
  }

  // Static char[] helper reverses only the [start, start+len) slice in place.
  public void testReverseChar() throws Exception {
    char[] buffer = { 'A', 'B', 'C', 'D', 'E', 'F' };
    ReverseStringFilter.reverse( buffer, 2, 3 );
    assertEquals( "ABEDCF", new String( buffer ) );
  }

  // Surrogate pairs (e.g. U+29B05) must remain as a unit at every position.
  public void testReverseSupplementary() throws Exception {
    // supplementary at end
    assertEquals("𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅"));
    // supplementary at end - 1
    assertEquals("a𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅a"));
    // supplementary at start
    assertEquals("fedcba𩬅", ReverseStringFilter.reverse("𩬅abcdef"));
    // supplementary at start + 1
    assertEquals("fedcba𩬅z", ReverseStringFilter.reverse("z𩬅abcdef"));
    // supplementary medial
    assertEquals("gfe𩬅dcba", ReverseStringFilter.reverse("abcd𩬅efg"));
  }

  // Same surrogate-pair positions, but through the in-place char[] slice helper.
  public void testReverseSupplementaryChar() throws Exception {
    // supplementary at end
    char[] buffer = "abc瀛愯䇹鍟艱𩬅".toCharArray();
    ReverseStringFilter.reverse(buffer, 3, 7);
    assertEquals("abc𩬅艱鍟䇹愯瀛", new String(buffer));
    // supplementary at end - 1
    buffer = "abc瀛愯䇹鍟艱𩬅d".toCharArray();
    ReverseStringFilter.reverse(buffer, 3, 8);
    assertEquals("abcd𩬅艱鍟䇹愯瀛", new String(buffer));
    // supplementary at start
    buffer = "abc𩬅瀛愯䇹鍟艱".toCharArray();
    ReverseStringFilter.reverse(buffer, 3, 7);
    assertEquals("abc艱鍟䇹愯瀛𩬅", new String(buffer));
    // supplementary at start + 1
    buffer = "abcd𩬅瀛愯䇹鍟艱".toCharArray();
    ReverseStringFilter.reverse(buffer, 3, 8);
    assertEquals("abc艱鍟䇹愯瀛𩬅d", new String(buffer));
    // supplementary medial
    buffer = "abc瀛愯𩬅def".toCharArray();
    ReverseStringFilter.reverse(buffer, 3, 7);
    assertEquals("abcfed𩬅愯瀛", new String(buffer));
  }

  /** blast some random strings through the analyzer */
  public void testRandomStrings() throws Exception {
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
      }
    };
    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
  }

  // The filter must pass an empty term through unchanged.
  public void testEmptyTerm() throws IOException {
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer tokenizer = new KeywordTokenizer();
        return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
      }
    };
    checkOneTerm(a, "", "");
  }
}
| visouza/solr-5.0.0 | lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java | Java | apache-2.0 | 5,094 |
package org.cloudfoundry.autoscaler.data.couchdb.dao.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.cloudfoundry.autoscaler.data.couchdb.dao.AppInstanceMetricsDAO;
import org.cloudfoundry.autoscaler.data.couchdb.dao.base.TypedCouchDbRepositorySupport;
import org.cloudfoundry.autoscaler.data.couchdb.document.AppInstanceMetrics;
import org.ektorp.ComplexKey;
import org.ektorp.CouchDbConnector;
import org.ektorp.ViewQuery;
import org.ektorp.support.View;
/**
 * CouchDB-backed DAO for {@link AppInstanceMetrics} documents. Each access path
 * is implemented by a nested repository bound to a design-document view: all
 * records, by application id, by application id within a timestamp range, and
 * by service id before a timestamp. Query failures inside the range queries are
 * logged and surface to callers as {@code null} (best-effort semantics kept).
 */
public class AppInstanceMetricsDAOImpl extends CommonDAOImpl implements AppInstanceMetricsDAO {

    @View(name = "byAll", map = "function(doc) { if (doc.type == 'AppInstanceMetrics' ) emit([doc.appId, doc.appType, doc.timestamp], doc._id)}")
    private static class AppInstanceMetricsRepository_All extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_All(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_byAll");
        }

        /** Returns every AppInstanceMetrics document in the database. */
        public List<AppInstanceMetrics> getAllRecords() {
            return queryView("byAll");
        }
    }

    @View(name = "by_appId", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId) { emit([doc.appId], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByAppId
            extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByAppId(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppId");
        }

        /** Returns all metrics documents for the given application id. */
        public List<AppInstanceMetrics> findByAppId(String appId) {
            ComplexKey key = ComplexKey.of(appId);
            return queryView("by_appId", key);
        }
    }

    @View(name = "by_appId_between", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId && doc.timestamp) { emit([doc.appId, doc.timestamp], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByAppIdBetween
            extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByAppIdBetween(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppIdBetween");
        }

        /**
         * Returns metrics for {@code appId} whose timestamps fall within
         * [startTimestamp, endTimestamp]; returns {@code null} if the view
         * query fails.
         */
        public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
                throws Exception {
            ComplexKey startKey = ComplexKey.of(appId, startTimestamp);
            ComplexKey endKey = ComplexKey.of(appId, endTimestamp);
            ViewQuery q = createQuery("by_appId_between").includeDocs(true).startKey(startKey).endKey(endKey);
            List<AppInstanceMetrics> results = null;
            String[] input = beforeConnection("QUERY", new String[] { "by_appId_between", appId,
                    String.valueOf(startTimestamp), String.valueOf(endTimestamp) });
            try {
                results = db.queryView(q, AppInstanceMetrics.class);
            } catch (Exception e) {
                // Log through log4j instead of printing to stderr; callers keep the
                // original best-effort contract of receiving null on failure.
                AppInstanceMetricsDAOImpl.logger.error("Failed to query view by_appId_between: " + e.getMessage(), e);
            }
            afterConnection(input);
            return results;
        }
    }

    @View(name = "by_serviceId_before", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.serviceId && doc.timestamp) { emit([ doc.serviceId, doc.timestamp], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByServiceId_Before
            extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByServiceId_Before(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByServiceId");
        }

        /**
         * Returns metrics for {@code serviceId} with timestamps in [0, olderThan];
         * returns {@code null} if the view query fails.
         */
        public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
            ComplexKey startKey = ComplexKey.of(serviceId, 0);
            ComplexKey endKey = ComplexKey.of(serviceId, olderThan);
            ViewQuery q = createQuery("by_serviceId_before").includeDocs(true).startKey(startKey).endKey(endKey);
            List<AppInstanceMetrics> results = null;
            String[] input = beforeConnection("QUERY",
                    new String[] { "by_serviceId_before", serviceId, String.valueOf(0), String.valueOf(olderThan) });
            try {
                results = db.queryView(q, AppInstanceMetrics.class);
            } catch (Exception e) {
                // Log through log4j instead of printing to stderr (see findByAppIdBetween).
                AppInstanceMetricsDAOImpl.logger.error("Failed to query view by_serviceId_before: " + e.getMessage(), e);
            }
            afterConnection(input);
            return results;
        }
    }

    private static final Logger logger = Logger.getLogger(AppInstanceMetricsDAOImpl.class);

    private AppInstanceMetricsRepository_All metricsRepoAll;
    private AppInstanceMetricsRepository_ByAppId metricsRepoByAppId;
    private AppInstanceMetricsRepository_ByAppIdBetween metricsRepoByAppIdBetween;
    private AppInstanceMetricsRepository_ByServiceId_Before metricsRepoByServiceIdBefore;

    public AppInstanceMetricsDAOImpl(CouchDbConnector db) {
        metricsRepoAll = new AppInstanceMetricsRepository_All(db);
        metricsRepoByAppId = new AppInstanceMetricsRepository_ByAppId(db);
        metricsRepoByAppIdBetween = new AppInstanceMetricsRepository_ByAppIdBetween(db);
        metricsRepoByServiceIdBefore = new AppInstanceMetricsRepository_ByServiceId_Before(db);
    }

    /**
     * Same as {@link #AppInstanceMetricsDAOImpl(CouchDbConnector)}, optionally
     * initializing the design documents of all repositories.
     */
    public AppInstanceMetricsDAOImpl(CouchDbConnector db, boolean initDesignDocument) {
        this(db);
        if (initDesignDocument) {
            try {
                initAllRepos();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    @Override
    public List<AppInstanceMetrics> findAll() {
        return this.metricsRepoAll.getAllRecords();
    }

    @Override
    public List<AppInstanceMetrics> findByAppId(String appId) {
        return this.metricsRepoByAppId.findByAppId(appId);
    }

    @Override
    public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
            throws Exception {
        return this.metricsRepoByAppIdBetween.findByAppIdBetween(appId, startTimestamp, endTimestamp);
    }

    @Override
    public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
        return this.metricsRepoByServiceIdBefore.findByServiceIdBefore(serviceId, olderThan);
    }

    /** Returns metrics for {@code appId} from {@code timestamp} up to now, or null on failure. */
    @Override
    public List<AppInstanceMetrics> findByAppIdAfter(String appId, long timestamp) throws Exception {
        try {
            return findByAppIdBetween(appId, timestamp, System.currentTimeMillis());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> TypedCouchDbRepositorySupport<T> getDefaultRepo() {
        return (TypedCouchDbRepositorySupport<T>) this.metricsRepoAll;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> List<TypedCouchDbRepositorySupport<T>> getAllRepos() {
        List<TypedCouchDbRepositorySupport<T>> repoList = new ArrayList<TypedCouchDbRepositorySupport<T>>();
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoAll);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppId);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppIdBetween);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByServiceIdBefore);
        return repoList;
    }
}
| cfibmers/open-Autoscaler | server/src/main/java/org/cloudfoundry/autoscaler/data/couchdb/dao/impl/AppInstanceMetricsDAOImpl.java | Java | apache-2.0 | 6,943 |
package jef.common.wrapper;
import java.io.Serializable;
/**
 * A serializable holder of a single mutable value.
 *
 * @param <T> type of the held value
 */
public interface IHolder<T> extends Serializable{
    /** Returns the currently held value. */
    T get();
    /** Replaces the held value with {@code obj}. */
    void set(T obj);
}
| xuse/ef-orm | common-core/src/main/java/jef/common/wrapper/IHolder.java | Java | apache-2.0 | 139 |
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.response;
/**
 * Implemented by response types that can transition into a validatable view of
 * the response.
 *
 * @param <T> the concrete validatable-response type returned by {@link #then()}
 * @param <R> the concrete response type
 */
public interface Validatable<T extends ValidatableResponseOptions<T, R>, R extends ResponseBody<R> & ResponseOptions<R>> {
    /**
     * Returns a validatable response that lets you validate the response. Usage example:
     * <p/>
     * <pre>
     * given().
     *         param("firstName", "John").
     *         param("lastName", "Doe").
     * when().
     *         get("/greet").
     * then().
     *         body("greeting", equalTo("John Doe"));
     * </pre>
     *
     * @return A validatable response
     */
    T then();
}
| jayway/rest-assured | rest-assured/src/main/java/io/restassured/response/Validatable.java | Java | apache-2.0 | 1,199 |
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014 The Billing Project, LLC
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.meter.timeline.shutdown;
import java.util.HashMap;
import java.util.Map;
import org.joda.time.DateTime;
/**
 * Json mapping helper used solely to persist in-flight timeline start times in a
 * database blob on shutdown and restore them on startup.
 * <p/>
 * The nested map goes from sourceId to eventCategoryId to start time;
 * {@code minStartTime} tracks the earliest start time seen (or the sentinel
 * instant {@code Long.MAX_VALUE} while empty).
 */
public class StartTimes {

    private final DateTime timeInserted;
    private final Map<Integer, Map<Integer, DateTime>> startTimesMap;
    private DateTime minStartTime;

    public StartTimes(final DateTime timeInserted, final Map<Integer, Map<Integer, DateTime>> startTimesMap) {
        this.timeInserted = timeInserted;
        this.startTimesMap = startTimesMap;
        // Single pass over the supplied map to find the earliest start time; the
        // MAX_VALUE sentinel survives if the map is empty.
        DateTime earliest = new DateTime(Long.MAX_VALUE);
        for (final Map<Integer, DateTime> perCategory : startTimesMap.values()) {
            for (final DateTime candidate : perCategory.values()) {
                if (earliest.isAfter(candidate)) {
                    earliest = candidate;
                }
            }
        }
        this.minStartTime = earliest;
    }

    public StartTimes() {
        this.timeInserted = new DateTime();
        this.minStartTime = new DateTime(Long.MAX_VALUE);
        this.startTimesMap = new HashMap<Integer, Map<Integer, DateTime>>();
    }

    /** Records the start time for a (source, category) pair and updates the minimum. */
    public void addTime(final int sourceId, final int categoryId, final DateTime dateTime) {
        Map<Integer, DateTime> perCategory = startTimesMap.get(sourceId);
        if (perCategory == null) {
            perCategory = new HashMap<Integer, DateTime>();
            startTimesMap.put(sourceId, perCategory);
        }
        perCategory.put(categoryId, dateTime);
        if (dateTime.isBefore(minStartTime)) {
            minStartTime = dateTime;
        }
    }

    /** Returns the recorded start time for the pair, or null if none exists. */
    public DateTime getStartTimeForSourceIdAndCategoryId(final int sourceId, final int categoryId) {
        final Map<Integer, DateTime> perCategory = startTimesMap.get(sourceId);
        return perCategory == null ? null : perCategory.get(categoryId);
    }

    public Map<Integer, Map<Integer, DateTime>> getStartTimesMap() {
        return startTimesMap;
    }

    public DateTime getTimeInserted() {
        return timeInserted;
    }

    public DateTime getMinStartTime() {
        return minStartTime;
    }
}
| killbill/killbill-meter-plugin | src/main/java/org/killbill/billing/plugin/meter/timeline/shutdown/StartTimes.java | Java | apache-2.0 | 3,097 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.metadata;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.druid.indexing.overlord.DataSourceMetadata;
import io.druid.indexing.overlord.ObjectMetadata;
import io.druid.indexing.overlord.SegmentPublishResult;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.StringUtils;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.LinearShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import io.druid.timeline.partition.NumberedShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.util.StringMapper;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
public class IndexerSQLMetadataStorageCoordinatorTest
{
  @Rule
  public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();

  private final ObjectMapper mapper = new DefaultObjectMapper();

  // Two linear shards of the same interval/version; announced together by most tests.
  private final DataSegment defaultSegment = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new LinearShardSpec(0),
      9,
      100
  );

  private final DataSegment defaultSegment2 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new LinearShardSpec(1),
      9,
      100
  );

  // Different interval from the two above, single NoneShardSpec chunk.
  private final DataSegment defaultSegment3 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-03T00Z/2015-01-04T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      NoneShardSpec.instance(),
      9,
      100
  );

  // Overshadows defaultSegment, defaultSegment2
  private final DataSegment defaultSegment4 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new LinearShardSpec(0),
      9,
      100
  );

  // NumberedShardSpec fixtures: partition numbers 0-3 across core-partition counts 0 and 1.
  private final DataSegment numberedSegment0of0 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NumberedShardSpec(0, 0),
      9,
      100
  );

  private final DataSegment numberedSegment1of0 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NumberedShardSpec(1, 0),
      9,
      100
  );

  private final DataSegment numberedSegment2of0 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NumberedShardSpec(2, 0),
      9,
      100
  );

  private final DataSegment numberedSegment2of1 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NumberedShardSpec(2, 1),
      9,
      100
  );

  private final DataSegment numberedSegment3of1 = new DataSegment(
      "fooDataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "zversion",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NumberedShardSpec(3, 1),
      9,
      100
  );

  private final Set<DataSegment> SEGMENTS = ImmutableSet.of(defaultSegment, defaultSegment2);

  // Incremented by the instrumented coordinator (see setUp) on each metadata update attempt.
  private final AtomicLong metadataUpdateCounter = new AtomicLong();

  private IndexerSQLMetadataStorageCoordinator coordinator;
  private TestDerbyConnector derbyConnector;
  /**
   * Creates fresh metadata tables and a coordinator whose
   * {@code updateDataSourceMetadataWithHandle} is instrumented to count
   * invocations, so tests can assert how often a metadata transaction was
   * attempted.
   */
  @Before
  public void setUp()
  {
    derbyConnector = derbyConnectorRule.getConnector();
    mapper.registerSubtypes(LinearShardSpec.class);
    derbyConnector.createDataSourceTable();
    derbyConnector.createTaskTables();
    derbyConnector.createSegmentTable();
    metadataUpdateCounter.set(0);
    coordinator = new IndexerSQLMetadataStorageCoordinator(
        mapper,
        derbyConnectorRule.metadataTablesConfigSupplier().get(),
        derbyConnector
    )
    {
      @Override
      protected DataSourceMetadataUpdateResult updateDataSourceMetadataWithHandle(
          Handle handle,
          String dataSource,
          DataSourceMetadata startMetadata,
          DataSourceMetadata endMetadata
      ) throws IOException
      {
        // Count number of times this method is called.
        metadataUpdateCounter.getAndIncrement();
        return super.updateDataSourceMetadataWithHandle(handle, dataSource, startMetadata, endMetadata);
      }
    };
  }
  /**
   * Marks every segment in {@link #SEGMENTS} as unused directly in the metadata
   * table, asserting that exactly one row was updated per segment.
   */
  private void unUseSegment()
  {
    for (final DataSegment segment : SEGMENTS) {
      Assert.assertEquals(
          1, (int) derbyConnector.getDBI().<Integer>withHandle(
              new HandleCallback<Integer>()
              {
                @Override
                public Integer withHandle(Handle handle) throws Exception
                {
                  return handle.createStatement(
                      StringUtils.format(
                          "UPDATE %s SET used = false WHERE id = :id",
                          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable()
                      )
                  ).bind("id", segment.getIdentifier()).execute();
                }
              }
          )
      );
    }
  }
private List<String> getUsedIdentifiers()
{
final String table = derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
return derbyConnector.retryWithHandle(
new HandleCallback<List<String>>()
{
@Override
public List<String> withHandle(Handle handle) throws Exception
{
return handle.createQuery("SELECT id FROM " + table + " WHERE used = true ORDER BY id")
.map(StringMapper.FIRST)
.list();
}
}
);
}
@Test
public void testSimpleAnnounce() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
for (DataSegment segment : SEGMENTS) {
Assert.assertArrayEquals(
mapper.writeValueAsString(segment).getBytes("UTF-8"),
derbyConnector.lookup(
derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
"id",
"payload",
segment.getIdentifier()
)
);
}
Assert.assertEquals(
ImmutableList.of(defaultSegment.getIdentifier(), defaultSegment2.getIdentifier()),
getUsedIdentifiers()
);
// Should not update dataSource metadata.
Assert.assertEquals(0, metadataUpdateCounter.get());
}
  /**
   * When an announced batch contains an overshadowing segment (defaultSegment4), every
   * payload is still persisted, but only the overshadowing segment remains flagged as used.
   */
  @Test
  public void testOvershadowingAnnounce() throws IOException
  {
    final ImmutableSet<DataSegment> segments = ImmutableSet.of(defaultSegment, defaultSegment2, defaultSegment4);
    coordinator.announceHistoricalSegments(segments);
    // Payloads are stored for all segments, used or not.
    for (DataSegment segment : segments) {
      Assert.assertArrayEquals(
          mapper.writeValueAsString(segment).getBytes("UTF-8"),
          derbyConnector.lookup(
              derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
              "id",
              "payload",
              segment.getIdentifier()
          )
      );
    }
    // Only the overshadowing segment's id should remain in the used set.
    Assert.assertEquals(ImmutableList.of(defaultSegment4.getIdentifier()), getUsedIdentifiers());
  }
  /**
   * Two consecutive transactional announces whose start metadata matches the store's
   * current state should both succeed, persist their payloads, and leave the metadata
   * at the final end state — with exactly one metadata-update attempt per call.
   */
  @Test
  public void testTransactionalAnnounceSuccess() throws IOException
  {
    // Insert first segment: store has no metadata yet, so the expected start state is null.
    final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "bar"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);
    Assert.assertArrayEquals(
        mapper.writeValueAsString(defaultSegment).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            defaultSegment.getIdentifier()
        )
    );
    // Insert second segment: start state must match the end state of the first call.
    final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment2),
        new ObjectMetadata(ImmutableMap.of("foo", "bar")),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment2), true), result2);
    Assert.assertArrayEquals(
        mapper.writeValueAsString(defaultSegment2).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            defaultSegment2.getIdentifier()
        )
    );
    // Examine metadata: should reflect the second call's end state.
    Assert.assertEquals(
        new ObjectMetadata(ImmutableMap.of("foo", "baz")),
        coordinator.getDataSourceMetadata("fooDataSource")
    );
    // Should only be tried once per call.
    Assert.assertEquals(2, metadataUpdateCounter.get());
  }
  /**
   * Uses a coordinator whose metadata update returns {@code TRY_AGAIN} on the first
   * attempt to verify that a transactional announce retries and then succeeds.
   * Each announce should therefore perform exactly two metadata-update attempts.
   */
  @Test
  public void testTransactionalAnnounceRetryAndSuccess() throws IOException
  {
    // Counts attempts within a single announce; reset between announces below.
    final AtomicLong attemptCounter = new AtomicLong();
    final IndexerSQLMetadataStorageCoordinator failOnceCoordinator = new IndexerSQLMetadataStorageCoordinator(
        mapper,
        derbyConnectorRule.metadataTablesConfigSupplier().get(),
        derbyConnector
    )
    {
      @Override
      protected DataSourceMetadataUpdateResult updateDataSourceMetadataWithHandle(
          Handle handle,
          String dataSource,
          DataSourceMetadata startMetadata,
          DataSourceMetadata endMetadata
      ) throws IOException
      {
        // Track total attempts globally, and fail (TRY_AGAIN) only on the first attempt.
        metadataUpdateCounter.getAndIncrement();
        if (attemptCounter.getAndIncrement() == 0) {
          return DataSourceMetadataUpdateResult.TRY_AGAIN;
        } else {
          return super.updateDataSourceMetadataWithHandle(handle, dataSource, startMetadata, endMetadata);
        }
      }
    };
    // Insert first segment.
    final SegmentPublishResult result1 = failOnceCoordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "bar"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);
    Assert.assertArrayEquals(
        mapper.writeValueAsString(defaultSegment).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            defaultSegment.getIdentifier()
        )
    );
    // Reset attempt counter to induce another failure.
    attemptCounter.set(0);
    // Insert second segment.
    final SegmentPublishResult result2 = failOnceCoordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment2),
        new ObjectMetadata(ImmutableMap.of("foo", "bar")),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment2), true), result2);
    Assert.assertArrayEquals(
        mapper.writeValueAsString(defaultSegment2).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            defaultSegment2.getIdentifier()
        )
    );
    // Examine metadata: should reflect the second call's end state.
    Assert.assertEquals(
        new ObjectMetadata(ImmutableMap.of("foo", "baz")),
        failOnceCoordinator.getDataSourceMetadata("fooDataSource")
    );
    // Should be tried twice per call.
    Assert.assertEquals(4, metadataUpdateCounter.get());
  }
  /**
   * A transactional announce must fail when the store has no metadata yet (null)
   * but the caller's expected start state is non-null; no retry should occur.
   */
  @Test
  public void testTransactionalAnnounceFailDbNullWantNotNull() throws IOException
  {
    final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(ImmutableMap.of("foo", "bar")),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    // Failure: no segments published.
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result1);
    // Should only be tried once.
    Assert.assertEquals(1, metadataUpdateCounter.get());
  }
  /**
   * Once metadata exists in the store, a transactional announce expecting a null
   * start state must fail without retrying.
   */
  @Test
  public void testTransactionalAnnounceFailDbNotNullWantNull() throws IOException
  {
    // First call succeeds and establishes non-null metadata.
    final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);
    // Second call still expects null start metadata, which no longer matches.
    final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment2),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result2);
    // Should only be tried once per call.
    Assert.assertEquals(2, metadataUpdateCounter.get());
  }
  /**
   * A transactional announce whose expected start metadata differs from the value
   * actually stored must fail without retrying.
   */
  @Test
  public void testTransactionalAnnounceFailDbNotNullWantDifferent() throws IOException
  {
    // First call succeeds, leaving metadata at {"foo": "baz"}.
    final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);
    // Second call expects {"foo": "qux"}, which does not match the stored state.
    final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment2),
        new ObjectMetadata(ImmutableMap.of("foo", "qux")),
        new ObjectMetadata(ImmutableMap.of("foo", "baz"))
    );
    Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result2);
    // Should only be tried once per call.
    Assert.assertEquals(2, metadataUpdateCounter.get());
  }
@Test
public void testSimpleUsedList() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval()
)
)
);
}
  /**
   * Used-segment lookups over multiple intervals should return exactly the segments
   * intersecting each queried interval, without duplicating a segment whose interval
   * is overlapped by more than one query interval.
   */
  @Test
  public void testMultiIntervalUsedList() throws IOException
  {
    coordinator.announceHistoricalSegments(SEGMENTS);
    coordinator.announceHistoricalSegments(ImmutableSet.of(defaultSegment3));
    // Query only the first interval: matches the two original segments.
    Assert.assertEquals(
        SEGMENTS,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment.getInterval())
            )
        )
    );
    // Query only the second interval: matches segment3 alone.
    Assert.assertEquals(
        ImmutableSet.of(defaultSegment3),
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment3.getInterval())
            )
        )
    );
    // Query both intervals: union of the two result sets.
    Assert.assertEquals(
        ImmutableSet.of(defaultSegment, defaultSegment2, defaultSegment3),
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment.getInterval(), defaultSegment3.getInterval())
            )
        )
    );
    //case to check no duplication if two intervals overlapped with the interval of same segment.
    Assert.assertEquals(
        ImmutableList.of(defaultSegment3),
        coordinator.getUsedSegmentsForIntervals(
            defaultSegment.getDataSource(),
            ImmutableList.of(
                Interval.parse("2015-01-03T00Z/2015-01-03T05Z"),
                Interval.parse("2015-01-03T09Z/2015-01-04T00Z")
            )
        )
    );
  }
@Test
public void testSimpleUnUsedList() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval()
)
)
);
}
@Test
public void testUsedOverlapLow() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Set<DataSegment> actualSegments = ImmutableSet.copyOf(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
Interval.parse("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive
)
);
Assert.assertEquals(
SEGMENTS,
actualSegments
);
}
@Test
public void testUsedOverlapHigh() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
Interval.parse("2015-1-1T23:59:59.999Z/2015-02-01T00Z")
)
)
);
}
@Test
public void testUsedOutOfBoundsLow() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertTrue(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
new Interval(defaultSegment.getInterval().getStart().minus(1), defaultSegment.getInterval().getStart())
).isEmpty()
);
}
@Test
public void testUsedOutOfBoundsHigh() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertTrue(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
new Interval(defaultSegment.getInterval().getEnd(), defaultSegment.getInterval().getEnd().plusDays(10))
).isEmpty()
);
}
@Test
public void testUsedWithinBoundsEnd() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().minusMillis(1))
)
)
);
}
@Test
public void testUsedOverlapEnd() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUsedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusMillis(1))
)
)
);
}
@Test
public void testUnUsedOverlapLow() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertTrue(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
new Interval(
defaultSegment.getInterval().getStart().minus(1),
defaultSegment.getInterval().getStart().plus(1)
)
).isEmpty()
);
}
@Test
public void testUnUsedUnderlapLow() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertTrue(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
new Interval(defaultSegment.getInterval().getStart().plus(1), defaultSegment.getInterval().getEnd())
).isEmpty()
);
}
@Test
public void testUnUsedUnderlapHigh() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertTrue(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
new Interval(defaultSegment.getInterval().getStart(), defaultSegment.getInterval().getEnd().minus(1))
).isEmpty()
);
}
@Test
public void testUnUsedOverlapHigh() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertTrue(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withStart(defaultSegment.getInterval().getEnd().minus(1))
).isEmpty()
);
}
@Test
public void testUnUsedBigOverlap() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
Interval.parse("2000/2999")
)
)
);
}
@Test
public void testUnUsedLowRange() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minus(1))
)
)
);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minusYears(1))
)
)
);
}
@Test
public void testUnUsedHighRange() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plus(1))
)
)
);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusYears(1))
)
)
);
}
  /**
   * Deleting dataSource metadata should return false for an unknown dataSource,
   * true for an existing one, and leave subsequent lookups returning null.
   */
  @Test
  public void testDeleteDataSourceMetadata() throws IOException
  {
    // Establish metadata for "fooDataSource" first.
    coordinator.announceHistoricalSegments(
        ImmutableSet.of(defaultSegment),
        new ObjectMetadata(null),
        new ObjectMetadata(ImmutableMap.of("foo", "bar"))
    );
    Assert.assertEquals(
        new ObjectMetadata(ImmutableMap.of("foo", "bar")),
        coordinator.getDataSourceMetadata("fooDataSource")
    );
    Assert.assertFalse("deleteInvalidDataSourceMetadata", coordinator.deleteDataSourceMetadata("nonExistentDS"));
    Assert.assertTrue("deleteValidDataSourceMetadata", coordinator.deleteDataSourceMetadata("fooDataSource"));
    Assert.assertNull("getDataSourceMetadataNullAfterDelete", coordinator.getDataSourceMetadata("fooDataSource"));
  }
  /** One numbered shard, zero core partitions. */
  @Test
  public void testSingleAdditionalNumberedShardWithNoCorePartitions() throws IOException
  {
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment0of0));
  }
  /** Several numbered shards, zero core partitions. */
  @Test
  public void testMultipleAdditionalNumberedShardsWithNoCorePartitions() throws IOException
  {
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment0of0, numberedSegment1of0, numberedSegment2of0));
  }
  /** One numbered shard, one core partition. */
  @Test
  public void testSingleAdditionalNumberedShardWithOneCorePartition() throws IOException
  {
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment2of1));
  }
  /** Several numbered shards, one core partition. */
  @Test
  public void testMultipleAdditionalNumberedShardsWithOneCorePartition() throws IOException
  {
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment2of1, numberedSegment3of1));
  }
  /**
   * Announces the given numbered-shard segments and verifies their payloads are stored,
   * they are all marked used, and no dataSource metadata update occurred.
   *
   * NOTE(review): the final used-identifier assertion relies on the given set's iteration
   * order matching the id-sorted order returned by getUsedIdentifiers() — confirm this
   * holds for the fixtures passed in.
   */
  private void additionalNumberedShardTest(Set<DataSegment> segments) throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    // Each announced segment should be stored as its exact serialized JSON payload.
    for (DataSegment segment : segments) {
      Assert.assertArrayEquals(
          mapper.writeValueAsString(segment).getBytes("UTF-8"),
          derbyConnector.lookup(
              derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
              "id",
              "payload",
              segment.getIdentifier()
          )
      );
    }
    Assert.assertEquals(
        segments.stream().map(DataSegment::getIdentifier).collect(Collectors.toList()),
        getUsedIdentifiers()
    );
    // Should not update dataSource metadata.
    Assert.assertEquals(0, metadataUpdateCounter.get());
  }
}
| lizhanhui/data_druid | server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java | Java | apache-2.0 | 26,606 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.util;
import java.util.Arrays;
import java.nio.CharBuffer;
import org.opengis.metadata.citation.Citation; // For javadoc
import org.opengis.referencing.IdentifiedObject; // For javadoc
import static java.lang.Character.*;
/**
* Static methods working with {@link CharSequence} instances. Some methods defined in this
* class duplicate the functionalities already provided in the standard {@link String} class,
* but works on a generic {@code CharSequence} instance instead of {@code String}.
*
* <h2>Unicode support</h2>
 * Every method defined in this class works on <cite>code points</cite> instead of characters
 * when appropriate. Consequently, those methods should behave correctly with characters outside
 * the <cite>Basic Multilingual Plane</cite> (BMP).
*
* <h2>Policy on space characters</h2>
* Java defines two methods for testing if a character is a white space:
* {@link Character#isWhitespace(int)} and {@link Character#isSpaceChar(int)}.
* Those two methods differ in the way they handle {@linkplain Characters#NO_BREAK_SPACE
* no-break spaces}, tabulations and line feeds. The general policy in the SIS library is:
*
* <ul>
* <li>Use {@code isWhitespace(…)} when separating entities (words, numbers, tokens, <i>etc.</i>)
* in a list. Using that method, characters separated by a no-break space are considered as
* part of the same entity.</li>
* <li>Use {@code isSpaceChar(…)} when parsing a single entity, for example a single word.
* Using this method, no-break spaces are considered as part of the entity while line
* feeds or tabulations are entity boundaries.</li>
* </ul>
*
* <div class="note"><b>Example:</b>
* Numbers formatted in the French locale use no-break spaces as group separators. When parsing a list of numbers,
* ordinary spaces around the numbers may need to be ignored, but no-break spaces shall be considered as part of the
* numbers. Consequently {@code isWhitespace(…)} is appropriate for skipping spaces <em>between</em> the numbers.
* But if there is spaces to skip <em>inside</em> a single number, then {@code isSpaceChar(…)} is a good choice
* for accepting no-break spaces and for stopping the parse operation at tabulations or line feed character.
* A tabulation or line feed between two characters is very likely to separate two distinct values.</div>
*
* In practice, the {@link java.text.Format} implementations in the SIS library typically use
* {@code isSpaceChar(…)} while most of the rest of the SIS library, including this
* {@code CharSequences} class, consistently uses {@code isWhitespace(…)}.
*
* <p>Note that the {@link String#trim()} method doesn't follow any of those policies and should
 * generally be avoided. That {@code trim()} method removes every ISO control character without
 * distinction about whether the characters are spaces or not, and ignores all Unicode spaces.
* The {@link #trimWhitespaces(String)} method defined in this class can be used as an alternative.</p>
*
* <h2>Handling of null values</h2>
* Most methods in this class accept a {@code null} {@code CharSequence} argument. In such cases
* the method return value is either a {@code null} {@code CharSequence}, an empty array, or a
* {@code 0} or {@code false} primitive type calculated as if the input was an empty string.
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.1
*
* @see StringBuilders
*
* @since 0.3
* @module
*/
public final class CharSequences extends Static {
    /**
     * A zero-length array of strings. This constant plays a role equivalent to
     * {@link java.util.Collections#EMPTY_LIST}: a shared, allocation-free "no result" value.
     */
    public static final String[] EMPTY_ARRAY = new String[0];
    /**
     * A lazily-populated cache of strings containing only white spaces. String lengths are
     * equal to their index plus one in the {@code SPACES} array: for example, the string at
     * index 3 has length 4. Entries are constructed only when first needed by {@link #spaces(int)}.
     */
    private static final String[] SPACES = new String[10];
    /**
     * Do not allow instantiation of this class: all members are static,
     * so this private constructor is never invoked.
     */
    private CharSequences() {
    }
/**
* Returns the code point after the given index. This method completes
* {@link Character#codePointBefore(CharSequence, int)} but is rarely used because slightly
* inefficient (in most cases, the code point at {@code index} is known together with the
* corresponding {@code charCount(int)} value, so the method calls should be unnecessary).
*/
private static int codePointAfter(final CharSequence text, final int index) {
return codePointAt(text, index + charCount(codePointAt(text, index)));
}
/**
* Returns a character sequence of the specified length filled with white spaces.
*
* <h4>Use case</h4>
* This method is typically invoked for performing right-alignment of text on the
* {@linkplain java.io.Console console} or other device using monospaced font.
* Callers compute a value for the {@code length} argument by (<var>desired width</var> - <var>used width</var>).
* Since the <var>used width</var> value may be greater than expected, this method handle negative {@code length}
* values as if the value was zero.
*
* @param length the string length. Negative values are clamped to 0.
* @return a string of length {@code length} filled with white spaces.
*/
public static CharSequence spaces(final int length) {
/*
* No need to synchronize. In the unlikely event of two threads calling this method
* at the same time and the two calls creating a new string, the String.intern() call
* will take care of canonicalizing the strings.
*/
if (length <= 0) {
return "";
}
if (length < SPACES.length) {
String s = SPACES[length - 1];
if (s == null) {
final char[] spaces = new char[length];
Arrays.fill(spaces, ' ');
s = new String(spaces).intern();
SPACES[length - 1] = s;
}
return s;
}
return new CharSequence() {
@Override public int length() {
return length;
}
@Override public char charAt(int index) {
ArgumentChecks.ensureValidIndex(length, index);
return ' ';
}
@Override public CharSequence subSequence(final int start, final int end) {
ArgumentChecks.ensureValidIndexRange(length, start, end);
final int n = end - start;
return (n == length) ? this : spaces(n);
}
@Override public String toString() {
final char[] array = new char[length];
Arrays.fill(array, ' ');
return new String(array);
}
};
}
/**
* Returns the {@linkplain CharSequence#length() length} of the given characters sequence,
* or 0 if {@code null}.
*
* @param text the character sequence from which to get the length, or {@code null}.
* @return the length of the character sequence, or 0 if the argument is {@code null}.
*/
public static int length(final CharSequence text) {
return (text != null) ? text.length() : 0;
}
/**
* Returns the number of Unicode code points in the given characters sequence,
* or 0 if {@code null}. Unpaired surrogates within the text count as one code
* point each.
*
* @param text the character sequence from which to get the count, or {@code null}.
* @return the number of Unicode code points, or 0 if the argument is {@code null}.
*
* @see #codePointCount(CharSequence, int, int)
*/
public static int codePointCount(final CharSequence text) {
return (text != null) ? codePointCount(text, 0, text.length()) : 0;
}
    /**
     * Returns the number of Unicode code points in the given characters sub-sequence,
     * or 0 if {@code null}. Unpaired surrogates within the text count as one code
     * point each.
     *
     * <p>This method performs the same work than the standard
     * {@link Character#codePointCount(CharSequence, int, int)} method, except that it tries
     * to delegate to the optimized methods from the {@link String}, {@link StringBuilder},
     * {@link StringBuffer} or {@link CharBuffer} classes if possible.</p>
     *
     * @param text the character sequence from which to get the count, or {@code null}.
     * @param fromIndex the index from which to start the computation.
     * @param toIndex the index after the last character to take in account.
     * @return the number of Unicode code points, or 0 if the argument is {@code null}.
     *
     * @see Character#codePointCount(CharSequence, int, int)
     * @see String#codePointCount(int, int)
     * @see StringBuilder#codePointCount(int, int)
     */
    public static int codePointCount(final CharSequence text, final int fromIndex, final int toIndex) {
        // Delegate to the specialized implementations when the concrete type is known.
        if (text == null) return 0;
        if (text instanceof String) return ((String) text).codePointCount(fromIndex, toIndex);
        if (text instanceof StringBuilder) return ((StringBuilder) text).codePointCount(fromIndex, toIndex);
        if (text instanceof StringBuffer) return ((StringBuffer) text).codePointCount(fromIndex, toIndex);
        if (text instanceof CharBuffer) {
            final CharBuffer buffer = (CharBuffer) text;
            if (buffer.hasArray() && !buffer.isReadOnly()) {
                // Count directly on the backing array; indexes are relative to the
                // buffer's current position, hence the offset.
                final int position = buffer.position();
                return Character.codePointCount(buffer.array(), position + fromIndex, position + toIndex);
            }
        }
        // Generic fallback working on any CharSequence implementation.
        return Character.codePointCount(text, fromIndex, toIndex);
    }
/**
* Returns the number of occurrences of the {@code toSearch} string in the given {@code text}.
* The search is case-sensitive.
*
* @param text the character sequence to count occurrences, or {@code null}.
* @param toSearch the string to search in the given {@code text}.
* It shall contain at least one character.
* @return the number of occurrences of {@code toSearch} in {@code text},
* or 0 if {@code text} was null or empty.
* @throws NullArgumentException if the {@code toSearch} argument is null.
* @throws IllegalArgumentException if the {@code toSearch} argument is empty.
*/
public static int count(final CharSequence text, final String toSearch) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
final int length = toSearch.length();
if (length == 1) {
// Implementation working on a single character is faster.
return count(text, toSearch.charAt(0));
}
int n = 0;
if (text != null) {
int i = 0;
while ((i = indexOf(text, toSearch, i, text.length())) >= 0) {
n++;
i += length;
}
}
return n;
}
/**
* Counts the number of occurrence of the given character in the given character sequence.
*
* @param text the character sequence to count occurrences, or {@code null}.
* @param toSearch the character to count.
* @return the number of occurrences of the given character, or 0 if the {@code text} is null.
*/
public static int count(final CharSequence text, final char toSearch) {
int n = 0;
if (text != null) {
if (text instanceof String) {
final String s = (String) text;
for (int i=s.indexOf(toSearch); ++i != 0; i=s.indexOf(toSearch, i)) {
n++;
}
} else {
// No need to use the code point API here, since we are looking for exact matches.
for (int i=text.length(); --i>=0;) {
if (text.charAt(i) == toSearch) {
n++;
}
}
}
}
return n;
}
    /**
     * Returns the index within the given strings of the first occurrence of the specified part,
     * starting at the specified index. This method is equivalent to the following method call,
     * except that this method works on arbitrary {@link CharSequence} objects instead of
     * {@link String}s only, and that the upper limit can be specified:
     *
     * {@preformat java
     * return text.indexOf(part, fromIndex);
     * }
     *
     * There is no restriction on the value of {@code fromIndex}. If negative or greater
     * than {@code toIndex}, then the behavior of this method is as if the search started
     * from 0 or {@code toIndex} respectively. This is consistent with the
     * {@link String#indexOf(String, int)} behavior.
     *
     * @param text the string in which to perform the search.
     * @param toSearch the substring for which to search.
     * @param fromIndex the index from which to start the search.
     * @param toIndex the index after the last character where to perform the search.
     * @return the index within the text of the first occurrence of the specified part, starting at the specified index,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     * @throws NullArgumentException if the {@code toSearch} argument is null.
     * @throws IllegalArgumentException if the {@code toSearch} argument is empty.
     *
     * @see String#indexOf(String, int)
     * @see StringBuilder#indexOf(String, int)
     * @see StringBuffer#indexOf(String, int)
     */
    public static int indexOf(final CharSequence text, final CharSequence toSearch, int fromIndex, int toIndex) {
        ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
        if (text != null) {
            int length = text.length();
            if (toIndex > length) {
                toIndex = length;
            }
            // Delegate to the optimized String/StringBuilder/StringBuffer methods, but only
            // when searching up to the end of the text since those methods have no toIndex.
            if (toSearch instanceof String && toIndex == length) {
                if (text instanceof String) {
                    return ((String) text).indexOf((String) toSearch, fromIndex);
                }
                if (text instanceof StringBuilder) {
                    return ((StringBuilder) text).indexOf((String) toSearch, fromIndex);
                }
                if (text instanceof StringBuffer) {
                    return ((StringBuffer) text).indexOf((String) toSearch, fromIndex);
                }
            }
            if (fromIndex < 0) {
                fromIndex = 0;
            }
            // From here, `length` is the length of the searched part and `toIndex`
            // becomes the last index where a match could possibly start.
            length = toSearch.length();
            toIndex -= length;
            search: for (; fromIndex <= toIndex; fromIndex++) {
                for (int i=0; i<length; i++) {
                    // No need to use the codePointAt API here, since we are looking for exact matches.
                    if (text.charAt(fromIndex + i) != toSearch.charAt(i)) {
                        continue search;
                    }
                }
                return fromIndex;
            }
        }
        return -1;
    }
    /**
     * Returns the index within the given character sequence of the first occurrence of the
     * specified character, starting the search at the specified index. If the character is
     * not found, then this method returns -1.
     *
     * <p>There is no restriction on the value of {@code fromIndex}. If negative or greater
     * than {@code toIndex}, then the behavior of this method is as if the search started
     * from 0 or {@code toIndex} respectively. This is consistent with the behavior documented
     * in {@link String#indexOf(int, int)}.</p>
     *
     * @param text the character sequence in which to perform the search, or {@code null}.
     * @param toSearch the Unicode code point of the character to search.
     * @param fromIndex the index to start the search from.
     * @param toIndex the index after the last character where to perform the search.
     * @return the index of the first occurrence of the given character in the specified sub-sequence,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#indexOf(int, int)
     */
    public static int indexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            final int length = text.length();
            if (toIndex >= length) {
                if (text instanceof String) {
                    // String provides a faster implementation.
                    return ((String) text).indexOf(toSearch, fromIndex);
                }
                toIndex = length;
            }
            if (fromIndex < 0) {
                fromIndex = 0;
            }
            // Split the code point into its UTF-16 representation: for characters in the
            // BMP, `head` is the character itself and `tail` stays 0; for supplementary
            // characters we must match a surrogate pair, so the last valid start position
            // moves one character to the left (toIndex--).
            char head = (char) toSearch;
            char tail = (char) 0;
            if (head != toSearch) { // Outside BMP plane?
                head = highSurrogate(toSearch);
                tail = lowSurrogate (toSearch);
                toIndex--;
            }
            while (fromIndex < toIndex) {
                if (text.charAt(fromIndex) == head) {
                    // A zero tail means a BMP character: the head alone is a full match.
                    if (tail == 0 || text.charAt(fromIndex+1) == tail) {
                        return fromIndex;
                    }
                }
                fromIndex++;
            }
        }
        return -1;
    }
    /**
     * Returns the index within the given character sequence of the last occurrence of the
     * specified character, searching backward in the given index range.
     * If the character is not found, then this method returns -1.
     *
     * <p>There is no restriction on the value of {@code toIndex}. If greater than the text length
     * or less than {@code fromIndex}, then the behavior of this method is as if the search started
     * from {@code length} or {@code fromIndex} respectively. This is consistent with the behavior
     * documented in {@link String#lastIndexOf(int, int)}.</p>
     *
     * @param text the character sequence in which to perform the search, or {@code null}.
     * @param toSearch the Unicode code point of the character to search.
     * @param fromIndex the index of the first character in the range where to perform the search.
     * @param toIndex the index after the last character in the range where to perform the search.
     * @return the index of the last occurrence of the given character in the specified sub-sequence,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#lastIndexOf(int, int)
     */
    public static int lastIndexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            if (fromIndex <= 0) {
                if (text instanceof String) {
                    // String provides a faster implementation.
                    return ((String) text).lastIndexOf(toSearch, toIndex - 1);
                }
                fromIndex = 0;
            }
            final int length = text.length();
            if (toIndex > length) {
                toIndex = length;
            }
            // Mirror image of the forward search: scanning backward we meet the low
            // surrogate (tail) first. For supplementary characters the match needs two
            // chars, so the lowest valid position moves one character to the right
            // (fromIndex++).
            char tail = (char) toSearch;
            char head = (char) 0;
            if (tail != toSearch) { // Outside BMP plane?
                tail = lowSurrogate (toSearch);
                head = highSurrogate(toSearch);
                fromIndex++;
            }
            while (toIndex > fromIndex) {
                if (text.charAt(--toIndex) == tail) {
                    // A zero head means a BMP character: the tail alone is a full match.
                    // Otherwise the second decrement leaves toIndex on the high surrogate,
                    // which is the index of the code point.
                    if (head == 0 || text.charAt(--toIndex) == head) {
                        return toIndex;
                    }
                }
            }
        }
        return -1;
    }
    /**
     * Returns the index of the first character after the given number of lines.
     * This method counts the number of occurrence of {@code '\n'}, {@code '\r'}
     * or {@code "\r\n"} starting from the given position. When {@code numLines}
     * occurrences have been found, the index of the first character after the last
     * occurrence is returned.
     *
     * <p>If the {@code numLines} argument is positive, this method searches forward.
     * If negative, this method searches backward. If 0, this method returns the
     * beginning of the current line.</p>
     *
     * <p>If this method reaches the end of {@code text} while searching forward, then
     * {@code text.length()} is returned. If this method reaches the beginning of
     * {@code text} while searching backward, then 0 is returned.</p>
     *
     * @param  text      the string in which to skip a determined amount of lines.
     * @param  numLines  the number of lines to skip. Can be positive, zero or negative.
     * @param  fromIndex index at which to start the search, from 0 to {@code text.length()} inclusive.
     * @return index of the first character after the last skipped line.
     * @throws NullPointerException if the {@code text} argument is null.
     * @throws IndexOutOfBoundsException if {@code fromIndex} is out of bounds.
     */
    public static int indexOfLineStart(final CharSequence text, int numLines, int fromIndex) {
        final int length = text.length();
        /*
         * Go backward if the number of lines is negative.
         * No need to use the codePoint API because we are
         * looking only for characters in the BMP plane.
         */
        if (numLines <= 0) {
            do {
                char c;
                do {
                    if (fromIndex == 0) {
                        return fromIndex;               // Reached the beginning of text: can not go further back.
                    }
                    c = text.charAt(--fromIndex);
                    if (c == '\n') {
                        // A "\r\n" sequence counts as a single end-of-line: also consume the '\r'.
                        if (fromIndex != 0 && text.charAt(fromIndex - 1) == '\r') {
                            --fromIndex;
                        }
                        break;
                    }
                } while (c != '\r');
            } while (++numLines != 1);                  // Loop (1 - numLines) times; numLines is 1 on exit.
            // Execute the forward code below for skipping the "end of line" characters.
        }
        /*
         * Skips forward the given amount of lines.
         */
        while (--numLines >= 0) {
            char c;
            do {
                if (fromIndex == length) {
                    return fromIndex;                   // Reached the end of text: can not go further.
                }
                c = text.charAt(fromIndex++);
                if (c == '\r') {
                    // A "\r\n" sequence counts as a single end-of-line: also consume the '\n'.
                    if (fromIndex != length && text.charAt(fromIndex) == '\n') {
                        fromIndex++;
                    }
                    break;
                }
            } while (c != '\n');
        }
        return fromIndex;
    }
/**
* Returns the index of the first non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character after the given range, which is always equals or greater than {@code toIndex}.
* Note that this character may not exist if {@code toIndex} is equals to the text length.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is greater than {@code toIndex},
* then this method unconditionally returns {@code fromIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code toIndex-1}
* is the high surrogate of a valid supplementary code point, then this method returns
* {@code toIndex+1}, which is the index of the next code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the first occurrence of a non-space character, starting
* at the specified index, or a value equals or greater than {@code toIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripLeading()
*/
public static int skipLeadingWhitespaces(final CharSequence text, int fromIndex, final int toIndex) {
while (fromIndex < toIndex) {
final int c = codePointAt(text, fromIndex);
if (!isWhitespace(c)) break;
fromIndex += charCount(c);
}
return fromIndex;
}
/**
* Returns the index <em>after</em> the last non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character in the given range, which is always equals or lower than {@code fromIndex}.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is lower than {@code toIndex},
* then this method unconditionally returns {@code toIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code fromIndex}
* is the low surrogate of a valid supplementary code point, then this method returns
* {@code fromIndex-1}, which is the index of the code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the last occurrence of a non-space character, starting
* at the specified index, or a value equals or lower than {@code fromIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripTrailing()
*/
public static int skipTrailingWhitespaces(final CharSequence text, final int fromIndex, int toIndex) {
while (toIndex > fromIndex) {
final int c = codePointBefore(text, toIndex);
if (!isWhitespace(c)) break;
toIndex -= charCount(c);
}
return toIndex;
}
/**
* Allocates the array to be returned by the {@code split(…)} methods. If the given {@code text} argument is
* an instance of {@link String}, {@link StringBuilder} or {@link StringBuffer}, then this method returns a
* {@code String[]} array instead of {@code CharSequence[]}. This is possible because the specification of
* their {@link CharSequence#subSequence(int, int)} method guarantees to return {@code String} instances.
* Some Apache SIS code will cast the {@code split(…)} return value based on this knowledge.
*
* <p>Note that this is a undocumented SIS features. There is currently no commitment that this implementation
* details will not change in future version.</p>
*
* @param text the text to be splitted.
* @return an array where to store the result of splitting the given {@code text}.
*/
private static CharSequence[] createSplitArray(final CharSequence text) {
return (text instanceof String ||
text instanceof StringBuilder ||
text instanceof StringBuffer) ? new String[8] : new CharSequence[8];
}
    /**
     * Splits a text around the given character. The array returned by this method contains all
     * subsequences of the given text that is terminated by the given character or is terminated
     * by the end of the text. The subsequences in the array are in the order in which they occur
     * in the given text. If the character is not found in the input, then the resulting array has
     * just one element, which is the whole given text.
     *
     * <p>This method is similar to the standard {@link String#split(String)} method except for the
     * following:</p>
     *
     * <ul>
     *   <li>It accepts generic character sequences.</li>
     *   <li>It accepts {@code null} argument, in which case an empty array is returned.</li>
     *   <li>The separator is a simple character instead of a regular expression.</li>
     *   <li>If the {@code separator} argument is {@code '\n'} or {@code '\r'}, then this method
     *       splits around any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} characters sequences.</li>
     *   <li>The leading and trailing spaces of each subsequences are trimmed.</li>
     * </ul>
     *
     * @param  text       the text to split, or {@code null}.
     * @param  separator  the delimiting character (typically the coma).
     * @return the array of subsequences computed by splitting the given text around the given
     *         character, or an empty array if {@code text} was null.
     *
     * @see String#split(String)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] split(final CharSequence text, final char separator) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        if (separator == '\n' || separator == '\r') {
            // Delegate to the EOL-aware variant, which handles "\r\n" as a single separator.
            final CharSequence[] splitted = splitOnEOL(text);
            for (int i=0; i < splitted.length; i++) {
                // For consistency with the rest of this method.
                splitted[i] = trimWhitespaces(splitted[i]);
            }
            return splitted;
        }
        // 'excludeEmpty' must use the same criterion than trimWhitespaces(…).
        // If the separator is itself a whitespace, trimming would leave empty items: drop them.
        final boolean excludeEmpty = isWhitespace(separator);
        CharSequence[] splitted = createSplitArray(text);
        final int length = text.length();
        int count = 0, last = 0, i = 0;
        while ((i = indexOf(text, separator, i, length)) >= 0) {
            final CharSequence item = trimWhitespaces(text, last, i);
            if (!excludeEmpty || item.length() != 0) {
                if (count == splitted.length) {
                    splitted = Arrays.copyOf(splitted, count << 1);  // Double the capacity when full.
                }
                splitted[count++] = item;
            }
            last = ++i;                                 // Resume the search after the separator.
        }
        // Add the last element.
        final CharSequence item = trimWhitespaces(text, last, length);
        if (!excludeEmpty || item.length() != 0) {
            if (count == splitted.length) {
                splitted = Arrays.copyOf(splitted, count + 1);  // Grow by one: this is the last element.
            }
            splitted[count++] = item;
        }
        return ArraysExt.resize(splitted, count);       // Trim the array to its actual element count.
    }
    /**
     * Splits a text around the <cite>End Of Line</cite> (EOL) characters.
     * EOL characters can be any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} sequences.
     * Each element in the returned array will be a single line. If the given text is already
     * a single line, then this method returns a singleton containing only the given text.
     *
     * <p>Notes:</p>
     * <ul>
     *   <li>At the difference of <code>{@linkplain #split split}(toSplit, '\n')</code>,
     *       this method does not remove whitespaces.</li>
     *   <li>This method does not check for Unicode
     *       {@linkplain Characters#LINE_SEPARATOR line separator} and
     *       {@linkplain Characters#PARAGRAPH_SEPARATOR paragraph separator}.</li>
     * </ul>
     *
     * <div class="note"><b>Performance note:</b>
     * Prior JDK8 this method was usually cheap because all string instances created by
     * {@link String#substring(int,int)} shared the same {@code char[]} internal array.
     * However since JDK8, the new {@code String} implementation copies the data in new arrays.
     * Consequently it is better to use index rather than this method for splitting large {@code String}s.
     * However this method still useful for other {@link CharSequence} implementations providing an efficient
     * {@code subSequence(int,int)} method.</div>
     *
     * @param  text  the multi-line text from which to get the individual lines, or {@code null}.
     * @return the lines in the text, or an empty array if the given text was null.
     *
     * @see #indexOfLineStart(CharSequence, int, int)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] splitOnEOL(final CharSequence text) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        /*
         * This method is implemented on top of String.indexOf(int,int),
         * assuming that it will be faster for String and StringBuilder.
         * 'lf' and 'cr' always hold the position of the NEXT occurrence
         * of each character, or -1 if there is no more occurrence.
         */
        final int length = text.length();
        int lf = indexOf(text, '\n', 0, length);
        int cr = indexOf(text, '\r', 0, length);
        if (lf < 0 && cr < 0) {
            return new CharSequence[] {
                text                                    // Single line: return the text unchanged.
            };
        }
        int count = 0;
        CharSequence[] splitted = createSplitArray(text);
        int last = 0;                                   // Start of the line currently being accumulated.
        boolean hasMore;
        do {
            int skip = 1;                               // Number of EOL characters to skip (2 for "\r\n").
            final int splitAt;
            if (cr < 0) {
                // There is no "\r" character in the whole text, only "\n".
                splitAt = lf;
                hasMore = (lf = indexOf(text, '\n', lf+1, length)) >= 0;
            } else if (lf < 0) {
                // There is no "\n" character in the whole text, only "\r".
                splitAt = cr;
                hasMore = (cr = indexOf(text, '\r', cr+1, length)) >= 0;
            } else if (lf < cr) {
                // There is both "\n" and "\r" characters with "\n" first.
                splitAt = lf;
                hasMore = true;
                lf = indexOf(text, '\n', lf+1, length);
            } else {
                // There is both "\r" and "\n" characters with "\r" first.
                // We need special care for the "\r\n" sequence.
                splitAt = cr;
                if (lf == ++cr) {                       // Is the "\n" immediately after the "\r"?
                    cr = indexOf(text, '\r', cr+1, length);
                    lf = indexOf(text, '\n', lf+1, length);
                    hasMore = (cr >= 0 || lf >= 0);
                    skip = 2;                           // Consume "\r\n" as a single line terminator.
                } else {
                    cr = indexOf(text, '\r', cr+1, length);
                    hasMore = true; // Because there is lf.
                }
            }
            if (count >= splitted.length) {
                splitted = Arrays.copyOf(splitted, count*2);
            }
            splitted[count++] = text.subSequence(last, splitAt);
            last = splitAt + skip;                      // Next line starts after the EOL sequence.
        } while (hasMore);
        /*
         * Add the remaining string and we are done.
         */
        if (count >= splitted.length) {
            splitted = Arrays.copyOf(splitted, count+1);
        }
        splitted[count++] = text.subSequence(last, text.length());
        return ArraysExt.resize(splitted, count);
    }
/**
* Returns {@code true} if {@link #split(CharSequence, char)} parsed an empty string.
*/
private static boolean isEmpty(final CharSequence[] tokens) {
switch (tokens.length) {
case 0: return true;
case 1: return tokens[0].length() == 0;
default: return false;
}
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Double#parseDouble(String) parses} each item as a {@code double}.
* Empty sub-sequences are parsed as {@link Double#NaN}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static double[] parseDoubles(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_DOUBLE;
final double[] parsed = new double[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Double.NaN : Double.parseDouble(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Float#parseFloat(String) parses} each item as a {@code float}.
* Empty sub-sequences are parsed as {@link Float#NaN}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static float[] parseFloats(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_FLOAT;
final float[] parsed = new float[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Float.NaN : Float.parseFloat(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Long#parseLong(String) parses} each item as a {@code long}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static long[] parseLongs(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_LONG;
final long[] parsed = new long[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Long.parseLong(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Integer#parseInt(String) parses} each item as an {@code int}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static int[] parseInts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_INT;
final int[] parsed = new int[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Integer.parseInt(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Short#parseShort(String) parses} each item as a {@code short}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static short[] parseShorts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_SHORT;
final short[] parsed = new short[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Short.parseShort(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Byte#parseByte(String) parses} each item as a {@code byte}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static byte[] parseBytes(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_BYTE;
final byte[] parsed = new byte[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Byte.parseByte(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
    /**
     * Replaces some Unicode characters by ASCII characters on a "best effort basis".
     * For example the “ é ” character is replaced by “ e ” (without accent),
     * the “ ″ ” symbol for minutes of angle is replaced by straight double quotes “ " ”,
     * and combined characters like ㎏, ㎎, ㎝, ㎞, ㎢, ㎦, ㎖, ㎧, ㎩, ㎐, <i>etc.</i> are replaced
     * by the corresponding sequences of characters.
     *
     * <div class="note"><b>Note:</b>
     * the replacement of Greek letters is a more complex task than what this method can do,
     * since it depends on the context. For example if the Greek letters are abbreviations
     * for coordinate system axes like φ and λ, then the replacements depend on the enclosing
     * coordinate system. See {@link org.apache.sis.io.wkt.Transliterator} for more information.</div>
     *
     * @param  text  the text to scan for Unicode characters to replace by ASCII characters, or {@code null}.
     * @return the given text with substitutions applied, or {@code text} if no replacement
     *         has been applied, or {@code null} if the given text was null.
     *
     * @see StringBuilders#toASCII(StringBuilder)
     * @see org.apache.sis.io.wkt.Transliterator#filter(String)
     * @see java.text.Normalizer
     */
    public static CharSequence toASCII(final CharSequence text) {
        // The null buffer argument asks StringBuilders.toASCII(…) to create a buffer
        // only if at least one replacement is actually needed.
        return StringBuilders.toASCII(text, null);
    }
/**
* Returns a string with leading and trailing whitespace characters omitted.
* This method is similar in purpose to {@link String#trim()}, except that the later considers
* every {@linkplain Character#isISOControl(int) ISO control codes} below 32 to be a whitespace.
* That {@code String.trim()} behavior has the side effect of removing the heading of ANSI escape
* sequences (a.k.a. X3.64), and to ignore Unicode spaces. This {@code trimWhitespaces(…)} method
* is built on the more accurate {@link Character#isWhitespace(int)} method instead.
*
* <p>This method performs the same work than {@link #trimWhitespaces(CharSequence)},
* but is overloaded for the {@code String} type because of its frequent use.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a string with leading and trailing whitespaces removed, or {@code null} is the given
* text was null.
*
* @todo To be replaced by {@link String#strip()} in JDK 11.
*/
public static String trimWhitespaces(String text) {
if (text != null) {
final int length = text.length();
final int lower = skipLeadingWhitespaces(text, 0, length);
text = text.substring(lower, skipTrailingWhitespaces(text, lower, length));
}
return text;
}
/**
* Returns a text with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>This method is the generic version of {@link #trimWhitespaces(String)}.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a characters sequence with leading and trailing whitespaces removed,
* or {@code null} is the given text was null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see String#strip()
*/
public static CharSequence trimWhitespaces(CharSequence text) {
if (text != null) {
text = trimWhitespaces(text, 0, text.length());
}
return text;
}
/**
* Returns a sub-sequence with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>Invoking this method is functionally equivalent to the following code snippet,
* except that the {@link CharSequence#subSequence(int, int) subSequence} method is
* invoked only once instead of two times:</p>
*
* {@preformat java
* text = trimWhitespaces(text.subSequence(lower, upper));
* }
*
* @param text the text from which to remove leading and trailing white spaces.
* @param lower index of the first character to consider for inclusion in the sub-sequence.
* @param upper index after the last character to consider for inclusion in the sub-sequence.
* @return a characters sequence with leading and trailing white spaces removed, or {@code null}
* if the {@code text} argument is null.
* @throws IndexOutOfBoundsException if {@code lower} or {@code upper} is out of bounds.
*/
public static CharSequence trimWhitespaces(CharSequence text, int lower, int upper) {
final int length = length(text);
ArgumentChecks.ensureValidIndexRange(length, lower, upper);
if (text != null) {
lower = skipLeadingWhitespaces (text, lower, upper);
upper = skipTrailingWhitespaces(text, lower, upper);
if (lower != 0 || upper != length) { // Safety in case subSequence doesn't make the check.
text = text.subSequence(lower, upper);
}
}
return text;
}
/**
* Trims the fractional part of the given formatted number, provided that it doesn't change
* the value. This method assumes that the number is formatted in the US locale, typically
* by the {@link Double#toString(double)} method.
*
* <p>More specifically if the given value ends with a {@code '.'} character followed by a
* sequence of {@code '0'} characters, then those characters are omitted. Otherwise this
* method returns the text unchanged. This is a <cite>"all or nothing"</cite> method:
* either the fractional part is completely removed, or either it is left unchanged.</p>
*
* <h4>Examples</h4>
* This method returns {@code "4"} if the given value is {@code "4."}, {@code "4.0"} or
* {@code "4.00"}, but returns {@code "4.10"} unchanged (including the trailing {@code '0'}
* character) if the input is {@code "4.10"}.
*
* <h4>Use case</h4>
* This method is useful before to {@linkplain Integer#parseInt(String) parse a number}
* if that number should preferably be parsed as an integer before attempting to parse
* it as a floating point number.
*
* @param value the value to trim if possible, or {@code null}.
* @return the value without the trailing {@code ".0"} part (if any),
* or {@code null} if the given text was null.
*
* @see StringBuilders#trimFractionalPart(StringBuilder)
*/
public static CharSequence trimFractionalPart(final CharSequence value) {
if (value != null) {
for (int i=value.length(); i>0;) {
final int c = codePointBefore(value, i);
i -= charCount(c);
switch (c) {
case '0': continue;
case '.': return value.subSequence(0, i);
default : return value;
}
}
}
return value;
}
    /**
     * Makes sure that the {@code text} string is not longer than {@code maxLength} characters.
     * If {@code text} is not longer, then it is returned unchanged. Otherwise this method returns
     * a copy of {@code text} with some characters substituted by the {@code "(…)"} string.
     *
     * <p>If the text needs to be shortened, then this method tries to apply the above-cited
     * substitution between two words. For example, the following text:</p>
     *
     * <blockquote>
     *   "This sentence given as an example is way too long to be included in a short name."
     * </blockquote>
     *
     * May be shortened to something like this:
     *
     * <blockquote>
     *   "This sentence given (…) in a short name."
     * </blockquote>
     *
     * @param  text       the sentence to reduce if it is too long, or {@code null}.
     * @param  maxLength  the maximum length allowed for {@code text}.
     * @return a sentence not longer than {@code maxLength}, or {@code null} if the given text was null.
     */
    public static CharSequence shortSentence(CharSequence text, final int maxLength) {
        ArgumentChecks.ensureStrictlyPositive("maxLength", maxLength);
        if (text != null) {
            final int length = text.length();
            int toRemove = length - maxLength;
            if (toRemove > 0) {
                toRemove += 5; // Space needed for the " (…) " string.
                /*
                 * We will remove characters from 'lower' to 'upper' both exclusive. We try to
                 * adjust 'lower' and 'upper' in such a way that the first and last characters
                 * to be removed will be spaces or punctuation characters.
                 */
                int lower = length >>> 1;               // Start removal around the middle of the text.
                if (lower != 0 && isLowSurrogate(text.charAt(lower))) {
                    lower--;                            // Do not split a surrogate pair.
                }
                int upper = lower;
                boolean forward = false;                // Alternate between forward and backward expansion.
                do { // To be run as long as we need to remove more characters.
                    int nc=0, type=UNASSIGNED;
                    forward = !forward;
searchWordBreak:    while (true) {
                        final int c;
                        if (forward) {
                            if ((upper += nc) == length) break;
                            c = codePointAt(text, upper);
                        } else {
                            if ((lower -= nc) == 0) break;
                            c = codePointBefore(text, lower);
                        }
                        nc = charCount(c);
                        if (isWhitespace(c)) {
                            if (type != UNASSIGNED) {
                                type = SPACE_SEPARATOR;
                            }
                        } else switch (type) {
                            // After we skipped white, then non-white, then white characters, stop.
                            case SPACE_SEPARATOR: {
                                break searchWordBreak;
                            }
                            // For the first non-white character, just remember its type.
                            // Arbitrarily use UPPERCASE_LETTER for any kind of identifier
                            // part (which include UPPERCASE_LETTER anyway).
                            case UNASSIGNED: {
                                type = isUnicodeIdentifierPart(c) ? UPPERCASE_LETTER : getType(c);
                                break;
                            }
                            // If we expected an identifier, stop at the first other char.
                            case UPPERCASE_LETTER: {
                                if (!isUnicodeIdentifierPart(c)) {
                                    break searchWordBreak;
                                }
                                break;
                            }
                            // For all other kind of character, break when the type change.
                            default: {
                                if (getType(c) != type) {
                                    break searchWordBreak;
                                }
                                break;
                            }
                        }
                        toRemove -= nc;                 // Count this character as removed.
                    }
                } while (toRemove > 0);
                // Keep [0 … lower) and [upper … length), with " (…) " in between.
                text = new StringBuilder(lower + (length-upper) + 5)        // 5 is the length of " (…) "
                        .append(text, 0, lower).append(" (…) ").append(text, upper, length);
            }
        }
        return text;
    }
/**
* Given a string in upper cases (typically a Java constant), returns a string formatted
* like an English sentence. This heuristic method performs the following steps:
*
* <ol>
* <li>Replace all occurrences of {@code '_'} by spaces.</li>
* <li>Converts all letters except the first one to lower case letters using
* {@link Character#toLowerCase(int)}. Note that this method does not use
* the {@link String#toLowerCase()} method. Consequently the system locale
* is ignored. This method behaves as if the conversion were done in the
* {@linkplain java.util.Locale#ROOT root} locale.</li>
* </ol>
*
* <p>Note that those heuristic rules may be modified in future SIS versions,
* depending on the practical experience gained.</p>
*
* @param identifier the name of a Java constant, or {@code null}.
* @return the identifier like an English sentence, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence upperCaseToSentence(final CharSequence identifier) {
if (identifier == null) {
return null;
}
final StringBuilder buffer = new StringBuilder(identifier.length());
final int length = identifier.length();
for (int i=0; i<length;) {
int c = codePointAt(identifier, i);
if (i != 0) {
if (c == '_') {
c = ' ';
} else {
c = toLowerCase(c);
}
}
buffer.appendCodePoint(c);
i += charCount(c);
}
return buffer;
}
    /**
     * Given a string in camel cases (typically an identifier), returns a string formatted
     * like an English sentence. This heuristic method performs the following steps:
     *
     * <ol>
     *   <li>Invoke {@link #camelCaseToWords(CharSequence, boolean)}, which separate the words
     *       on the basis of character case. For example {@code "transferFunctionType"} become
     *       <cite>"transfer function type"</cite>. This works fine for ISO 19115 identifiers.</li>
     *
     *   <li>Next replace all occurrence of {@code '_'} by spaces in order to take in account
     *       an other common naming convention, which uses {@code '_'} as a word separator. This
     *       convention is used by netCDF attributes like {@code "project_name"}.</li>
     *
     *   <li>Finally ensure that the first character is upper-case.</li>
     * </ol>
     *
     * <h4>Exception to the above rules</h4>
     * If the given identifier contains only upper-case letters, digits and the {@code '_'} character,
     * then the identifier is returned "as is" except for the {@code '_'} characters which are replaced by {@code '-'}.
     * This work well for identifiers like {@code "UTF-8"} or {@code "ISO-LATIN-1"} for instance.
     *
     * <p>Note that those heuristic rules may be modified in future SIS versions,
     * depending on the practical experience gained.</p>
     *
     * @param  identifier  an identifier with no space, words begin with an upper-case character, or {@code null}.
     * @return the identifier with spaces inserted after what looks like words, or {@code null}
     *         if the given {@code identifier} argument was null.
     */
    public static CharSequence camelCaseToSentence(final CharSequence identifier) {
        if (identifier == null) {
            return null;
        }
        final StringBuilder buffer;
        if (isCode(identifier)) {
            // Looks like a code such as "UTF_8": only substitute '_' by '-'.
            if (identifier instanceof String) {
                return ((String) identifier).replace('_', '-');
            }
            buffer = new StringBuilder(identifier);
            StringBuilders.replace(buffer, '_', '-');
        } else {
            // Relies on camelCaseToWords(…, true) always returning a new StringBuilder
            // (documented as an implementation contract in that method).
            buffer = (StringBuilder) camelCaseToWords(identifier, true);
            final int length = buffer.length();
            if (length != 0) {
                StringBuilders.replace(buffer, '_', ' ');
                // Upper-case the first code point if it is not already.
                final int c  = buffer.codePointAt(0);
                final int up = toUpperCase(c);
                if (c != up) {
                    StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
                }
            }
        }
        return buffer;
    }
    /**
     * Given a string in camel cases, returns a string with the same words separated by spaces.
     * A word begins with a upper-case character following a lower-case character. For example
     * if the given string is {@code "PixelInterleavedSampleModel"}, then this method returns
     * <cite>"Pixel Interleaved Sample Model"</cite> or <cite>"Pixel interleaved sample model"</cite>
     * depending on the value of the {@code toLowerCase} argument.
     *
     * <p>If {@code toLowerCase} is {@code false}, then this method inserts spaces but does not change
     * the case of characters. If {@code toLowerCase} is {@code true}, then this method changes
     * {@linkplain Character#toLowerCase(int) to lower case} the first character after each spaces
     * inserted by this method (note that this intentionally exclude the very first character in
     * the given string), except if the second character {@linkplain Character#isUpperCase(int)
     * is upper case}, in which case the word is assumed an acronym.</p>
     *
     * <p>The given string is usually a programmatic identifier like a class name or a method name.</p>
     *
     * @param  identifier   an identifier with no space, words begin with an upper-case character.
     * @param  toLowerCase  {@code true} for changing the first character of words to lower case,
     *         except for the first word and acronyms.
     * @return the identifier with spaces inserted after what looks like words, or {@code null}
     *         if the given {@code identifier} argument was null.
     */
    public static CharSequence camelCaseToWords(final CharSequence identifier, final boolean toLowerCase) {
        if (identifier == null) {
            return null;
        }
        /*
         * Implementation note: the 'camelCaseToSentence' method needs
         * this method to unconditionally returns a new StringBuilder.
         */
        final int length = identifier.length();
        final StringBuilder buffer = new StringBuilder(length + 8);
        // Index of the last code point, used for the acronym check below.
        final int lastIndex = (length != 0) ? length - charCount(codePointBefore(identifier, length)) : 0;
        int last = 0;                                   // Start index of the word currently being accumulated.
        for (int i=1; i<=length;) {
            final int cp;
            final boolean doAppend;
            if (i == length) {
                cp = 0;                                 // Sentinel: flush the final word.
                doAppend = true;
            } else {
                cp = codePointAt(identifier, i);
                // A word break is a lower-case character followed by an upper-case one.
                doAppend = Character.isUpperCase(cp) && isLowerCase(codePointBefore(identifier, i));
            }
            if (doAppend) {
                final int pos = buffer.length();
                buffer.append(identifier, last, i).append(' ');
                // Lower-case the first character of the word, except for the very first word
                // and for acronyms (detected by the character after the word start being lower-case).
                if (toLowerCase && pos!=0 && last<lastIndex && isLowerCase(codePointAfter(identifier, last))) {
                    final int c = buffer.codePointAt(pos);
                    final int low = toLowerCase(c);
                    if (c != low) {
                        StringBuilders.replace(buffer, pos, pos + charCount(c), toChars(low));
                    }
                }
                last = i;
            }
            i += charCount(cp);
        }
        /*
         * Removes the trailing space, if any.
         */
        final int lg = buffer.length();
        if (lg != 0) {
            final int cp = buffer.codePointBefore(lg);
            if (isWhitespace(cp)) {
                buffer.setLength(lg - charCount(cp));
            }
        }
        return buffer;
    }
    /**
     * Creates an acronym from the given text. This method returns a string containing the first character of each word,
     * where the words are separated by the camel case convention, the {@code '_'} character, or any character which is
     * not a {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part} (including spaces).
     *
     * <p>An exception to the above rule happens if the given text is a Unicode identifier without the {@code '_'}
     * character, and every characters are upper case. In such case the text is returned unchanged on the assumption
     * that it is already an acronym.</p>
     *
     * <p><b>Examples:</b> given {@code "northEast"}, this method returns {@code "NE"}.
     * Given {@code "Open Geospatial Consortium"}, this method returns {@code "OGC"}.</p>
     *
     * @param text the text for which to create an acronym, or {@code null}.
     * @return the acronym, or {@code null} if the given text was null.
     */
    public static CharSequence camelCaseToAcronym(CharSequence text) {
        text = trimWhitespaces(text);
        if (text != null && !isAcronym(text)) {
            final int length = text.length();
            final StringBuilder buffer = new StringBuilder(8);          // Acronyms are usually short.
            boolean wantChar = true;                                    // Whether the next identifier character starts a new word.
            for (int i=0; i<length;) {
                final int c = codePointAt(text, i);
                if (wantChar) {
                    if (isUnicodeIdentifierStart(c)) {
                        buffer.appendCodePoint(c);
                        wantChar = false;
                    }
                } else if (!isUnicodeIdentifierPart(c) || c == '_') {
                    // Word separator found: take the first character of the next word.
                    wantChar = true;
                } else if (Character.isUpperCase(c)) {
                    // Test for mixed-case (e.g. "northEast").
                    // Note that i is guaranteed to be greater than 0 here.
                    if (!Character.isUpperCase(codePointBefore(text, i))) {
                        buffer.appendCodePoint(c);
                    }
                }
                i += charCount(c);
            }
            final int acrlg = buffer.length();
            if (acrlg != 0) {
                /*
                 * If every characters except the first one are upper-case, ensure that the
                 * first one is upper-case as well. This is for handling the identifiers which
                 * are compliant to Java-Beans convention (e.g. "northEast").
                 */
                if (isUpperCase(buffer, 1, acrlg, true)) {
                    final int c = buffer.codePointAt(0);
                    final int up = toUpperCase(c);
                    if (c != up) {
                        StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
                    }
                }
                // Keep the original text (not the buffer) if the acronym is identical to it.
                if (!equals(text, buffer)) {
                    text = buffer;
                }
            }
        }
        return text;
    }
    /**
     * Returns {@code true} if the first string is likely to be an acronym of the second string.
     * An acronym is a sequence of {@linkplain Character#isLetterOrDigit(int) letters or digits}
     * built from at least one character of each word in the {@code words} string. More than
     * one character from the same word may appear in the acronym, but they must always
     * be the first consecutive characters. The comparison is case-insensitive.
     *
     * <div class="note"><b>Example:</b>
     * Given the {@code "Open Geospatial Consortium"} words, the following strings are recognized as acronyms:
     * {@code "OGC"}, {@code "ogc"}, {@code "O.G.C."}, {@code "OpGeoCon"}.</div>
     *
     * If any of the given arguments is {@code null}, this method returns {@code false}.
     *
     * @param acronym a possible acronym of the sequence of words, or {@code null}.
     * @param words the sequence of words, or {@code null}.
     * @return {@code true} if the first string is an acronym of the second one.
     */
    public static boolean isAcronymForWords(final CharSequence acronym, final CharSequence words) {
        /*
         * Advance 'ia' to the first letter or digit of the acronym, returning 'false' if none
         * is found. A null argument is handled here too, since 'length' returns 0 in that case.
         */
        final int lga = length(acronym);
        int ia=0, ca;
        do {
            if (ia >= lga) return false;
            ca = codePointAt(acronym, ia);
            ia += charCount(ca);
        } while (!isLetterOrDigit(ca));
        /*
         * Same search for the first letter or digit of the complete name.
         */
        final int lgc = length(words);
        int ic=0, cc;
        do {
            if (ic >= lgc) return false;
            cc = codePointAt(words, ic);
            ic += charCount(cc);
        }
        while (!isLetterOrDigit(cc));
        if (toUpperCase(ca) != toUpperCase(cc)) {
            // The first letter must match.
            return false;
        }
cmp:    while (ia < lga) {
            if (ic >= lgc) {
                // There is more letters in the acronym than in the complete name.
                return false;
            }
            ca = codePointAt(acronym, ia); ia += charCount(ca);
            cc = codePointAt(words, ic); ic += charCount(cc);
            if (isLetterOrDigit(ca)) {
                if (toUpperCase(ca) == toUpperCase(cc)) {
                    // Acronym letter matches the letter from the complete name.
                    // Continue the comparison with next letter of both strings.
                    continue;
                }
                // Will search for the next word after the 'else' block.
            } else do {
                // Skip separator characters in the acronym (e.g. the dots in "O.G.C.").
                if (ia >= lga) break cmp;
                ca = codePointAt(acronym, ia);
                ia += charCount(ca);
            } while (!isLetterOrDigit(ca));
            /*
             * At this point, 'ca' is the next acronym letter to compare and we
             * need to search for the next word in the complete name. We first
             * skip remaining letters, then we skip non-letter characters.
             */
            boolean skipLetters = true;
            do while (isLetterOrDigit(cc) == skipLetters) {
                if (ic >= lgc) {
                    return false;
                }
                cc = codePointAt(words, ic);
                ic += charCount(cc);
            } while ((skipLetters = !skipLetters) == false);
            // Now that we are aligned on a new word, the first letter must match.
            if (toUpperCase(ca) != toUpperCase(cc)) {
                return false;
            }
        }
        /*
         * Now that we have processed all acronym letters, the complete name can not have
         * any additional word. We can only finish the current word and skip trailing non-
         * letter characters.
         */
        boolean skipLetters = true;
        do {
            do {
                if (ic >= lgc) return true;
                cc = codePointAt(words, ic);
                ic += charCount(cc);
            } while (isLetterOrDigit(cc) == skipLetters);
        } while ((skipLetters = !skipLetters) == false);
        return false;
    }
/**
* Returns {@code true} if the given string contains only upper case letters or digits.
* A few punctuation characters like {@code '_'} and {@code '.'} are also accepted.
*
* <p>This method is used for identifying character strings that are likely to be code
* like {@code "UTF-8"} or {@code "ISO-LATIN-1"}.</p>
*
* @see #isUnicodeIdentifier(CharSequence)
*/
private static boolean isCode(final CharSequence identifier) {
for (int i=identifier.length(); --i>=0;) {
final char c = identifier.charAt(i);
// No need to use the code point API here, since the conditions
// below are requiring the characters to be in the basic plane.
if (!((c >= 'A' && c <= 'Z') || (c >= '-' && c <= ':') || c == '_')) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is presumed to be an acronym. Acronyms are presumed
* to be valid Unicode identifiers in all upper-case letters and without the {@code '_'} character.
*
* @see #camelCaseToAcronym(CharSequence)
*/
private static boolean isAcronym(final CharSequence text) {
return isUpperCase(text) && indexOf(text, '_', 0, text.length()) < 0 && isUnicodeIdentifier(text);
}
/**
* Returns {@code true} if the given identifier is a legal Unicode identifier.
* This method returns {@code true} if the identifier length is greater than zero,
* the first character is a {@linkplain Character#isUnicodeIdentifierStart(int)
* Unicode identifier start} and all remaining characters (if any) are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier parts}.
*
* <h4>Relationship with legal XML identifiers</h4>
* Most legal Unicode identifiers are also legal XML identifiers, but the converse is not true.
* The most noticeable differences are the ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ characters,
* which are legal in XML identifiers but not in Unicode.
*
* <table class="sis">
* <caption>Characters legal in one set but not in the other</caption>
* <tr><th colspan="2">Not legal in Unicode</th> <th class="sep" colspan="2">Not legal in XML</th></tr>
* <tr><td>{@code :}</td><td>(colon)</td> <td class="sep">{@code µ}</td><td>(micro sign)</td></tr>
* <tr><td>{@code -}</td><td>(hyphen or minus)</td> <td class="sep">{@code ª}</td><td>(feminine ordinal indicator)</td></tr>
* <tr><td>{@code .}</td><td>(dot)</td> <td class="sep">{@code º}</td><td>(masculine ordinal indicator)</td></tr>
* <tr><td>{@code ·}</td><td>(middle dot)</td> <td class="sep">{@code ⁔}</td><td>(inverted undertie)</td></tr>
* <tr>
* <td colspan="2">Many punctuation, symbols, <i>etc</i>.</td>
* <td colspan="2" class="sep">{@linkplain Character#isIdentifierIgnorable(int) Identifier ignorable} characters.</td>
* </tr>
* </table>
*
* Note that the ‘{@code _}’ (underscore) character is legal according both Unicode and XML, while spaces,
* ‘{@code !}’, ‘{@code #}’, ‘{@code *}’, ‘{@code /}’, ‘{@code ?}’ and most other punctuation characters are not.
*
* <h4>Usage in Apache SIS</h4>
* In its handling of {@linkplain org.apache.sis.referencing.ImmutableIdentifier identifiers}, Apache SIS favors
* Unicode identifiers without {@linkplain Character#isIdentifierIgnorable(int) ignorable} characters since those
* identifiers are legal XML identifiers except for the above-cited rarely used characters. As a side effect,
* this policy excludes ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ which would normally be legal XML identifiers.
* But since those characters could easily be confused with
* {@linkplain org.apache.sis.util.iso.DefaultNameSpace#DEFAULT_SEPARATOR namespace separators},
* this exclusion is considered desirable.
*
* @param identifier the character sequence to test, or {@code null}.
* @return {@code true} if the given character sequence is a legal Unicode identifier.
*
* @see org.apache.sis.referencing.ImmutableIdentifier
* @see org.apache.sis.metadata.iso.citation.Citations#toCodeSpace(Citation)
* @see org.apache.sis.referencing.IdentifiedObjects#getSimpleNameOrIdentifier(IdentifiedObject)
*/
public static boolean isUnicodeIdentifier(final CharSequence identifier) {
final int length = length(identifier);
if (length == 0) {
return false;
}
int c = codePointAt(identifier, 0);
if (!isUnicodeIdentifierStart(c)) {
return false;
}
for (int i=0; (i += charCount(c)) < length;) {
c = codePointAt(identifier, i);
if (!isUnicodeIdentifierPart(c)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test (may be {@code null}).
* @return {@code true} if non-null, contains at least one upper-case character and no lower-case character.
*
* @see String#toUpperCase()
*
* @since 0.7
*/
public static boolean isUpperCase(final CharSequence text) {
return isUpperCase(text, 0, length(text), false);
}
/**
* Returns {@code true} if the given sub-sequence is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test.
* @param lower index of the first character to check, inclusive.
* @param upper index of the last character to check, exclusive.
* @param hasUpperCase {@code true} if this method should behave as if the given text already had
* at least one upper-case character (not necessarily in the portion given by the indices).
* @return {@code true} if contains at least one upper-case character and no lower-case character.
*/
private static boolean isUpperCase(final CharSequence text, int lower, final int upper, boolean hasUpperCase) {
while (lower < upper) {
final int c = codePointAt(text, lower);
if (Character.isLowerCase(c)) {
return false;
}
if (!hasUpperCase) {
hasUpperCase = Character.isUpperCase(c);
}
lower += charCount(c);
}
return hasUpperCase;
}
    /**
     * Returns {@code true} if the given texts are equal, optionally ignoring case and filtered-out characters.
     * This method is sometimes used for comparing identifiers in a lenient way.
     *
     * <p><b>Example:</b> the following call compares the two strings ignoring case and any
     * characters which are not {@linkplain Character#isLetterOrDigit(int) letter or digit}.
     * In particular, spaces and punctuation characters like {@code '_'} and {@code '-'} are
     * ignored:</p>
     *
     * {@preformat java
     *     assert equalsFiltered("WGS84", "WGS_84", Characters.Filter.LETTERS_AND_DIGITS, true) == true;
     * }
     *
     * @param s1 the first characters sequence to compare, or {@code null}.
     * @param s2 the second characters sequence to compare, or {@code null}.
     * @param filter the subset of characters to compare, or {@code null} for comparing all characters.
     * @param ignoreCase {@code true} for ignoring cases, or {@code false} for requiring exact match.
     * @return {@code true} if both arguments are {@code null} or if the two given texts are equal,
     *         optionally ignoring case and filtered-out characters.
     */
    public static boolean equalsFiltered(final CharSequence s1, final CharSequence s2,
            final Characters.Filter filter, final boolean ignoreCase)
    {
        if (s1 == s2) {
            return true;
        }
        if (s1 == null || s2 == null) {
            return false;
        }
        if (filter == null) {
            // No filtering requested: fallback on the plain comparison methods.
            return ignoreCase ? equalsIgnoreCase(s1, s2) : equals(s1, s2);
        }
        final int lg1 = s1.length();
        final int lg2 = s2.length();
        int i1 = 0, i2 = 0;
        // For each significant (accepted by the filter) character of the first string,
        // find the next significant character of the second string and compare them.
        while (i1 < lg1) {
            int c1 = codePointAt(s1, i1);
            final int n = charCount(c1);
            if (filter.contains(c1)) {
                int c2;         // Fetch the next significant character from the second string.
                do {
                    if (i2 >= lg2) {
                        return false;       // The first string has more significant characters than expected.
                    }
                    c2 = codePointAt(s2, i2);
                    i2 += charCount(c2);
                } while (!filter.contains(c2));
                // Compare the characters in the same way than String.equalsIgnoreCase(String).
                if (c1 != c2 && !(ignoreCase && equalsIgnoreCase(c1, c2))) {
                    return false;
                }
            }
            i1 += n;
        }
        // Every character remaining in the second string must be filtered out,
        // otherwise the second string has more significant characters than the first one.
        while (i2 < lg2) {
            final int s = codePointAt(s2, i2);
            if (filter.contains(s)) {
                return false;               // The first string has less significant characters than expected.
            }
            i2 += charCount(s);
        }
        return true;
    }
/**
* Returns {@code true} if the given code points are equal, ignoring case.
* This method implements the same comparison algorithm than String#equalsIgnoreCase(String).
*
* <p>This method does not verify if {@code c1 == c2}. This check should have been done
* by the caller, since the caller code is a more optimal place for this check.</p>
*/
private static boolean equalsIgnoreCase(int c1, int c2) {
c1 = toUpperCase(c1);
c2 = toUpperCase(c2);
if (c1 == c2) {
return true;
}
// Need this check for Georgian alphabet.
return toLowerCase(c1) == toLowerCase(c2);
}
/**
* Returns {@code true} if the two given texts are equal, ignoring case.
* This method is similar to {@link String#equalsIgnoreCase(String)}, except
* it works on arbitrary character sequences and compares <cite>code points</cite>
* instead of characters.
*
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, ignoring case,
* or if both arguments are {@code null}.
*
* @see String#equalsIgnoreCase(String)
*/
public static boolean equalsIgnoreCase(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 == null || s2 == null) {
return false;
}
// Do not check for String cases. We do not want to delegate to String.equalsIgnoreCase
// because we compare code points while String.equalsIgnoreCase compares characters.
final int lg1 = s1.length();
final int lg2 = s2.length();
int i1 = 0, i2 = 0;
while (i1<lg1 && i2<lg2) {
final int c1 = codePointAt(s1, i1);
final int c2 = codePointAt(s2, i2);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
i1 += charCount(c1);
i2 += charCount(c2);
}
return i1 == i2;
}
/**
* Returns {@code true} if the two given texts are equal. This method delegates to
* {@link String#contentEquals(CharSequence)} if possible. This method never invoke
* {@link CharSequence#toString()} in order to avoid a potentially large copy of data.
*
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, or if both arguments are {@code null}.
*
* @see String#contentEquals(CharSequence)
*/
public static boolean equals(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 != null && s2 != null) {
if (s1 instanceof String) return ((String) s1).contentEquals(s2);
if (s2 instanceof String) return ((String) s2).contentEquals(s1);
final int length = s1.length();
if (s2.length() == length) {
for (int i=0; i<length; i++) {
if (s1.charAt(i) != s2.charAt(i)) {
return false;
}
}
return true;
}
}
return false;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* in a case-sensitive comparison. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(offset, part, 0, part.length());
* }
*
* This method does not thrown {@code IndexOutOfBoundsException}. Instead if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence for which to tests for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(int, String, int, int)
*/
public static boolean regionMatches(final CharSequence text, final int fromIndex, final CharSequence part) {
if (text instanceof String && part instanceof String) {
// It is okay to delegate to String implementation since we do not ignore cases.
return ((String) text).startsWith((String) part, fromIndex);
}
final int length;
if (fromIndex < 0 || fromIndex + (length = part.length()) > text.length()) {
return false;
}
for (int i=0; i<length; i++) {
// No need to use the code point API here, since we are looking for exact matches.
if (text.charAt(fromIndex + i) != part.charAt(i)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* optionally in a case-insensitive way. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(ignoreCase, offset, part, 0, part.length());
* }
*
* This method does not thrown {@code IndexOutOfBoundsException}. Instead if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence for which to tests for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(boolean, int, String, int, int)
*
* @since 0.4
*/
public static boolean regionMatches(final CharSequence text, int fromIndex, final CharSequence part, final boolean ignoreCase) {
if (!ignoreCase) {
return regionMatches(text, fromIndex, part);
}
// Do not check for String cases. We do not want to delegate to String.regionMatches
// because we compare code points while String.regionMatches(…) compares characters.
final int limit = text.length();
final int length = part.length();
if (fromIndex < 0) { // Not checked before because we want NullPointerException if an argument is null.
return false;
}
for (int i=0; i<length;) {
if (fromIndex >= limit) {
return false;
}
final int c1 = codePointAt(part, i);
final int c2 = codePointAt(text, fromIndex);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
fromIndex += charCount(c2);
i += charCount(c1);
}
return true;
}
    /**
     * Returns {@code true} if the given character sequence starts with the given prefix.
     * This is a convenience for {@code regionMatches(text, 0, prefix, ignoreCase)}.
     *
     * @param text the characters sequence to test.
     * @param prefix the expected prefix.
     * @param ignoreCase {@code true} if the case should be ignored.
     * @return {@code true} if the given sequence starts with the given prefix.
     * @throws NullPointerException if any of the arguments is null.
     */
    public static boolean startsWith(final CharSequence text, final CharSequence prefix, final boolean ignoreCase) {
        return regionMatches(text, 0, prefix, ignoreCase);
    }
/**
* Returns {@code true} if the given character sequence ends with the given suffix.
*
* @param text the characters sequence to test.
* @param suffix the expected suffix.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if the given sequence ends with the given suffix.
* @throws NullPointerException if any of the arguments is null.
*/
public static boolean endsWith(final CharSequence text, final CharSequence suffix, final boolean ignoreCase) {
int is = text.length();
int ip = suffix.length();
while (ip > 0) {
if (is <= 0) {
return false;
}
final int cs = codePointBefore(text, is);
final int cp = codePointBefore(suffix, ip);
if (cs != cp && (!ignoreCase || !equalsIgnoreCase(cs, cp))) {
return false;
}
is -= charCount(cs);
ip -= charCount(cp);
}
return true;
}
/**
* Returns the longest sequence of characters which is found at the beginning of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common prefix, then this method returns an empty string.
*
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common prefix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonPrefix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (i < length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(i) != s2.charAt(i)) {
break;
}
i++;
}
return shortest.subSequence(0, i);
}
/**
* Returns the longest sequence of characters which is found at the end of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common suffix, then this method returns an empty string.
*
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common suffix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonSuffix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (++i <= length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(lg1 - i) != s2.charAt(lg2 - i)) {
break;
}
}
i--;
return shortest.subSequence(length - i, shortest.length());
}
    /**
     * Returns the words found at the beginning and end of both texts.
     * The returned string is the concatenation of the {@linkplain #commonPrefix common prefix}
     * with the {@linkplain #commonSuffix common suffix}, with prefix and suffix eventually made
     * shorter for avoiding to cut in the middle of a word.
     *
     * <p>The purpose of this method is to create a global identifier from a list of component identifiers.
     * The latter are often eastward and northward components of a vector, in which case this method provides
     * an identifier for the vector as a whole.</p>
     *
     * <div class="note"><b>Example:</b>
     * given the following inputs:
     * <ul>
     *   <li>{@code "baroclinic_eastward_velocity"}</li>
     *   <li>{@code "baroclinic_northward_velocity"}</li>
     * </ul>
     * This method returns {@code "baroclinic_velocity"}. Note that the {@code "ward"} characters
     * are a common suffix of both texts but nevertheless omitted because they cut a word.</div>
     *
     * <p>If one of those texts is {@code null}, then the other text is returned.
     * If there are no common words, then this method returns an empty string.</p>
     *
     * <h4>Possible future evolution</h4>
     * Current implementation searches only for a common prefix and a common suffix, ignoring any common words
     * that may appear in the middle of the strings. A character is considered the beginning of a word if it is
     * {@linkplain Character#isLetterOrDigit(int) a letter or digit} which is not preceded by another letter or
     * digit (as leading "s" and "c" in "snake_case"), or if it is an {@linkplain Character#isUpperCase(int)
     * upper case} letter preceded by a {@linkplain Character#isLowerCase(int) lower case} letter or no letter
     * (as both "C" in "CamelCase").
     *
     * @param s1 the first text, or {@code null}.
     * @param s2 the second text, or {@code null}.
     * @return the words common to the beginning and end of both texts (may be empty),
     *         or {@code null} if both texts are null.
     *
     * @since 1.1
     */
    public static CharSequence commonWords(final CharSequence s1, final CharSequence s2) {
        final int lg1 = length(s1);
        final int lg2 = length(s2);
        final int shortestLength = Math.min(lg1, lg2); // 0 if s1 or s2 is null, in which case prefix and suffix will have the other value.
        final CharSequence prefix = commonPrefix(s1, s2); int prefixLength = length(prefix); if (prefixLength >= shortestLength) return prefix;
        final CharSequence suffix = commonSuffix(s1, s2); int suffixLength = length(suffix); if (suffixLength >= shortestLength) return suffix;
        final int length = prefixLength + suffixLength;
        if (length >= lg1) return s1; // Check if one of the strings is already equal to prefix + suffix.
        if (length >= lg2) return s2;
        /*
         * At this point `s1` and `s2` contain at least one character between the prefix and the suffix.
         * If the prefix or the suffix seems to stop in the middle of a word, skip the remaining of that word.
         * For example if `s1` and `s2` are "eastward_velocity" and "northward_velocity", the common suffix is
         * "ward_velocity" but we want to retain only "velocity".
         *
         * The first condition below (before the loop) checks the character after the common prefix (for example "e"
         * in "baroclinic_eastward_velocity" if the prefix is "baroclinic_"). The intent is to handle the case where
         * the word separator is not the same (e.g. "baroclinic_eastward_velocity" and "baroclinic northward velocity",
         * in which case the '_' or ' ' character would not appear in the prefix).
         */
        if (!isWordBoundary(s1, prefixLength, codePointAt(s1, prefixLength)) &&
            !isWordBoundary(s2, prefixLength, codePointAt(s2, prefixLength)))
        {
            // Shrink the prefix until a word boundary is found.
            while (prefixLength > 0) {
                final int c = codePointBefore(prefix, prefixLength);
                final int n = charCount(c);
                prefixLength -= n;
                if (isWordBoundary(prefix, prefixLength, c)) {
                    if (!isLetterOrDigit(c)) prefixLength += n; // Keep separator character.
                    break;
                }
            }
        }
        /*
         * Same process than for the prefix above. The condition before the loop checks the character before suffix
         * for the same reason than above, but using only `isLetterOrDigit` ignoring camel-case. The reason is that
         * if the character before was a word separator according camel-case convention (i.e. an upper-case letter),
         * we would need to include it in the common suffix.
         */
        int suffixStart = 0;
        if (isLetterOrDigit(codePointBefore(s1, lg1 - suffixLength)) &&
            isLetterOrDigit(codePointBefore(s2, lg2 - suffixLength)))
        {
            // Advance the suffix start until a word boundary is found.
            while (suffixStart < suffixLength) {
                final int c = codePointAt(suffix, suffixStart);
                if (isWordBoundary(suffix, suffixStart, c)) break;
                suffixStart += charCount(c);
            }
        }
        /*
         * At this point we got the final prefix and suffix to use. If the prefix or suffix is empty,
         * trim whitespaces or '_' character. For example if the suffix is "_velocity" and no prefix,
         * return "velocity" without leading "_" character.
         */
        if (prefixLength == 0) {
            while (suffixStart < suffixLength) {
                final int c = codePointAt(suffix, suffixStart);
                if (isLetterOrDigit(c)) {
                    return suffix.subSequence(suffixStart, suffixLength); // Skip leading ignorable characters in suffix.
                }
                suffixStart += charCount(c);
            }
            return "";
        }
        if (suffixStart >= suffixLength) {
            while (prefixLength > 0) {
                final int c = codePointBefore(prefix, prefixLength);
                if (isLetterOrDigit(c)) {
                    return prefix.subSequence(0, prefixLength); // Skip trailing ignorable characters in prefix.
                }
                prefixLength -= charCount(c);
            }
            return "";
        }
        /*
         * All special cases have been examined. Return the concatenation of (possibly shortened)
         * common prefix and suffix.
         */
        final StringBuilder buffer = new StringBuilder(prefixLength + suffixLength).append(prefix);
        final int c1 = codePointBefore(prefix, prefixLength);
        final int c2 = codePointAt(suffix, suffixStart);
        if (isLetterOrDigit(c1) && isLetterOrDigit(c2)) {
            if (!Character.isUpperCase(c2) || !isLowerCase(c1)) {
                buffer.append(' '); // Keep a separator between two words (except if CamelCase is used).
            }
        } else if (c1 == c2) {
            suffixStart += charCount(c2); // Avoid repeating '_' in e.g. "baroclinic_<removed>_velocity".
        }
        return buffer.append(suffix, suffixStart, suffixLength).toString();
    }
/**
* Returns {@code true} if the character {@code c} is the beginning of a word or a non-word character.
* For example this method returns {@code true} if {@code c} is {@code '_'} in {@code "snake_case"} or
* {@code "C"} in {@code "CamelCase"}.
*
* @param s the character sequence from which the {@code c} character has been obtained.
* @param i the index in {@code s} where the {@code c} character has been obtained.
* @param c the code point in {@code s} as index {@code i}.
* @return whether the given character is the beginning of a word or a non-word character.
*/
private static boolean isWordBoundary(final CharSequence s, final int i, final int c) {
if (!isLetterOrDigit(c)) return true;
if (!Character.isUpperCase(c)) return false;
return (i <= 0 || isLowerCase(codePointBefore(s, i)));
}
/**
* Returns the token starting at the given offset in the given text. For the purpose of this
* method, a "token" is any sequence of consecutive characters of the same type, as defined
* below.
*
* <p>Let define <var>c</var> as the first non-blank character located at an index equals or
* greater than the given offset. Then the characters that are considered of the same type
* are:</p>
*
* <ul>
* <li>If <var>c</var> is a
* {@linkplain Character#isUnicodeIdentifierStart(int) Unicode identifier start},
* then any following characters that are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part}.</li>
* <li>Otherwise any character for which {@link Character#getType(int)} returns
* the same value than for <var>c</var>.</li>
* </ul>
*
* @param text the text for which to get the token.
* @param fromIndex index of the fist character to consider in the given text.
* @return a sub-sequence of {@code text} starting at the given offset, or an empty string
* if there is no non-blank character at or after the given offset.
* @throws NullPointerException if the {@code text} argument is null.
*/
public static CharSequence token(final CharSequence text, int fromIndex) {
final int length = text.length();
int upper = fromIndex;
/*
* Skip whitespaces. At the end of this loop,
* 'c' will be the first non-blank character.
*/
int c;
do {
if (upper >= length) return "";
c = codePointAt(text, upper);
fromIndex = upper;
upper += charCount(c);
}
while (isWhitespace(c));
/*
* Advance over all characters "of the same type".
*/
if (isUnicodeIdentifierStart(c)) {
while (upper<length && isUnicodeIdentifierPart(c = codePointAt(text, upper))) {
upper += charCount(c);
}
} else {
final int type = getType(codePointAt(text, fromIndex));
while (upper<length && getType(c = codePointAt(text, upper)) == type) {
upper += charCount(c);
}
}
return text.subSequence(fromIndex, upper);
}
/**
* Replaces all occurrences of a given string in the given character sequence. If no occurrence of
* {@code toSearch} is found in the given text or if {@code toSearch} is equal to {@code replaceBy},
* then this method returns the {@code text} unchanged.
* Otherwise this method returns a new character sequence with all occurrences replaced by {@code replaceBy}.
*
* <p>This method is similar to {@link String#replace(CharSequence, CharSequence)} except that is accepts
* arbitrary {@code CharSequence} objects. As of Java 10, another difference is that this method does not
* create a new {@code String} if {@code toSearch} is equals to {@code replaceBy}.</p>
*
* @param text the character sequence in which to perform the replacements, or {@code null}.
* @param toSearch the string to replace.
* @param replaceBy the replacement for the searched string.
* @return the given text with replacements applied, or {@code text} if no replacement has been applied,
* or {@code null} if the given text was null
*
* @see String#replace(char, char)
* @see StringBuilders#replace(StringBuilder, String, String)
* @see String#replace(CharSequence, CharSequence)
*
* @since 0.4
*/
public static CharSequence replace(final CharSequence text, final CharSequence toSearch, final CharSequence replaceBy) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
ArgumentChecks.ensureNonNull ("replaceBy", replaceBy);
if (text != null && !toSearch.equals(replaceBy)) {
if (text instanceof String) {
return ((String) text).replace(toSearch, replaceBy);
}
final int length = text.length();
int i = indexOf(text, toSearch, 0, length);
if (i >= 0) {
int p = 0;
final int sl = toSearch.length();
final StringBuilder buffer = new StringBuilder(length + (replaceBy.length() - sl));
do {
buffer.append(text, p, i).append(replaceBy);
i = indexOf(text, toSearch, p = i + sl, length);
} while (i >= 0);
return buffer.append(text, p, length);
}
}
return text;
}
/**
* Copies a sequence of characters in the given {@code char[]} array.
*
* @param src the characters sequence from which to copy characters.
* @param srcOffset index of the first character from {@code src} to copy.
* @param dst the array where to copy the characters.
* @param dstOffset index where to write the first character in {@code dst}.
* @param length number of characters to copy.
*
* @see String#getChars(int, int, char[], int)
* @see StringBuilder#getChars(int, int, char[], int)
* @see StringBuffer#getChars(int, int, char[], int)
* @see CharBuffer#get(char[], int, int)
* @see javax.swing.text.Segment#array
*/
public static void copyChars(final CharSequence src, int srcOffset,
final char[] dst, int dstOffset, int length)
{
ArgumentChecks.ensurePositive("length", length);
if (src instanceof String) {
((String) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuilder) {
((StringBuilder) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuffer) {
((StringBuffer) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof CharBuffer) {
((CharBuffer) src).subSequence(srcOffset, srcOffset + length).get(dst, dstOffset, length);
} else {
/*
* Another candidate could be `javax.swing.text.Segment`, but it
* is probably not worth to introduce a Swing dependency for it.
*/
while (length != 0) {
dst[dstOffset++] = src.charAt(srcOffset++);
length--;
}
}
}
}
| apache/sis | core/sis-utility/src/main/java/org/apache/sis/util/CharSequences.java | Java | apache-2.0 | 106,867 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.test.spec.config.configurationprovider;
import net.jini.config.Configuration;
import net.jini.config.ConfigurationProvider;
import net.jini.config.ConfigurationException;
import net.jini.config.ConfigurationNotFoundException;
/**
 * A {@link Configuration} that can be instantiated but cannot really be used:
 * every entry lookup fails. This configuration provider has no defaults for
 * options, so constructing it with {@code null} options is rejected.
 */
public class ValidConfigurationWithoutDefaults implements Configuration {

    /** Records whether any constructor of this class has been invoked. */
    public static boolean wasCalled = false;

    public ValidConfigurationWithoutDefaults(String[] options)
            throws ConfigurationException {
        this(options, null);
        wasCalled = true;
    }

    public ValidConfigurationWithoutDefaults(String[] options, ClassLoader cl)
            throws ConfigurationException {
        wasCalled = true;
        if (options == null) {
            throw new ConfigurationNotFoundException(
                    "default options are not supplied");
        }
    }

    // The tests are never expected to reach any of the entry lookups below.
    public Object getEntry(String component, String name, Class type)
            throws ConfigurationException {
        throw new AssertionError();
    }

    public Object getEntry(String component, String name, Class type,
            Object defaultValue) throws ConfigurationException {
        throw new AssertionError();
    }

    public Object getEntry(String component, String name, Class type,
            Object defaultValue, Object data) throws ConfigurationException {
        throw new AssertionError();
    }
}
| pfirmstone/JGDMS | qa/src/org/apache/river/test/spec/config/configurationprovider/ValidConfigurationWithoutDefaults.java | Java | apache-2.0 | 2,339 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import org.apache.camel.util.concurrent.CamelThreadFactory;
/**
 * A builder to create Netty {@link io.netty.channel.EventLoopGroup} which can be used for sharing worker pools
 * with multiple Netty {@link NettyServerBootstrapFactory} server bootstrap configurations.
 */
public final class NettyWorkerPoolBuilder {

    // Thread name passed to CamelThreadFactory; defaults to "NettyWorker".
    private String name = "NettyWorker";
    // Thread name pattern passed to CamelThreadFactory.
    private String pattern;
    // Number of worker threads; when <= 0, NettyHelper.DEFAULT_IO_THREADS is used instead.
    private int workerCount;
    // When true, build() creates an epoll-based group instead of an NIO-based one.
    private boolean nativeTransport;
    // volatile: assigned by build() and cleared by destroy().
    private volatile EventLoopGroup workerPool;

    /** Sets the thread name used by the worker pool's thread factory. */
    public void setName(String name) {
        this.name = name;
    }

    /** Sets the thread name pattern used by the worker pool's thread factory. */
    public void setPattern(String pattern) {
        this.pattern = pattern;
    }

    /** Sets the number of worker threads (a value of 0 or less selects the default). */
    public void setWorkerCount(int workerCount) {
        this.workerCount = workerCount;
    }

    /** Sets whether to use the native (epoll) transport instead of NIO. */
    public void setNativeTransport(boolean nativeTransport) {
        this.nativeTransport = nativeTransport;
    }

    /** Fluent variant of {@link #setName(String)}. */
    public NettyWorkerPoolBuilder withName(String name) {
        setName(name);
        return this;
    }

    /** Fluent variant of {@link #setPattern(String)}. */
    public NettyWorkerPoolBuilder withPattern(String pattern) {
        setPattern(pattern);
        return this;
    }

    /** Fluent variant of {@link #setWorkerCount(int)}. */
    public NettyWorkerPoolBuilder withWorkerCount(int workerCount) {
        setWorkerCount(workerCount);
        return this;
    }

    /** Fluent variant of {@link #setNativeTransport(boolean)}. */
    public NettyWorkerPoolBuilder withNativeTransport(boolean nativeTransport) {
        setNativeTransport(nativeTransport);
        return this;
    }

    /**
     * Creates a new worker pool.
     * The created pool is also remembered so that {@link #destroy()} can shut it down.
     */
    public EventLoopGroup build() {
        int count = workerCount > 0 ? workerCount : NettyHelper.DEFAULT_IO_THREADS;
        if (nativeTransport) {
            workerPool = new EpollEventLoopGroup(count, new CamelThreadFactory(pattern, name, false));
        } else {
            workerPool = new NioEventLoopGroup(count, new CamelThreadFactory(pattern, name, false));
        }
        return workerPool;
    }

    /**
     * Shutdown the created worker pool
     */
    public void destroy() {
        if (workerPool != null) {
            workerPool.shutdownGracefully();
            workerPool = null;
        }
    }
}
| Fabryprog/camel | components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyWorkerPoolBuilder.java | Java | apache-2.0 | 3,053 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common.params;
import java.util.EnumSet;
import java.util.Locale;
import org.apache.solr.common.SolrException;
/** Facet parameters */
public interface FacetParams {
  /** Should facet counts be calculated? */
  public static final String FACET = "facet";
  /**
   * Numeric option indicating the maximum number of threads to be used in counting facet field
   * values
   */
  public static final String FACET_THREADS = FACET + ".threads";
  /** What method should be used to do the faceting */
  public static final String FACET_METHOD = FACET + ".method";
  /**
   * Value for FACET_METHOD param to indicate that Solr should enumerate over terms in a field to
   * calculate the facet counts.
   */
  public static final String FACET_METHOD_enum = "enum";
  /**
   * Value for FACET_METHOD param to indicate that Solr should enumerate over documents and count up
   * terms by consulting an uninverted representation of the field values (such as the FieldCache
   * used for sorting).
   */
  public static final String FACET_METHOD_fc = "fc";
  /** Value for FACET_METHOD param, like FACET_METHOD_fc but counts per-segment. */
  public static final String FACET_METHOD_fcs = "fcs";
  /** Value for FACET_METHOD param to indicate that Solr should use an UnInvertedField */
  public static final String FACET_METHOD_uif = "uif";
  /**
   * Any Lucene formatted queries the user would like to use for Facet Constraint Counts
   * (multi-value)
   */
  public static final String FACET_QUERY = FACET + ".query";
  /**
   * Any field whose terms the user wants to enumerate over for Facet Constraint Counts
   * (multi-value)
   */
  public static final String FACET_FIELD = FACET + ".field";
  /** The offset into the list of facets. Can be overridden on a per field basis. */
  public static final String FACET_OFFSET = FACET + ".offset";
  /**
   * Numeric option indicating the maximum number of facet field counts be included in the response
   * for each field - in descending order of count. Can be overridden on a per field basis.
   */
  public static final String FACET_LIMIT = FACET + ".limit";
  /**
   * Numeric option indicating the minimum number of hits before a facet should be included in the
   * response. Can be overridden on a per field basis.
   */
  public static final String FACET_MINCOUNT = FACET + ".mincount";
  /**
   * Boolean option indicating whether facet field counts of "0" should be included in the response.
   * Can be overridden on a per field basis.
   */
  public static final String FACET_ZEROS = FACET + ".zeros";
  /**
   * Boolean option indicating whether the response should include a facet field count for all
   * records which have no value for the facet field. Can be overridden on a per field basis.
   */
  public static final String FACET_MISSING = FACET + ".missing";
  /** Common prefix for the distributed over-request parameters below. */
  static final String FACET_OVERREQUEST = FACET + ".overrequest";
  /**
   * The percentage to over-request by when performing initial distributed requests.
   *
   * <p>default value is 1.5
   */
  public static final String FACET_OVERREQUEST_RATIO = FACET_OVERREQUEST + ".ratio";
  /**
   * An additional amount to over-request by when performing initial distributed requests. This
   * value will be added after accounting for the over-request ratio.
   *
   * <p>default value is 10
   */
  public static final String FACET_OVERREQUEST_COUNT = FACET_OVERREQUEST + ".count";
  /**
   * Comma separated list of fields to pivot
   *
   * <p>example: author,type (for types by author / types within author)
   */
  public static final String FACET_PIVOT = FACET + ".pivot";
  /**
   * Minimum number of docs that need to match to be included in the sublist
   *
   * <p>default value is 1
   */
  public static final String FACET_PIVOT_MINCOUNT = FACET_PIVOT + ".mincount";
  /**
   * String option: "count" causes facets to be sorted by the count, "index" results in index order.
   */
  public static final String FACET_SORT = FACET + ".sort";
  public static final String FACET_SORT_COUNT = "count";
  /** Legacy (boolean) spelling for sorting facets by count. */
  public static final String FACET_SORT_COUNT_LEGACY = "true";
  public static final String FACET_SORT_INDEX = "index";
  /** Legacy (boolean) spelling for sorting facets in index order. */
  public static final String FACET_SORT_INDEX_LEGACY = "false";
  /** Only return constraints of a facet field with the given prefix. */
  public static final String FACET_PREFIX = FACET + ".prefix";
  /** Only return constraints of a facet field containing the given string. */
  public static final String FACET_CONTAINS = FACET + ".contains";
  /** Only return constraints of a facet field containing the given string. */
  public static final String FACET_MATCHES = FACET + ".matches";
  /** If using facet contains, ignore case when comparing values. */
  public static final String FACET_CONTAINS_IGNORE_CASE = FACET_CONTAINS + ".ignoreCase";
  /** Only return constraints of a facet field excluding the given string. */
  public static final String FACET_EXCLUDETERMS = FACET + ".excludeTerms";
  /**
   * When faceting by enumerating the terms in a field, only use the filterCache for terms with a df
   * &gt;= to this parameter.
   */
  public static final String FACET_ENUM_CACHE_MINDF = FACET + ".enum.cache.minDf";
  /**
   * A boolean parameter that caps the facet counts at 1. With this set, a returned count will only
   * be 0 or 1. For apps that don't need the count, this should be an optimization
   */
  public static final String FACET_EXISTS = FACET + ".exists";
  /**
   * Any field whose terms the user wants to enumerate over for Facet Constraint Counts (multi-value)
   */
  public static final String FACET_DATE = FACET + ".date";
  /**
   * Date string indicating the starting point for a date facet range. Can be overridden on a per
   * field basis.
   */
  public static final String FACET_DATE_START = FACET_DATE + ".start";
  /**
   * Date string indicating the ending point for a date facet range. Can be overridden on a per
   * field basis.
   */
  public static final String FACET_DATE_END = FACET_DATE + ".end";
  /**
   * Date Math string indicating the interval of sub-ranges for a date facet range. Can be
   * overridden on a per field basis.
   */
  public static final String FACET_DATE_GAP = FACET_DATE + ".gap";
  /**
   * Boolean indicating how counts should be computed if the range between 'start' and 'end' is not
   * evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end'
   * point will use the exact endpoint specified -- this includes the 'between' and 'after' counts
   * as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used
   * to compute the effective endpoint closest to the 'end' param which results in the range between
   * 'start' and 'end' being evenly divisible by 'gap'.
   *
   * <p>The default is false.
   *
   * <p>Can be overridden on a per field basis.
   */
  public static final String FACET_DATE_HARD_END = FACET_DATE + ".hardend";
  /**
   * String indicating what "other" ranges should be computed for a date facet range (multi-value).
   *
   * <p>Can be overridden on a per field basis.
   *
   * @see FacetRangeOther
   */
  public static final String FACET_DATE_OTHER = FACET_DATE + ".other";
  /**
   * Multivalued string indicating what rules should be applied to determine when the ranges
   * generated for date faceting should be inclusive or exclusive of their end points.
   *
   * <p>The default value if none are specified is: [lower,upper,edge] <i>(NOTE: This is different
   * than FACET_RANGE_INCLUDE)</i>
   *
   * <p>Can be overridden on a per field basis.
   *
   * @see FacetRangeInclude
   * @see #FACET_RANGE_INCLUDE
   */
  public static final String FACET_DATE_INCLUDE = FACET_DATE + ".include";
  /**
   * Any numerical field whose terms the user wants to enumerate over Facet Constraint Counts for
   * selected ranges.
   */
  public static final String FACET_RANGE = FACET + ".range";
  /**
   * Number indicating the starting point for a numerical range facet. Can be overridden on a per
   * field basis.
   */
  public static final String FACET_RANGE_START = FACET_RANGE + ".start";
  /**
   * Number indicating the ending point for a numerical range facet. Can be overridden on a per
   * field basis.
   */
  public static final String FACET_RANGE_END = FACET_RANGE + ".end";
  /**
   * Number indicating the interval of sub-ranges for a numerical facet range. Can be overridden on
   * a per field basis.
   */
  public static final String FACET_RANGE_GAP = FACET_RANGE + ".gap";
  /**
   * Boolean indicating how counts should be computed if the range between 'start' and 'end' is not
   * evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end'
   * point will use the exact endpoint specified -- this includes the 'between' and 'after' counts
   * as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used
   * to compute the effective endpoint closest to the 'end' param which results in the range between
   * 'start' and 'end' being evenly divisible by 'gap'.
   *
   * <p>The default is false.
   *
   * <p>Can be overridden on a per field basis.
   */
  public static final String FACET_RANGE_HARD_END = FACET_RANGE + ".hardend";
  /**
   * String indicating what "other" ranges should be computed for a numerical range facet
   * (multi-value). Can be overridden on a per field basis.
   */
  public static final String FACET_RANGE_OTHER = FACET_RANGE + ".other";
  /**
   * Multivalued string indicating what rules should be applied to determine when the ranges
   * generated for numeric faceting should be inclusive or exclusive of their end points.
   *
   * <p>The default value if none are specified is: lower
   *
   * <p>Can be overridden on a per field basis.
   *
   * @see FacetRangeInclude
   */
  public static final String FACET_RANGE_INCLUDE = FACET_RANGE + ".include";
  /**
   * String indicating the method to use to resolve range facets.
   *
   * <p>Can be overridden on a per field basis.
   *
   * @see FacetRangeMethod
   */
  public static final String FACET_RANGE_METHOD = FACET_RANGE + ".method";
  /** Any field whose values the user wants to enumerate as explicit intervals of terms. */
  public static final String FACET_INTERVAL = FACET + ".interval";
  /** Set of terms for a single interval to facet on. */
  public static final String FACET_INTERVAL_SET = FACET_INTERVAL + ".set";
  /**
   * A spatial RPT field to generate a 2D "heatmap" (grid of facet counts) on. Just like the other
   * faceting types, this may include a 'key' or local-params to facet multiple times. All
   * parameters with this suffix can be overridden on a per-field basis.
   */
  public static final String FACET_HEATMAP = "facet.heatmap";
  /** The format of the heatmap: either png or ints2D (default). */
  public static final String FACET_HEATMAP_FORMAT = FACET_HEATMAP + ".format";
  /**
   * The region the heatmap should minimally enclose. It defaults to the world if not set. The
   * format can either be a minimum to maximum point range format:
   *
   * <pre>["-150 10" TO "-100 30"]</pre>
   *
   * (the first is bottom-left and second is bottom-right, both of which are parsed as points are
   * parsed). OR, any WKT can be provided and its bounding box will be taken.
   */
  public static final String FACET_HEATMAP_GEOM = FACET_HEATMAP + ".geom";
  /**
   * Specify the heatmap grid level explicitly, instead of deriving it via distErr or distErrPct.
   */
  public static final String FACET_HEATMAP_LEVEL = FACET_HEATMAP + ".gridLevel";
  /**
   * Used to determine the heatmap grid level to compute, defaulting to 0.15. It has the same
   * interpretation of distErrPct when searching on RPT, but relative to the shape in 'bbox'. It's a
   * fraction (not a %) of the radius of the shape that grid squares must fit into without
   * exceeding. &gt; 0 and &lt;= 0.5. Mutually exclusive with distErr &amp; gridLevel.
   */
  public static final String FACET_HEATMAP_DIST_ERR_PCT = FACET_HEATMAP + ".distErrPct";
  /**
   * Used to determine the heatmap grid level to compute (optional). It has the same interpretation
   * of maxDistErr or distErr with RPT. It's an absolute distance (in units of what's specified on
   * the field type) that a grid square must maximally fit into (width &amp; height). It can be used
   * to more explicitly specify the maximum grid square size without knowledge of what particular
   * grid levels translate to. This can in turn be used with knowledge of the size of 'bbox' to get
   * a target minimum number of grid cells. Mutually exclusive with distErrPct &amp; gridLevel.
   */
  public static final String FACET_HEATMAP_DIST_ERR = FACET_HEATMAP + ".distErr";
  /**
   * The maximum number of cells (grid squares) the client is willing to handle. If this limit would
   * be exceeded, we throw an error instead. Defaults to 100k.
   */
  public static final String FACET_HEATMAP_MAX_CELLS = FACET_HEATMAP + ".maxCells";
  /**
   * An enumeration of the legal values for {@link #FACET_RANGE_OTHER} and {@link #FACET_DATE_OTHER}
   * ...
   *
   * <ul>
   *   <li>before = the count of matches before the start
   *   <li>after = the count of matches after the end
   *   <li>between = the count of all matches between start and end
   *   <li>all = all of the above (default value)
   *   <li>none = no additional info requested
   * </ul>
   *
   * @see #FACET_RANGE_OTHER
   * @see #FACET_DATE_OTHER
   */
  public enum FacetRangeOther {
    BEFORE,
    AFTER,
    BETWEEN,
    ALL,
    NONE;
    // Parameter values are the lower-case names of the constants.
    @Override
    public String toString() {
      return super.toString().toLowerCase(Locale.ROOT);
    }
    /** Parses a (case-insensitive) label into a constant, throwing a 400 error if invalid. */
    public static FacetRangeOther get(String label) {
      try {
        return valueOf(label.toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST,
            label + " is not a valid type of 'other' range facet information",
            e);
      }
    }
  }
  /**
   * An enumeration of the legal values for {@link #FACET_DATE_INCLUDE} and {@link
   * #FACET_RANGE_INCLUDE} <br>
   *
   * <ul>
   *   <li>lower = all gap based ranges include their lower bound
   *   <li>upper = all gap based ranges include their upper bound
   *   <li>edge = the first and last gap ranges include their edge bounds (ie: lower for the first
   *       one, upper for the last one) even if the corresponding upper/lower option is not
   *       specified
   *   <li>outer = the BEFORE and AFTER ranges should be inclusive of their bounds, even if the
   *       first or last ranges already include those boundaries.
   *   <li>all = shorthand for lower, upper, edge, and outer
   * </ul>
   *
   * @see #FACET_DATE_INCLUDE
   * @see #FACET_RANGE_INCLUDE
   */
  public enum FacetRangeInclude {
    ALL,
    LOWER,
    UPPER,
    EDGE,
    OUTER;
    // Parameter values are the lower-case names of the constants.
    @Override
    public String toString() {
      return super.toString().toLowerCase(Locale.ROOT);
    }
    /** Parses a (case-insensitive) label into a constant, throwing a 400 error if invalid. */
    public static FacetRangeInclude get(String label) {
      try {
        return valueOf(label.toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST,
            label + " is not a valid type of for range 'include' information",
            e);
      }
    }
    /**
     * Convenience method for parsing the param value according to the correct semantics and
     * applying the default of "LOWER"
     */
    public static EnumSet<FacetRangeInclude> parseParam(final String[] param) {
      // short circuit for default behavior
      if (null == param || 0 == param.length) return EnumSet.of(LOWER);
      // build up set containing whatever is specified
      final EnumSet<FacetRangeInclude> include = EnumSet.noneOf(FacetRangeInclude.class);
      for (final String o : param) {
        include.add(FacetRangeInclude.get(o));
      }
      // if set contains all, then we're back to short-circuiting
      if (include.contains(FacetRangeInclude.ALL)) return EnumSet.allOf(FacetRangeInclude.class);
      // use whatever we've got.
      return include;
    }
  }
  /**
   * An enumeration of the legal values for {@link #FACET_RANGE_METHOD}
   *
   * <ul>
   *   <li>filter =
   *   <li>dv =
   * </ul>
   *
   * @see #FACET_RANGE_METHOD
   */
  public enum FacetRangeMethod {
    FILTER,
    DV;
    // Parameter values are the lower-case names of the constants.
    @Override
    public String toString() {
      return super.toString().toLowerCase(Locale.ROOT);
    }
    /** Parses a (case-insensitive) label into a constant, throwing a 400 error if invalid. */
    public static FacetRangeMethod get(String label) {
      try {
        return valueOf(label.toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST,
            label + " is not a valid method for range faceting",
            e);
      }
    }
    /** @return the method used when none is specified ({@link #FILTER}). */
    public static FacetRangeMethod getDefault() {
      return FILTER;
    }
  }
}
| apache/solr | solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java | Java | apache-2.0 | 17,854 |
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.morph.commons;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.textocat.textokit.morph.dictionary.resource.GramModel;
import com.textocat.textokit.morph.fs.Word;
import com.textocat.textokit.morph.fs.Wordform;
import com.textocat.textokit.postagger.MorphCasUtils;
import java.util.BitSet;
import java.util.Set;
import static com.textocat.textokit.morph.commons.PunctuationUtils.punctuationTagMap;
import static com.textocat.textokit.morph.model.MorphConstants.*;
/**
 * EXPERIMENTAL <br>
 * EXPERIMENTAL <br>
 * EXPERIMENTAL
 *
 * <p>Static helpers for working with morphological tags and gram bit sets.</p>
 *
 * @author Rinat Gareev
 */
public class TagUtils {
    // Parts of speech forming closed word classes (pronouns, prepositions, conjunctions, particles).
    private static final Set<String> closedPosSet = ImmutableSet.of(NPRO, Apro, PREP, CONJ, PRCL);
    /**
     * @param gm the gram model used to resolve grammeme names to bit indexes
     * @return function that returns true if the given gram bits represents a
     *         closed class tag
     */
    public static Function<BitSet, Boolean> getClosedClassIndicator(GramModel gm) {
        // initialize mask
        final BitSet closedClassTagsMask = new BitSet();
        for (String cpGram : closedPosSet) {
            closedClassTagsMask.set(gm.getGrammemNumId(cpGram));
        }
        //
        return new Function<BitSet, Boolean>() {
            @Override
            public Boolean apply(BitSet _wfBits) {
                // clone before and-ing so the caller's bit set is not mutated
                BitSet wfBits = (BitSet) _wfBits.clone();
                wfBits.and(closedClassTagsMask);
                // a non-empty intersection means at least one closed-class grammeme is present
                return !wfBits.isEmpty();
            }
        };
    }
    // FIXME refactor hard-coded dependency on a tag mapper implementation
    public static boolean isClosedClassTag(String tag) {
        // A tag is closed-class if it is a punctuation tag, or if the grammemes
        // parsed from it intersect the closed part-of-speech set.
        return closedClassPunctuationTags.contains(tag)
                || !Sets.intersection(
                GramModelBasedTagMapper.parseTag(tag), closedPosSet)
                .isEmpty();
    }
    /**
     * Maps the literal string "null" (and a {@code null} reference, since
     * {@code String.valueOf(null)} is "null") to {@code null}; returns the tag unchanged otherwise.
     */
    public static String postProcessExternalTag(String tag) {
        return !"null".equals(String.valueOf(tag)) ? tag : null;
    }
    // All punctuation tags are treated as closed-class.
    public static final Set<String> closedClassPunctuationTags = ImmutableSet
            .copyOf(punctuationTagMap.values());
    /** @return function extracting the POS tag of the single wordform of a {@link Word}. */
    public static final Function<Word, String> tagFunction() {
        return tagFunction;
    }
    private static final Function<Word, String> tagFunction = new Function<Word, String>() {
        @Override
        public String apply(Word word) {
            if (word == null) {
                return null;
            }
            // Requires that the word carries exactly one Wordform annotation.
            Wordform wf = MorphCasUtils.requireOnlyWordform(word);
            return wf.getPos();
        }
    };
    // Non-instantiable utility class.
    private TagUtils() {
    }
}
| textocat/textokit-core | Textokit.PosTagger.API/src/main/java/com/textocat/textokit/morph/commons/TagUtils.java | Java | apache-2.0 | 3,235 |
package org.eclipse.jetty.server.handler;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.junit.AfterClass;
/**
 * Base class for ConnectHandler tests: manages the lifecycle of a target server and an
 * intermediate CONNECT proxy, and provides a simplified HTTP/1.1 response parser for
 * use in assertions.
 *
 * @version $Revision$ $Date$
 */
public abstract class AbstractConnectHandlerTest
{
    // Compiled once instead of on every call/header line: Pattern instances are
    // immutable and thread-safe, and the original code recompiled the header
    // pattern inside the header-reading loop.
    private static final Pattern RESPONSE_LINE_PATTERN = Pattern.compile("HTTP/1\\.1\\s+(\\d+)");
    private static final Pattern HEADER_PATTERN = Pattern.compile("([^:]+):\\s*(.*)");

    protected static Server server;
    protected static Connector serverConnector;
    protected static Server proxy;
    protected static Connector proxyConnector;

    /** Starts the target server with the given connector and handler. */
    protected static void startServer(Connector connector, Handler handler) throws Exception
    {
        server = new Server();
        serverConnector = connector;
        server.addConnector(serverConnector);
        server.setHandler(handler);
        server.start();
    }

    /** Starts the intermediate proxy with a ConnectHandler on a NIO connector. */
    protected static void startProxy() throws Exception
    {
        proxy = new Server();
        proxyConnector = new SelectChannelConnector();
        proxy.addConnector(proxyConnector);
        proxy.setHandler(new ConnectHandler());
        proxy.start();
    }

    @AfterClass
    public static void stop() throws Exception
    {
        stopProxy();
        stopServer();
    }

    /** Stops the target server and waits for its threads to terminate. */
    protected static void stopServer() throws Exception
    {
        server.stop();
        server.join();
    }

    /** Stops the proxy and waits for its threads to terminate. */
    protected static void stopProxy() throws Exception
    {
        proxy.stop();
        proxy.join();
    }

    /**
     * Reads one HTTP response from the given reader.
     * <p>
     * Simplified parser: only content-length and chunked bodies are supported
     * (no trailers); header names and values are stored lower-cased.
     *
     * @param reader the reader positioned at the response line
     * @return the parsed response
     * @throws IOException  if reading from the reader fails
     * @throws EOFException if the stream ends before a response line is read
     */
    protected Response readResponse(BufferedReader reader) throws IOException
    {
        // Simplified parser for HTTP responses
        String line = reader.readLine();
        if (line == null)
            throw new EOFException();

        Matcher responseLine = RESPONSE_LINE_PATTERN.matcher(line);
        assertTrue(responseLine.lookingAt());
        String code = responseLine.group(1);

        // LinkedHashMap preserves the header order for easier debugging.
        Map<String, String> headers = new LinkedHashMap<String, String>();
        while ((line = reader.readLine()) != null)
        {
            if (line.trim().length() == 0)
                break;

            Matcher header = HEADER_PATTERN.matcher(line);
            assertTrue(header.lookingAt());
            String headerName = header.group(1);
            String headerValue = header.group(2);
            headers.put(headerName.toLowerCase(), headerValue.toLowerCase());
        }

        StringBuilder body = new StringBuilder();
        if (headers.containsKey("content-length"))
        {
            int length = Integer.parseInt(headers.get("content-length"));
            for (int i = 0; i < length; ++i)
            {
                char c = (char)reader.read();
                body.append(c);
            }
        }
        else if ("chunked".equals(headers.get("transfer-encoding")))
        {
            while ((line = reader.readLine()) != null)
            {
                if ("0".equals(line))
                {
                    // Last chunk: consume the terminating empty line.
                    line = reader.readLine();
                    assertEquals("", line);
                    break;
                }

                // Chunk size is hexadecimal; the chunk data is followed by an empty line.
                int length = Integer.parseInt(line, 16);
                for (int i = 0; i < length; ++i)
                {
                    char c = (char)reader.read();
                    body.append(c);
                }
                line = reader.readLine();
                assertEquals("", line);
            }
        }
        return new Response(code, headers, body.toString().trim());
    }

    /** Opens a client socket to the proxy with a 5 second read timeout. */
    protected Socket newSocket() throws IOException
    {
        Socket socket = new Socket("localhost", proxyConnector.getLocalPort());
        socket.setSoTimeout(5000);
        return socket;
    }

    /** Immutable holder for a parsed HTTP response: status code, headers and body. */
    protected class Response
    {
        private final String code;
        private final Map<String, String> headers;
        private final String body;

        private Response(String code, Map<String, String> headers, String body)
        {
            this.code = code;
            this.headers = headers;
            this.body = body;
        }

        public String getCode()
        {
            return code;
        }

        public Map<String, String> getHeaders()
        {
            return headers;
        }

        public String getBody()
        {
            return body;
        }

        @Override
        public String toString()
        {
            StringBuilder builder = new StringBuilder();
            builder.append(code).append("\r\n");
            for (Map.Entry<String, String> entry : headers.entrySet())
                builder.append(entry.getKey()).append(": ").append(entry.getValue()).append("\r\n");
            builder.append("\r\n");
            builder.append(body);
            return builder.toString();
        }
    }
}
| thomasbecker/jetty-7 | jetty-server/src/test/java/org/eclipse/jetty/server/handler/AbstractConnectHandlerTest.java | Java | apache-2.0 | 5,030 |
package org.apereo.cas.memcached.kryo;
import org.apereo.cas.authentication.AcceptUsersAuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationBuilder;
import org.apereo.cas.authentication.BasicCredentialMetaData;
import org.apereo.cas.authentication.DefaultAuthenticationBuilder;
import org.apereo.cas.authentication.DefaultAuthenticationHandlerExecutionResult;
import org.apereo.cas.authentication.UsernamePasswordCredential;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.mock.MockServiceTicket;
import org.apereo.cas.mock.MockTicketGrantingTicket;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.ticket.TicketGrantingTicketImpl;
import org.apereo.cas.ticket.support.MultiTimeUseOrTimeoutExpirationPolicy;
import org.apereo.cas.ticket.support.NeverExpiresExpirationPolicy;
import com.esotericsoftware.kryo.KryoException;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.junit.Test;
import javax.security.auth.login.AccountNotFoundException;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.junit.Assert.*;
/**
* Unit test for {@link CasKryoTranscoder} class.
*
* @author Marvin S. Addison
* @since 3.0.0
*/
@Slf4j
public class CasKryoTranscoderTests {
    // Ticket identifiers use realistic maximum-length values to exercise
    // string encoding.
    private static final String ST_ID = "ST-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK";
    private static final String TGT_ID = "TGT-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK-cas1";
    private static final String USERNAME = "handymanbob";
    private static final String PASSWORD = "foo";
    private static final String NICKNAME_KEY = "nickname";
    private static final String NICKNAME_VALUE = "bob";
    // Transcoder under test, configured once in the constructor.
    private final CasKryoTranscoder transcoder;
    // Attribute map ({nickname=bob}) shared by most fixtures below.
    private final Map<String, Object> principalAttributes;
    public CasKryoTranscoderTests() {
        // The mock ticket classes are not part of the default Kryo
        // registrations, so they are registered explicitly here.
        val classesToRegister = new ArrayList<Class>();
        classesToRegister.add(MockServiceTicket.class);
        classesToRegister.add(MockTicketGrantingTicket.class);
        this.transcoder = new CasKryoTranscoder(new CasKryoPool(classesToRegister));
        this.principalAttributes = new HashMap<>();
        this.principalAttributes.put(NICKNAME_KEY, NICKNAME_VALUE);
    }
    // Round-trips a fully-populated TicketGrantingTicketImpl (credentials,
    // attributes, failures, successes) plus a service ticket granted from it.
    @Test
    public void verifyEncodeDecodeTGTImpl() {
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final AuthenticationBuilder bldr = new DefaultAuthenticationBuilder(new DefaultPrincipalFactory()
            .createPrincipal("user", new HashMap<>(this.principalAttributes)));
        bldr.setAttributes(new HashMap<>(this.principalAttributes));
        bldr.setAuthenticationDate(ZonedDateTime.now());
        bldr.addCredential(new BasicCredentialMetaData(userPassCredential));
        bldr.addFailure("error", new AccountNotFoundException());
        bldr.addSuccess("authn", new DefaultAuthenticationHandlerExecutionResult(
            new AcceptUsersAuthenticationHandler(""),
            new BasicCredentialMetaData(userPassCredential)));
        final TicketGrantingTicket expectedTGT = new TicketGrantingTicketImpl(TGT_ID,
            RegisteredServiceTestUtils.getService(),
            null, bldr.build(),
            new NeverExpiresExpirationPolicy());
        val ticket = expectedTGT.grantServiceTicket(ST_ID,
            RegisteredServiceTestUtils.getService(),
            new NeverExpiresExpirationPolicy(), false, true);
        val result1 = transcoder.encode(expectedTGT);
        val resultTicket = transcoder.decode(result1);
        assertEquals(expectedTGT, resultTicket);
        val result2 = transcoder.encode(ticket);
        val resultStTicket1 = transcoder.decode(result2);
        assertEquals(ticket, resultStTicket1);
        // Decoding the same bytes twice verifies the transcoder is reusable.
        val resultStTicket2 = transcoder.decode(result2);
        assertEquals(ticket, resultStTicket2);
    }
    // Round-trips the mock ticket types registered in the constructor.
    @Test
    public void verifyEncodeDecode() {
        val tgt = new MockTicketGrantingTicket(USERNAME);
        val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
        assertEquals(expectedST, transcoder.decode(transcoder.encode(expectedST)));
        val expectedTGT = new MockTicketGrantingTicket(USERNAME);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
        internalProxyTest();
    }
    // Repeats the TGT round-trip to ensure a second encode/decode cycle in the
    // same test run behaves identically.
    private void internalProxyTest() {
        val expectedTGT = new MockTicketGrantingTicket(USERNAME);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableMap() {
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT =
            new MockTicketGrantingTicket(TGT_ID, userPassCredential, new HashMap<>(this.principalAttributes));
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableList() {
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        val values = new ArrayList<String>();
        values.add(NICKNAME_VALUE);
        val newAttributes = new HashMap<String, Object>();
        newAttributes.put(NICKNAME_KEY, new ArrayList<>(values));
        val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithLinkedHashMap() {
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT =
            new MockTicketGrantingTicket(TGT_ID, userPassCredential, new LinkedHashMap<>(this.principalAttributes));
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithListOrderedMap() {
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT =
            new MockTicketGrantingTicket(TGT_ID, userPassCredential, this.principalAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    // Unmodifiable collection wrappers are private JDK classes; these tests
    // verify the Kryo pool can still serialize them as attribute values.
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableSet() {
        val newAttributes = new HashMap<String, Object>();
        val values = new HashSet<String>();
        values.add(NICKNAME_VALUE);
        //CHECKSTYLE:OFF
        newAttributes.put(NICKNAME_KEY, Collections.unmodifiableSet(values));
        //CHECKSTYLE:ON
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithSingleton() {
        val newAttributes = new HashMap<String, Object>();
        newAttributes.put(NICKNAME_KEY, Collections.singleton(NICKNAME_VALUE));
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeTGTWithSingletonMap() {
        val newAttributes = Collections.<String, Object>singletonMap(NICKNAME_KEY, NICKNAME_VALUE);
        val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
        val result = transcoder.encode(expectedTGT);
        assertEquals(expectedTGT, transcoder.decode(result));
        assertEquals(expectedTGT, transcoder.decode(result));
    }
    @Test
    public void verifyEncodeDecodeRegisteredService() {
        val service = RegisteredServiceTestUtils.getRegisteredService("helloworld");
        val result = transcoder.encode(service);
        assertEquals(service, transcoder.decode(result));
        assertEquals(service, transcoder.decode(result));
    }
    @Test
    public void verifySTWithServiceTicketExpirationPolicy() {
        // ServiceTicketExpirationPolicy is not registered with Kryo...
        transcoder.getKryo().getClassResolver().reset();
        val tgt = new MockTicketGrantingTicket(USERNAME);
        val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
        val step
            = new MultiTimeUseOrTimeoutExpirationPolicy.ServiceTicketExpirationPolicy(1, 600);
        expectedST.setExpiration(step);
        val result = transcoder.encode(expectedST);
        assertEquals(expectedST, transcoder.decode(result));
        // Test it a second time - Ensure there's no problem with subsequent de-serializations.
        assertEquals(expectedST, transcoder.decode(result));
    }
    // Encoding a ticket whose expiration policy class was never registered
    // must fail with KryoException (and only KryoException).
    @Test
    public void verifyEncodeDecodeNonRegisteredClass() {
        val tgt = new MockTicketGrantingTicket(USERNAME);
        val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
        // This class is not registered with Kryo
        val step = new UnregisteredServiceTicketExpirationPolicy(1, 600);
        expectedST.setExpiration(step);
        try {
            transcoder.encode(expectedST);
            throw new AssertionError("Unregistered class is not allowed by Kryo");
        } catch (final KryoException e) {
            LOGGER.trace(e.getMessage(), e);
        } catch (final Exception e) {
            throw new AssertionError("Unexpected exception due to not resetting Kryo between de-serializations with unregistered class.");
        }
    }
    /**
     * Class for testing Kryo unregistered class handling.
     */
    private static class UnregisteredServiceTicketExpirationPolicy extends MultiTimeUseOrTimeoutExpirationPolicy {
        private static final long serialVersionUID = -1704993954986738308L;
        /**
         * Instantiates a new Service ticket expiration policy.
         *
         * @param numberOfUses the number of uses
         * @param timeToKillInSeconds the time to kill in seconds
         */
        UnregisteredServiceTicketExpirationPolicy(final int numberOfUses, final long timeToKillInSeconds) {
            super(numberOfUses, timeToKillInSeconds);
        }
    }
}
| robertoschwald/cas | support/cas-server-support-memcached-core/src/test/java/org/apereo/cas/memcached/kryo/CasKryoTranscoderTests.java | Java | apache-2.0 | 12,137 |
package org.apache.velocity.tools.view;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* <p>ToolInfo implementation to handle "primitive" data types.
* It currently supports String, Number, and Boolean data.</p>
*
* <p>An example of data elements specified in your toolbox.xml
* might be:
* <pre>
* <data type="string">
* <key>app_name</key>
* <value>FooWeb Deluxe</value>
* </data>
* <data type="number">
* <key>app_version</key>
* <value>4.2</value>
* </data>
* <data type="boolean">
* <key>debug</key>
* <value>true</value>
* </data>
* <data type="number">
* <key>screen_width</key>
* <value>400</value>
* </data>
* </pre></p>
*
* @author Nathan Bubna
* @deprecated Use {@link org.apache.velocity.tools.config.Data}
* @version $Id: DataInfo.java 651469 2008-04-25 00:46:13Z nbubna $
*/
@Deprecated
public class DataInfo implements ToolInfo
{
    public static final String TYPE_STRING = "string";
    public static final String TYPE_NUMBER = "number";
    public static final String TYPE_BOOLEAN = "boolean";

    private static final int TYPE_ID_STRING = 0;
    private static final int TYPE_ID_NUMBER = 1;
    private static final int TYPE_ID_BOOLEAN = 2;

    private String key = null;
    private int type_id = TYPE_ID_STRING;
    private Object data = null;

    public DataInfo() {}

    /*********************** Mutators *************************/

    public void setKey(String key)
    {
        this.key = key;
    }

    /**
     * Sets the data type for this element. Unknown or {@code null} types
     * fall back to string, matching the original toolbox.xml behavior.
     */
    public void setType(String type)
    {
        if (TYPE_BOOLEAN.equalsIgnoreCase(type))
        {
            this.type_id = TYPE_ID_BOOLEAN;
        }
        else if (TYPE_NUMBER.equalsIgnoreCase(type))
        {
            this.type_id = TYPE_ID_NUMBER;
        }
        else /* if no type or type="string" */
        {
            this.type_id = TYPE_ID_STRING;
        }
    }

    /**
     * Converts the raw string value according to the type set earlier
     * (call {@link #setType} first). Numbers containing a '.' become
     * {@link Double}, others {@link Integer}.
     */
    public void setValue(String value)
    {
        if (type_id == TYPE_ID_BOOLEAN)
        {
            this.data = Boolean.valueOf(value);
        }
        else if (type_id == TYPE_ID_NUMBER)
        {
            // Use valueOf instead of the deprecated boxing constructors.
            if (value.indexOf('.') >= 0)
            {
                this.data = Double.valueOf(value);
            }
            else
            {
                this.data = Integer.valueOf(value);
            }
        }
        else /* type is "string" */
        {
            this.data = value;
        }
    }

    /*********************** Accessors *************************/

    public String getKey()
    {
        return key;
    }

    /** Returns the class name of the converted value, or null if unset. */
    public String getClassname()
    {
        return data != null ? data.getClass().getName() : null;
    }

    /**
     * Returns the data. Always returns the same
     * object since the data is a constant. Initialization
     * data is ignored.
     */
    public Object getInstance(Object initData)
    {
        return data;
    }
}
| fluidinfo/velocity-tools-packaging | src/main/java/org/apache/velocity/tools/view/DataInfo.java | Java | apache-2.0 | 3,790 |
/*
* Licensed to Diennea S.r.l. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Diennea S.r.l. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package docet.engine;
import java.util.Arrays;
/**
*
*
*/
public enum DocetDocFormat {

    TYPE_HTML("html", false),
    TYPE_PDF("pdf", true);

    /** Lower-case format name as it appears in requests. */
    private final String name;
    /** Whether rendering this format must bundle referenced resources. */
    private final boolean includeResources;

    private DocetDocFormat(final String name, final boolean includeResources) {
        this.name = name;
        this.includeResources = includeResources;
    }

    @Override
    public String toString() {
        return this.name;
    }

    public boolean isIncludeResources() {
        return this.includeResources;
    }

    /**
     * Resolves a format from its name ("html" or "pdf").
     *
     * @param name the format name to look up
     * @return the matching format, or {@code null} if none matches
     */
    public static DocetDocFormat parseDocetRequestByName(final String name) {
        return Arrays.stream(DocetDocFormat.values())
            .filter(req -> req.toString().equals(name))
            .findFirst()
            .orElse(null);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http2;
import java.util.logging.Level;
import java.util.logging.LogManager;
import org.junit.Assert;
import org.junit.Test;
/**
* Unit tests for Section 6.1 of
* <a href="https://tools.ietf.org/html/rfc7540">RFC 7540</a>.
* <br>
* The order of tests in this class is aligned with the order of the
* requirements in the RFC.
*/
public class TestHttp2Section_6_1 extends Http2TestBase {

    // Baseline: a valid DATA frame on an open stream is echoed back with the
    // expected window updates, headers and body.
    @Test
    public void testDataFrame() throws Exception {
        http2Connect();

        // Disable overhead protection for window update as it breaks the test
        http2Protocol.setOverheadWindowUpdateThreshold(0);

        sendSimplePostRequest(3, null);
        readSimplePostResponse(false);

        Assert.assertEquals("0-WindowSize-[128]\n" +
                "3-WindowSize-[128]\n" +
                "3-HeadersStart\n" +
                "3-Header-[:status]-[200]\n" +
                "3-Header-[content-length]-[128]\n" +
                "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                "3-HeadersEnd\n" +
                "3-Body-128\n" +
                "3-EndOfStream\n", output.getTrace());
    }

    // Padding bytes must be accepted and are returned to the flow-control
    // window; window updates for padding may arrive in any order.
    @Test
    public void testDataFrameWithPadding() throws Exception {
        LogManager.getLogManager().getLogger("org.apache.coyote").setLevel(Level.ALL);
        LogManager.getLogManager().getLogger("org.apache.tomcat.util.net").setLevel(Level.ALL);
        try {
            http2Connect();

            // Disable overhead protection for window update as it breaks the
            // test
            http2Protocol.setOverheadWindowUpdateThreshold(0);

            byte[] padding = new byte[8];

            sendSimplePostRequest(3, padding);
            readSimplePostResponse(true);

            // The window updates for padding could occur anywhere since they
            // happen on a different thread to the response.
            // The connection window update is always present if there is
            // padding.
            String trace = output.getTrace();
            String paddingWindowUpdate = "0-WindowSize-[9]\n";
            Assert.assertTrue(trace, trace.contains(paddingWindowUpdate));
            trace = trace.replace(paddingWindowUpdate, "");

            // The stream window update may or may not be present depending on
            // timing. Remove it if present.
            if (trace.contains("3-WindowSize-[9]\n")) {
                trace = trace.replace("3-WindowSize-[9]\n", "");
            }

            Assert.assertEquals("0-WindowSize-[119]\n" +
                    "3-WindowSize-[119]\n" +
                    "3-HeadersStart\n" +
                    "3-Header-[:status]-[200]\n" +
                    "3-Header-[content-length]-[119]\n" +
                    "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                    "3-HeadersEnd\n" +
                    "3-Body-119\n" +
                    "3-EndOfStream\n", trace);
        } finally {
            // Restore default log levels so other tests are not flooded.
            LogManager.getLogManager().getLogger("org.apache.coyote").setLevel(Level.INFO);
            LogManager.getLogManager().getLogger("org.apache.tomcat.util.net").setLevel(Level.INFO);
        }
    }

    // RFC 7540 section 6.1: non-zero padding bytes are a protocol error, so
    // the connection must be torn down with GOAWAY.
    @Test
    public void testDataFrameWithNonZeroPadding() throws Exception {
        http2Connect();

        byte[] padding = new byte[8];
        padding[4] = 0x01;

        sendSimplePostRequest(3, padding);
        // May see Window updates depending on timing
        skipWindowSizeFrames();

        String trace = output.getTrace();
        Assert.assertTrue(trace, trace.startsWith("0-Goaway-[3]-[1]-["));
    }

    // RFC 7540 section 6.1: a DATA frame on stream 0 is a connection error.
    @Test
    public void testDataFrameOnStreamZero() throws Exception {
        http2Connect();

        byte[] dataFrame = new byte[10];

        // Header
        // length
        ByteUtil.setThreeBytes(dataFrame, 0, 1);
        // type (0 for data)
        // flags (0)
        // stream (0)
        // payload (0)

        os.write(dataFrame);
        os.flush();

        handleGoAwayResponse(1);
    }

    // RFC 7540 section 6.1: pad length >= payload length is a connection
    // error.
    @Test
    public void testDataFrameTooMuchPadding() throws Exception {
        http2Connect();

        byte[] dataFrame = new byte[10];

        // Header
        // length
        ByteUtil.setThreeBytes(dataFrame, 0, 1);
        // type 0 (data)
        // flags 8 (padded)
        dataFrame[4] = 0x08;
        // stream 3
        ByteUtil.set31Bits(dataFrame, 5, 3);
        // payload (pad length of 1)
        dataFrame[9] = 1;

        os.write(dataFrame);
        os.flush();

        handleGoAwayResponse(1);
    }

    // Zero-length padding is valid: only the 1-byte pad-length field counts
    // against the flow-control window.
    @Test
    public void testDataFrameWithZeroLengthPadding() throws Exception {
        http2Connect();

        // Disable overhead protection for window update as it breaks the test
        http2Protocol.setOverheadWindowUpdateThreshold(0);

        byte[] padding = new byte[0];

        sendSimplePostRequest(3, padding);
        readSimplePostResponse(true);

        // The window updates for padding could occur anywhere since they
        // happen on a different thread to the response.
        // The connection window update is always present if there is
        // padding.
        String trace = output.getTrace();
        String paddingWindowUpdate = "0-WindowSize-[1]\n";
        Assert.assertTrue(trace, trace.contains(paddingWindowUpdate));
        trace = trace.replace(paddingWindowUpdate, "");

        // The stream window update may or may not be present depending on
        // timing. Remove it if present.
        paddingWindowUpdate = "3-WindowSize-[1]\n";
        if (trace.contains(paddingWindowUpdate)) {
            trace = trace.replace(paddingWindowUpdate, "");
        }

        Assert.assertEquals("0-WindowSize-[127]\n" +
                "3-WindowSize-[127]\n" +
                "3-HeadersStart\n" +
                "3-Header-[:status]-[200]\n" +
                "3-Header-[content-length]-[127]\n" +
                "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                "3-HeadersEnd\n" +
                "3-Body-127\n" +
                "3-EndOfStream\n", trace);
    }
}
| apache/tomcat | test/org/apache/coyote/http2/TestHttp2Section_6_1.java | Java | apache-2.0 | 6,884 |
/*
* Javolution - Java(TM) Solution for Real-Time and Embedded Systems
* Copyright (C) 2007 - Javolution (http://javolution.org/)
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software is
* freely granted, provided that this notice is preserved.
*/
package javolution.xml;
import java.io.Serializable;
/**
* <p> This interface identifies classes supporting XML serialization
* (XML serialization is still possible for classes not implementing this
* interface through dynamic {@link XMLBinding} though).</p>
*
* <p> Typically, classes implementing this interface have a protected static
* {@link XMLFormat} holding their default XML representation.
* For example:[code]
* public final class Complex implements XMLSerializable {
*
* // Use the cartesien form for the default XML representation.
* protected static final XMLFormat<Complex> XML = new XMLFormat<Complex>(Complex.class) {
* public Complex newInstance(Class<Complex> cls, InputElement xml) throws XMLStreamException {
* return Complex.valueOf(xml.getAttribute("real", 0.0),
* xml.getAttribute("imaginary", 0.0));
* }
* public void write(Complex complex, OutputElement xml) throws XMLStreamException {
* xml.setAttribute("real", complex.getReal());
* xml.setAttribute("imaginary", complex.getImaginary());
* }
* public void read(InputElement xml, Complex complex) {
* // Immutable, deserialization occurs at creation, ref. newIntance(...)
* }
* };
* ...
* }[/code]</p>
*
* @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
* @version 4.2, April 15, 2007
*/
public interface XMLSerializable extends Serializable {

    // Marker interface only: support for XML serialization is signalled by
    // the type itself, so there are no methods to implement.

}
| mariusj/org.openntf.domino | domino/externals/javolution/src/main/java/javolution/xml/XMLSerializable.java | Java | apache-2.0 | 1,977 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.