code (string, 3-1.04M chars) | repo_name (string, 5-109 chars) | path (string, 6-306 chars) | language (1 distinct value) | license (15 distinct values) | size (int64, 3-1.04M)
---|---|---|---|---|---|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glacier.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.glacier.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* PartListElement JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PartListElementJsonUnmarshaller implements Unmarshaller<PartListElement, JsonUnmarshallerContext> {
public PartListElement unmarshall(JsonUnmarshallerContext context) throws Exception {
PartListElement partListElement = new PartListElement();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("RangeInBytes", targetDepth)) {
context.nextToken();
partListElement.setRangeInBytes(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("SHA256TreeHash", targetDepth)) {
context.nextToken();
partListElement.setSHA256TreeHash(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return partListElement;
}
private static PartListElementJsonUnmarshaller instance;
public static PartListElementJsonUnmarshaller getInstance() {
if (instance == null)
instance = new PartListElementJsonUnmarshaller();
return instance;
}
}
| dagnir/aws-sdk-java | aws-java-sdk-glacier/src/main/java/com/amazonaws/services/glacier/model/transform/PartListElementJsonUnmarshaller.java | Java | apache-2.0 | 2,994 |
package seborama.demo2.kafka.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
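/**
 * Minimal order model for the Kafka demo: Jackson omits empty values on output
 * and ignores unknown properties on input.
 */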
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class Order {
private String id;
private boolean fulfilled;
private boolean dispatched;
private boolean completed;
public void setId(String id) {
this.id = id;
}
public void setFulfilled(Boolean fulfilled) {
this.fulfilled = fulfilled;
}
public void setDispatched(Boolean dispatched) {
this.dispatched = dispatched;
}
public void setCompleted(Boolean completed) {
this.completed = completed;
}
public String getId() {
return id;
}
public Boolean getFulfilled() {
return fulfilled;
}
public Boolean getDispatched() {
return dispatched;
}
public Boolean getCompleted() {
return completed;
}
@Override
public String toString() {
return "Order{" +
"id='" + id + '\'' +
", fulfilled=" + fulfilled +
", dispatched=" + dispatched +
", completed=" + completed +
'}';
}
}
| seborama/demo1-kafka | src/main/java/seborama/demo2/kafka/model/Order.java | Java | apache-2.0 | 1,265 |
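/**
 * Concrete {@link Burger} used in the Builder pattern demo; supplies its own
 * price and display name.
 */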
public class ChickenBurger extends Burger {
@Override
public float price() {
return 50.5f;
}
@Override
public String name() {
return "Chicken Burger";
}
}
| Iamasoldier6/DesignPattern | BuilderPatternDemo/src/ChickenBurger.java | Java | apache-2.0 | 168 |
/*
* Copyright (c) 2017 Otávio Santana and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* Otavio Santana
*/
package org.jnosql.artemis.column.query;
import org.jnosql.artemis.CDIExtension;
import org.jnosql.artemis.model.Person;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import javax.inject.Inject;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
@ExtendWith(CDIExtension.class)
public class DefaultColumnQueryMapperBuilderTest {
@Inject
private ColumnQueryMapperBuilder mapperBuilder;
@Test
public void shouldReturnErrorWhenEntityClassIsNull() {
assertThrows(NullPointerException.class, () -> mapperBuilder.selectFrom(null));
}
@Test
public void shouldReturnSelectFrom() {
ColumnMapperFrom columnFrom = mapperBuilder.selectFrom(Person.class);
assertNotNull(columnFrom);
}
@Test
public void shouldReturnErrorWhenDeleteEntityClassIsNull() {
assertThrows(NullPointerException.class, () -> mapperBuilder.deleteFrom(null));
}
@Test
public void shouldReturnDeleteFrom() {
ColumnMapperDeleteFrom columnDeleteFrom = mapperBuilder.deleteFrom(Person.class);
assertNotNull(columnDeleteFrom);
}
}
| JNOSQL/artemis | artemis-column/src/test/java/org/jnosql/artemis/column/query/DefaultColumnQueryMapperBuilderTest.java | Java | apache-2.0 | 1,793 |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.directio.text;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.hadoop.io.compress.CompressionCodec;
import com.asakusafw.dmdl.directio.util.CharsetUtil;
import com.asakusafw.dmdl.directio.util.ClassName;
import com.asakusafw.dmdl.directio.util.Value;
import com.asakusafw.dmdl.java.emitter.EmitContext;
import com.asakusafw.dmdl.java.util.JavaName;
import com.asakusafw.dmdl.model.BasicTypeKind;
import com.asakusafw.dmdl.semantics.ModelDeclaration;
import com.asakusafw.dmdl.semantics.PropertyDeclaration;
import com.asakusafw.dmdl.semantics.type.BasicType;
import com.asakusafw.dmdl.util.AttributeUtil;
import com.asakusafw.runtime.io.text.TextFormat;
import com.asakusafw.runtime.io.text.TextInput;
import com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormat;
import com.asakusafw.runtime.io.text.driver.FieldDefinition;
import com.asakusafw.runtime.io.text.driver.RecordDefinition;
import com.asakusafw.runtime.io.text.value.BooleanOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ByteOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateTimeOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DecimalOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DoubleOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.FloatOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.IntOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.LongOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ShortOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.StringOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ValueOptionFieldAdapter;
import com.asakusafw.runtime.io.util.InputSplitter;
import com.asakusafw.runtime.io.util.InputSplitters;
import com.asakusafw.runtime.value.StringOption;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.InfixOperator;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.SimpleName;
import com.asakusafw.utils.java.model.syntax.Statement;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
/**
* Generates {@link AbstractTextStreamFormat}.
* @since 0.9.1
*/
public abstract class AbstractTextStreamFormatGenerator {
private static final Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> ADAPTER_TYPES;
static {
Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> map = new EnumMap<>(BasicTypeKind.class);
map.put(BasicTypeKind.BYTE, ByteOptionFieldAdapter.class);
map.put(BasicTypeKind.SHORT, ShortOptionFieldAdapter.class);
map.put(BasicTypeKind.INT, IntOptionFieldAdapter.class);
map.put(BasicTypeKind.LONG, LongOptionFieldAdapter.class);
map.put(BasicTypeKind.FLOAT, FloatOptionFieldAdapter.class);
map.put(BasicTypeKind.DOUBLE, DoubleOptionFieldAdapter.class);
map.put(BasicTypeKind.DECIMAL, DecimalOptionFieldAdapter.class);
map.put(BasicTypeKind.TEXT, StringOptionFieldAdapter.class);
map.put(BasicTypeKind.BOOLEAN, BooleanOptionFieldAdapter.class);
map.put(BasicTypeKind.DATE, DateOptionFieldAdapter.class);
map.put(BasicTypeKind.DATETIME, DateTimeOptionFieldAdapter.class);
ADAPTER_TYPES = map;
}
/**
* The current context.
*/
protected final EmitContext context;
/**
* The target model.
*/
protected final ModelDeclaration model;
private final ModelFactory f;
private final TextFormatSettings formatSettings;
private final TextFieldSettings fieldDefaultSettings;
/**
* Creates a new instance.
* @param context the current context
* @param model the target model
* @param formatSettings the text format settings
* @param fieldDefaultSettings the field default settings
*/
public AbstractTextStreamFormatGenerator(
EmitContext context, ModelDeclaration model,
TextFormatSettings formatSettings, TextFieldSettings fieldDefaultSettings) {
this.context = context;
this.model = model;
this.formatSettings = formatSettings;
this.fieldDefaultSettings = fieldDefaultSettings;
this.f = context.getModelFactory();
}
/**
* Emits an implementation of {@link AbstractTextStreamFormat} class as a Java compilation unit.
* @param description the format description
 * @throws IOException if an I/O error occurs while emitting the compilation unit
*/
protected void emit(String description) throws IOException {
ClassDeclaration decl = f.newClassDeclaration(
new JavadocBuilder(f)
.inline(Messages.getString("AbstractTextStreamFormatGenerator.javadocClassOverview"), //$NON-NLS-1$
d -> d.text(description),
d -> d.linkType(context.resolve(model.getSymbol())))
.toJavadoc(),
new AttributeBuilder(f)
.Public()
.toAttributes(),
context.getTypeName(),
f.newParameterizedType(
context.resolve(AbstractTextStreamFormat.class),
context.resolve(model.getSymbol())),
Collections.emptyList(),
createMembers());
context.emit(decl);
}
private List<? extends TypeBodyDeclaration> createMembers() {
List<TypeBodyDeclaration> results = new ArrayList<>();
results.add(createGetSupportedType());
results.add(createCreateTextFormat());
results.addAll(createCreateRecordDefinition());
createGetInputSplitter().ifPresent(results::add);
createGetCompressionCodecClass().ifPresent(results::add);
createAfterInput().ifPresent(results::add);
createBeforeOutput().ifPresent(results::add);
return results;
}
private MethodDeclaration createGetSupportedType() {
return f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Public()
.toAttributes(),
f.newParameterizedType(
context.resolve(Class.class),
context.resolve(model.getSymbol())),
f.newSimpleName("getSupportedType"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, context.resolve(model.getSymbol()))
.dotClass()
.toReturnStatement()));
}
private MethodDeclaration createCreateTextFormat() {
return f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Public()
.toAttributes(),
context.resolve(TextFormat.class),
f.newSimpleName("createTextFormat"), //$NON-NLS-1$
Collections.emptyList(),
createGetTextFormatInternal());
}
/**
* Returns a body of {@link AbstractTextStreamFormat#getTextFormat()}.
* @return the body statements
*/
protected abstract List<Statement> createGetTextFormatInternal();
private List<MethodDeclaration> createCreateRecordDefinition() {
SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
statements.add(new TypeBuilder(f, context.resolve(RecordDefinition.class))
.method("builder", f.newClassLiteral(context.resolve(model.getSymbol()))) //$NON-NLS-1$
.toLocalVariableDeclaration(
f.newParameterizedType(
context.resolve(RecordDefinition.Builder.class),
context.resolve(model.getSymbol())),
builder));
List<MethodDeclaration> fields = buildRecordDefinition(statements, builder);
statements.add(new ExpressionBuilder(f, builder)
.method("build") //$NON-NLS-1$
.toReturnStatement());
List<MethodDeclaration> results = new ArrayList<>();
results.add(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
f.newParameterizedType(
context.resolve(RecordDefinition.class),
context.resolve(model.getSymbol())),
f.newSimpleName("createRecordDefinition"), //$NON-NLS-1$
Collections.emptyList(),
statements));
results.addAll(fields);
return results;
}
private List<MethodDeclaration> buildRecordDefinition(List<Statement> statements, SimpleName builder) {
formatSettings.getHeaderType().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withHeaderType", resolve(v)) //$NON-NLS-1$
.toStatement()));
formatSettings.getLessInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnLessInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
formatSettings.getMoreInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMoreInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withTrimInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getUnmappableOutputAction().ifPresent(v -> statements.add(
new ExpressionBuilder(f, builder)
.method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
.toStatement()));
List<MethodDeclaration> fields = new ArrayList<>();
for (PropertyDeclaration property : model.getDeclaredProperties()) {
if (TextFieldTrait.getKind(property) != TextFieldTrait.Kind.VALUE) {
continue;
}
MethodDeclaration method = createGetFieldDefinition(property);
fields.add(method);
statements.add(new ExpressionBuilder(f, builder)
.method("withField", //$NON-NLS-1$
new TypeBuilder(f, context.resolve(model.getSymbol()))
.methodReference(context.getOptionGetterName(property))
.toExpression(),
new ExpressionBuilder(f, f.newThis())
.method(method.getName())
.toExpression())
.toStatement());
}
return fields;
}
private MethodDeclaration createGetFieldDefinition(PropertyDeclaration property) {
SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
statements.add(new TypeBuilder(f, context.resolve(FieldDefinition.class))
.method("builder", //$NON-NLS-1$
resolve(TextFieldTrait.getName(property)),
buildFieldAdapter(property))
.toLocalVariableDeclaration(
f.newParameterizedType(
context.resolve(FieldDefinition.Builder.class),
context.getFieldType(property)),
builder));
TextFieldSettings settings = TextFieldTrait.getSettings(property);
settings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withTrimInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getUnmappableOutputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getQuoteStyle().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOutputOption", resolve(v)) //$NON-NLS-1$
.toStatement()));
statements.add(new ExpressionBuilder(f, builder)
.method("build") //$NON-NLS-1$
.toReturnStatement());
JavaName name = JavaName.of(property.getName());
name.addFirst("get"); //$NON-NLS-1$
name.addLast("field"); //$NON-NLS-1$
name.addLast("definition"); //$NON-NLS-1$
return f.newMethodDeclaration(
new JavadocBuilder(f)
.inline(Messages.getString("AbstractTextStreamFormatGenerator.javafocGetFieldDefinitionOverview"), //$NON-NLS-1$
d -> d.linkMethod(
context.resolve(model.getSymbol()),
context.getOptionGetterName(property)))
.returns()
.text(Messages.getString("AbstractTextStreamFormatGenerator.javadocGetFieldDefinitionReturn")) //$NON-NLS-1$
.toJavadoc(),
new AttributeBuilder(f)
.Protected()
.toAttributes(),
f.newParameterizedType(
context.resolve(FieldDefinition.class),
context.getFieldType(property)),
f.newSimpleName(name.toMemberName()),
Collections.emptyList(),
statements);
}
private Expression buildFieldAdapter(PropertyDeclaration property) {
TextFieldSettings settings = TextFieldTrait.getSettings(property);
Value<ClassName> adapterClass = setting(settings, TextFieldSettings::getAdapterClass);
if (adapterClass.isPresent()) {
return new TypeBuilder(f, resolve(adapterClass.getEntity()))
.constructorReference()
.toExpression();
}
BasicTypeKind kind = ((BasicType) property.getType()).getKind();
Class<? extends ValueOptionFieldAdapter<?>> basicAdapterClass = ADAPTER_TYPES.get(kind);
assert basicAdapterClass != null;
ExpressionBuilder builder = new TypeBuilder(f, context.resolve(basicAdapterClass)).method("builder"); //$NON-NLS-1$
setting(settings, TextFieldSettings::getNullFormat).ifPresent(v -> builder
.method("withNullFormat", resolve(v))); //$NON-NLS-1$
switch (kind) {
case BOOLEAN:
setting(settings, TextFieldSettings::getTrueFormat).ifPresent(v -> builder
.method("withTrueFormat", resolve(v))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getFalseFormat).ifPresent(v -> builder
.method("withFalseFormat", resolve(v))); //$NON-NLS-1$
break;
case DATE:
setting(settings, TextFieldSettings::getDateFormat).ifPresent(v -> builder
.method("withDateFormat", resolve(v.toString()))); //$NON-NLS-1$
break;
case DATETIME:
setting(settings, TextFieldSettings::getDateTimeFormat).ifPresent(v -> builder
.method("withDateTimeFormat", resolve(v.toString()))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getTimeZone).ifPresent(v -> builder
.method("withTimeZone", resolve(v.getId()))); //$NON-NLS-1$
break;
case DECIMAL:
setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
.method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getDecimalOutputStyle).ifPresent(v -> builder
.method("withOutputStyle", resolve(v))); //$NON-NLS-1$
break;
case BYTE:
case INT:
case SHORT:
case LONG:
case FLOAT:
case DOUBLE:
setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
.method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
break;
case TEXT:
// no special members
break;
default:
throw new AssertionError(kind);
}
return builder.method("lazy").toExpression(); //$NON-NLS-1$
}
private <T> Value<T> setting(TextFieldSettings settings, Function<TextFieldSettings, Value<T>> getter) {
return getter.apply(settings).orDefault(getter.apply(fieldDefaultSettings));
}
private Optional<MethodDeclaration> createGetInputSplitter() {
if (isSplittable()) {
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
context.resolve(InputSplitter.class),
f.newSimpleName("getInputSplitter"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, context.resolve(InputSplitters.class))
.method("byLineFeed") //$NON-NLS-1$
.toReturnStatement())));
} else {
return Optional.empty();
}
}
private boolean isSplittable() {
if (formatSettings.getCharset().isPresent()) {
if (!CharsetUtil.isAsciiCompatible(formatSettings.getCharset().getEntity())) {
return false;
}
}
if (formatSettings.getCompressionType().isPresent()) {
return false;
}
if (model.getDeclaredProperties().stream()
.map(TextFieldTrait::getKind)
.anyMatch(Predicate.isEqual(TextFieldTrait.Kind.LINE_NUMBER)
.or(Predicate.isEqual(TextFieldTrait.Kind.RECORD_NUMBER)))) {
return false;
}
return isSplittableInternal();
}
/**
* Returns whether or not the input is splittable.
* @return {@code true} if it is splittable, otherwise {@code false}
*/
protected abstract boolean isSplittableInternal();
private Optional<MethodDeclaration> createGetCompressionCodecClass() {
if (formatSettings.getCompressionType().isPresent()) {
ClassName codec = formatSettings.getCompressionType().getEntity();
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
new TypeBuilder(f, context.resolve(Class.class))
.parameterize(f.newWildcardExtends(context.resolve(CompressionCodec.class)))
.toType(),
f.newSimpleName("getCompressionCodecClass"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, resolve(codec))
.dotClass()
.toReturnStatement())));
} else {
return Optional.empty();
}
}
private Optional<MethodDeclaration> createAfterInput() {
SimpleName object = f.newSimpleName("object"); //$NON-NLS-1$
SimpleName path = f.newSimpleName("path"); //$NON-NLS-1$
SimpleName input = f.newSimpleName("input"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
for (PropertyDeclaration property : model.getDeclaredProperties()) {
switch (TextFieldTrait.getKind(property)) {
case VALUE:
break; // does nothing
case IGNORE:
statements.add(new ExpressionBuilder(f, object)
.method(context.getOptionSetterName(property), Models.toNullLiteral(f))
.toStatement());
break;
case FILE_NAME:
statements.add(new ExpressionBuilder(f, object)
.method(context.getOptionSetterName(property), path)
.toStatement());
break;
case LINE_NUMBER:
statements.add(new ExpressionBuilder(f, object)
.method(context.getValueSetterName(property),
adjustLong(property, new ExpressionBuilder(f, input)
.method("getLineNumber") //$NON-NLS-1$
.apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
.toStatement());
break;
case RECORD_NUMBER:
statements.add(new ExpressionBuilder(f, object)
.method(context.getValueSetterName(property),
adjustLong(property, new ExpressionBuilder(f, input)
.method("getRecordIndex") //$NON-NLS-1$
.apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
.toStatement());
break;
default:
throw new AssertionError(TextFieldTrait.getKind(property));
}
}
if (statements.isEmpty()) {
return Optional.empty();
} else {
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
context.resolve(void.class),
f.newSimpleName("afterInput"), //$NON-NLS-1$
Arrays.asList(
f.newFormalParameterDeclaration(context.resolve(model.getSymbol()), object),
f.newFormalParameterDeclaration(context.resolve(StringOption.class), path),
f.newFormalParameterDeclaration(
f.newParameterizedType(
context.resolve(TextInput.class),
context.resolve(model.getSymbol())),
input)),
statements));
}
}
private Expression adjustLong(PropertyDeclaration property, ExpressionBuilder builder) {
if (AttributeUtil.hasFieldType(property, BasicTypeKind.LONG)) {
return builder.toExpression();
} else if (AttributeUtil.hasFieldType(property, BasicTypeKind.INT)) {
return builder.castTo(context.resolve(int.class)).toExpression();
} else {
throw new AssertionError(property);
}
}
private Optional<MethodDeclaration> createBeforeOutput() {
return Optional.empty();
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(boolean value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(char value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(String value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(Enum<?> value) {
return new TypeBuilder(f, context.resolve(value.getDeclaringClass()))
.field(value.name())
.toExpression();
}
/**
* Resolves a value.
* @param type the value
* @return the resolved expression
*/
protected Type resolve(ClassName type) {
return context.resolve(Models.toName(f, type.toString()));
}
}
| asakusafw/asakusafw | directio-project/asakusa-directio-dmdl/src/main/java/com/asakusafw/dmdl/directio/text/AbstractTextStreamFormatGenerator.java | Java | apache-2.0 | 26,894 |
/*
* Copyright 2007 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.relaxNG.references;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.LocalQuickFixProvider;
import com.intellij.codeInspection.XmlQuickFixFactory;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiReferenceProvider;
import com.intellij.psi.XmlElementFactory;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.BasicAttributeValueReference;
import com.intellij.psi.impl.source.xml.SchemaPrefix;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ProcessingContext;
/*
* Created by IntelliJ IDEA.
* User: sweinreuter
* Date: 24.07.2007
*/
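/**
 * Supplies {@link PsiReference}s for the namespace prefix part of an XML
 * attribute value (the {@code foo} in {@code foo:bar}), resolved against the
 * enclosing {@code xmlns:} declarations.
 */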
public class PrefixReferenceProvider extends PsiReferenceProvider
{
private static final Logger LOG = Logger.getInstance("#org.intellij.plugins.relaxNG.references.PrefixReferenceProvider");
@Override
@NotNull
public PsiReference[] getReferencesByElement(@NotNull PsiElement element, @NotNull ProcessingContext context)
{
final XmlAttributeValue value = (XmlAttributeValue) element;
final String s = value.getValue();
final int i = s.indexOf(':');
if(i <= 0 || s.startsWith("xml:"))
{
return PsiReference.EMPTY_ARRAY;
}
return new PsiReference[]{
new PrefixReference(value, i)
};
}
private static class PrefixReference extends BasicAttributeValueReference implements EmptyResolveMessageProvider, LocalQuickFixProvider
{
public PrefixReference(XmlAttributeValue value, int length)
{
super(value, TextRange.from(1, length));
}
@Override
@Nullable
public PsiElement resolve()
{
final String prefix = getCanonicalText();
XmlTag tag = PsiTreeUtil.getParentOfType(getElement(), XmlTag.class);
while(tag != null)
{
if(tag.getLocalNamespaceDeclarations().containsKey(prefix))
{
final XmlAttribute attribute = tag.getAttribute("xmlns:" + prefix, "");
final TextRange textRange = TextRange.from("xmlns:".length(), prefix.length());
return new SchemaPrefix(attribute, textRange, prefix);
}
tag = tag.getParentTag();
}
return null;
}
@Override
public boolean isReferenceTo(PsiElement element)
{
if(element instanceof SchemaPrefix && element.getContainingFile() == myElement.getContainingFile())
{
final PsiElement e = resolve();
if(e instanceof SchemaPrefix)
{
final String s = ((SchemaPrefix) e).getName();
return s != null && s.equals(((SchemaPrefix) element).getName());
}
}
return super.isReferenceTo(element);
}
@Nullable
@Override
public LocalQuickFix[] getQuickFixes()
{
final PsiElement element = getElement();
final XmlElementFactory factory = XmlElementFactory.getInstance(element.getProject());
final String value = ((XmlAttributeValue) element).getValue();
final String[] name = value.split(":");
final XmlTag tag = factory.createTagFromText("<" + (name.length > 1 ? name[1] : value) + " />", XMLLanguage.INSTANCE);
return new LocalQuickFix[]{XmlQuickFixFactory.getInstance().createNSDeclarationIntentionFix(tag, getCanonicalText(), null)};
}
@Override
@NotNull
public Object[] getVariants()
{
return ArrayUtil.EMPTY_OBJECT_ARRAY;
}
@Override
public boolean isSoft()
{
return false;
}
@Override
@NotNull
public String getUnresolvedMessagePattern()
{
return "Undefined namespace prefix ''{0}''";
}
}
}
| consulo/consulo-relaxng | src/org/intellij/plugins/relaxNG/references/PrefixReferenceProvider.java | Java | apache-2.0 | 4,459 |
/**
* Copyright 2017 Hortonworks.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package com.hortonworks.streamline.streams.service;
import com.codahale.metrics.annotation.Timed;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.hortonworks.streamline.common.exception.service.exception.request.BadRequestException;
import com.hortonworks.streamline.common.exception.service.exception.request.EntityNotFoundException;
import com.hortonworks.streamline.common.exception.service.exception.server.UnhandledServerException;
import com.hortonworks.streamline.common.util.WSUtils;
import com.hortonworks.streamline.streams.actions.topology.service.TopologyActionsService;
import com.hortonworks.streamline.streams.catalog.Topology;
import com.hortonworks.streamline.streams.catalog.TopologySink;
import com.hortonworks.streamline.streams.catalog.TopologySource;
import com.hortonworks.streamline.streams.catalog.TopologyTestRunCase;
import com.hortonworks.streamline.streams.catalog.TopologyTestRunCaseSink;
import com.hortonworks.streamline.streams.catalog.TopologyTestRunCaseSource;
import com.hortonworks.streamline.streams.catalog.TopologyTestRunHistory;
import com.hortonworks.streamline.streams.catalog.service.StreamCatalogService;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.BooleanUtils;
import org.datanucleus.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.OK;
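/**
 * REST resource for running a topology in test mode and for managing its test
 * run histories, test cases, and per-test-case source/sink configurations.
 */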
@Path("/v1/catalog")
@Produces(MediaType.APPLICATION_JSON)
public class TopologyTestRunResource {
private static final Logger LOG = LoggerFactory.getLogger(TopologyTestRunResource.class);
private static final Integer DEFAULT_LIST_ENTITIES_COUNT = 5;
public static final Charset ENCODING_UTF_8 = Charset.forName("UTF-8");
private final StreamCatalogService catalogService;
private final TopologyActionsService actionsService;
private final ObjectMapper objectMapper;
public TopologyTestRunResource(StreamCatalogService catalogService, TopologyActionsService actionsService) {
this.catalogService = catalogService;
this.actionsService = actionsService;
this.objectMapper = new ObjectMapper();
}
@POST
@Path("/topologies/{topologyId}/actions/testrun")
@Timed
public Response testRunTopology (@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
String testRunInputJson) throws Exception {
Topology result = catalogService.getTopology(topologyId);
if (result != null) {
TopologyTestRunHistory history = actionsService.testRunTopology(result, testRunInputJson);
return WSUtils.respondEntity(history, OK);
}
throw EntityNotFoundException.byId(topologyId.toString());
}
@GET
@Path("/topologies/{topologyId}/testhistories")
@Timed
public Response getHistoriesOfTestRunTopology (@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@QueryParam("limit") Integer limit) throws Exception {
Collection<TopologyTestRunHistory> histories = catalogService.listTopologyTestRunHistory(topologyId);
if (histories == null) {
throw EntityNotFoundException.byFilter("topology id " + topologyId);
}
List<TopologyTestRunHistory> filteredHistories = filterHistories(limit, histories);
return WSUtils.respondEntities(filteredHistories, OK);
}
@GET
@Path("/topologies/{topologyId}/versions/{versionId}/testhistories")
@Timed
public Response getHistoriesOfTestRunTopology (@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("versionId") Long versionId,
@QueryParam("limit") Integer limit) throws Exception {
Collection<TopologyTestRunHistory> histories = catalogService.listTopologyTestRunHistory(topologyId, versionId);
if (histories == null) {
throw EntityNotFoundException.byFilter("topology id " + topologyId);
}
List<TopologyTestRunHistory> filteredHistories = filterHistories(limit, histories);
return WSUtils.respondEntities(filteredHistories, OK);
}
@GET
@Path("/topologies/{topologyId}/testhistories/{historyId}")
@Timed
public Response getHistoryOfTestRunTopology (@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("historyId") Long historyId,
@QueryParam("simplify") Boolean simplify) throws Exception {
TopologyTestRunHistory history = catalogService.getTopologyTestRunHistory(historyId);
if (history == null) {
throw EntityNotFoundException.byId(String.valueOf(historyId));
}
if (!history.getTopologyId().equals(topologyId)) {
            throw BadRequestException.message("Test history " + historyId + " does not belong to topology " + topologyId);
}
if (BooleanUtils.isTrue(simplify)) {
return WSUtils.respondEntity(new SimplifiedTopologyTestRunHistory(history), OK);
} else {
return WSUtils.respondEntity(history, OK);
}
}
@GET
@Path("/topologies/{topologyId}/testhistories/{historyId}/events")
public Response getEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("historyId") Long historyId) throws Exception {
return getEventsOfTestRunTopologyHistory(topologyId, historyId, null);
}
@GET
@Path("/topologies/{topologyId}/testhistories/{historyId}/events/{componentName}")
public Response getEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("historyId") Long historyId,
@PathParam("componentName") String componentName) throws Exception {
return getEventsOfTestRunTopologyHistory(topologyId, historyId, componentName);
}
@GET
@Path("/topologies/{topologyId}/testhistories/{historyId}/events/download")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response downloadEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("historyId") Long historyId) throws Exception {
File eventLogFile = getEventLogFile(topologyId, historyId);
String content = FileUtils.readFileToString(eventLogFile, ENCODING_UTF_8);
InputStream is = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
String fileName = String.format("events-topology-%d-history-%d.log", topologyId, historyId);
return Response.status(OK)
.entity(is)
.header("Content-Disposition", "attachment; filename=\"" + fileName + "\"")
.build();
}
private Response getEventsOfTestRunTopologyHistory(Long topologyId, Long historyId, String componentName) throws IOException {
File eventLogFile = getEventLogFile(topologyId, historyId);
List<String> lines = FileUtils.readLines(eventLogFile, ENCODING_UTF_8);
Stream<Map<String, Object>> eventsStream = lines.stream().map(line -> {
try {
return objectMapper.readValue(line, new TypeReference<Map<String, Object>>() {});
} catch (IOException e) {
throw new RuntimeException(e);
}
});
if (!StringUtils.isEmpty(componentName)) {
eventsStream = eventsStream.filter(event -> {
String eventComponentName = (String) event.get("componentName");
return eventComponentName != null && eventComponentName.equals(componentName);
});
}
return WSUtils.respondEntities(eventsStream.collect(toList()), OK);
}
private File getEventLogFile(Long topologyId, Long historyId) {
TopologyTestRunHistory history = catalogService.getTopologyTestRunHistory(historyId);
if (history == null) {
throw EntityNotFoundException.byId(String.valueOf(historyId));
}
if (!history.getTopologyId().equals(topologyId)) {
            throw BadRequestException.message("Test history " + historyId + " does not belong to topology " + topologyId);
}
String eventLogFilePath = history.getEventLogFilePath();
File eventLogFile = new File(eventLogFilePath);
if (!eventLogFile.exists() || eventLogFile.isDirectory() || !eventLogFile.canRead()) {
throw BadRequestException.message("Event log file of history " + historyId + " does not exist or is not readable.");
}
return eventLogFile;
}
private List<TopologyTestRunHistory> filterHistories(Integer limit, Collection<TopologyTestRunHistory> histories) {
if (limit == null) {
limit = DEFAULT_LIST_ENTITIES_COUNT;
}
return histories.stream()
// reverse order
.sorted((h1, h2) -> (int) (h2.getId() - h1.getId()))
.limit(limit)
.collect(toList());
}
@POST
@Path("/topologies/{topologyId}/testcases")
public Response addTestRunCase(@PathParam("topologyId") Long topologyId,
TopologyTestRunCase testRunCase) {
testRunCase.setTopologyId(topologyId);
Long currentVersionId = catalogService.getCurrentVersionId(topologyId);
testRunCase.setVersionId(currentVersionId);
TopologyTestRunCase addedCase = catalogService.addTopologyTestRunCase(testRunCase);
return WSUtils.respondEntity(addedCase, CREATED);
}
@POST
@Path("/topologies/{topologyId}/versions/{versionId}/testcases")
public Response addTestRunCase(@PathParam("topologyId") Long topologyId,
@PathParam("versionId") Long versionId,
TopologyTestRunCase testRunCase) {
testRunCase.setTopologyId(topologyId);
testRunCase.setVersionId(versionId);
TopologyTestRunCase addedCase = catalogService.addTopologyTestRunCase(testRunCase);
return WSUtils.respondEntity(addedCase, CREATED);
}
@PUT
@Path("/topologies/{topologyId}/testcases/{testCaseId}")
public Response addOrUpdateTestRunCase(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
TopologyTestRunCase testRunCase) {
testRunCase.setTopologyId(topologyId);
testRunCase.setId(testCaseId);
TopologyTestRunCase updatedCase = catalogService.addOrUpdateTopologyTestRunCase(topologyId, testRunCase);
return WSUtils.respondEntity(updatedCase, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases/{testCaseId}")
public Response getTestRunCase(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId) {
TopologyTestRunCase testcase = catalogService.getTopologyTestRunCase(topologyId, testCaseId);
if (testcase == null) {
throw EntityNotFoundException.byId(Long.toString(testCaseId));
}
return WSUtils.respondEntity(testcase, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases")
@Timed
public Response listTestRunCases(@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@QueryParam("limit") Integer limit) throws Exception {
Long currentVersionId = catalogService.getCurrentVersionId(topologyId);
Collection<TopologyTestRunCase> cases = catalogService.listTopologyTestRunCase(topologyId, currentVersionId);
if (cases == null) {
throw EntityNotFoundException.byFilter("topology id " + topologyId);
}
List<TopologyTestRunCase> filteredCases = filterTestRunCases(limit, cases);
return WSUtils.respondEntities(filteredCases, OK);
}
@GET
@Path("/topologies/{topologyId}/versions/{versionId}/testcases")
@Timed
public Response listTestRunCases(@Context UriInfo urlInfo,
@PathParam("topologyId") Long topologyId,
@PathParam("versionId") Long versionId,
@QueryParam("limit") Integer limit) throws Exception {
Collection<TopologyTestRunCase> cases = catalogService.listTopologyTestRunCase(topologyId, versionId);
if (cases == null) {
throw EntityNotFoundException.byFilter("topology id " + topologyId);
}
List<TopologyTestRunCase> filteredCases = filterTestRunCases(limit, cases);
return WSUtils.respondEntities(filteredCases, OK);
}
@DELETE
@Path("/topologies/{topologyId}/testcases/{testCaseId}")
public Response removeTestRunCase(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId) {
TopologyTestRunCase testRunCase = catalogService.removeTestRunCase(topologyId, testCaseId);
if (testRunCase != null) {
return WSUtils.respondEntity(testRunCase, OK);
}
throw EntityNotFoundException.byId(testCaseId.toString());
}
private List<TopologyTestRunCase> filterTestRunCases(Integer limit, Collection<TopologyTestRunCase> cases) {
if (limit == null) {
limit = DEFAULT_LIST_ENTITIES_COUNT;
}
return cases.stream()
// reverse order
.sorted((h1, h2) -> (int) (h2.getId() - h1.getId()))
.limit(limit)
.collect(toList());
}
@POST
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sources")
public Response addTestRunCaseSource(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
TopologyTestRunCaseSource testRunCaseSource) {
TopologySource topologySource = getAssociatedTopologySource(topologyId, testCaseId, testRunCaseSource.getSourceId());
testRunCaseSource.setVersionId(topologySource.getVersionId());
TopologyTestRunCaseSource addedCaseSource = catalogService.addTopologyTestRunCaseSource(testRunCaseSource);
return WSUtils.respondEntity(addedCaseSource, CREATED);
}
@PUT
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sources/{id}")
public Response addOrUpdateTestRunCaseSource(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
@PathParam("id") Long id,
TopologyTestRunCaseSource testRunCaseSource) {
testRunCaseSource.setId(id);
testRunCaseSource.setTestCaseId(testCaseId);
TopologySource topologySource = getAssociatedTopologySource(topologyId, testCaseId, testRunCaseSource.getSourceId());
testRunCaseSource.setVersionId(topologySource.getVersionId());
TopologyTestRunCaseSource updatedCase = catalogService.addOrUpdateTopologyTestRunCaseSource(testRunCaseSource.getId(), testRunCaseSource);
return WSUtils.respondEntity(updatedCase, OK);
}
private TopologySource getAssociatedTopologySource(Long topologyId, Long testCaseId, Long topologySourceId) {
TopologyTestRunCase testCase = catalogService.getTopologyTestRunCase(topologyId, testCaseId);
if (testCase == null) {
throw EntityNotFoundException.byId("Topology test case with topology id " + topologyId +
" and test case id " + testCaseId);
}
TopologySource topologySource = catalogService.getTopologySource(topologyId, topologySourceId,
testCase.getVersionId());
if (topologySource == null) {
throw EntityNotFoundException.byId("Topology source with topology id " + topologyId +
" and version id " + testCase.getVersionId());
} else if (!testCase.getVersionId().equals(topologySource.getVersionId())) {
throw new IllegalStateException("Test case and topology source point to the different version id: "
+ "version id of test case: " + testCase.getVersionId() + " / "
+ "version id of topology source: " + topologySource.getVersionId());
}
return topologySource;
}
@GET
@Path("/topologies/{topologyId}/testcases/{testcaseId}/sources/{id}")
public Response getTestRunCaseSource(@PathParam("topologyId") Long topologyId,
@PathParam("testcaseId") Long testcaseId,
@PathParam("id") Long id) {
TopologyTestRunCaseSource testCaseSource = catalogService.getTopologyTestRunCaseSource(testcaseId, id);
if (testCaseSource == null) {
throw EntityNotFoundException.byId(Long.toString(id));
}
return WSUtils.respondEntity(testCaseSource, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sources/topologysource/{sourceId}")
public Response getTestRunCaseSourceByTopologySource(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
@PathParam("sourceId") Long sourceId) {
TopologyTestRunCaseSource testCaseSource = catalogService.getTopologyTestRunCaseSourceBySourceId(testCaseId, sourceId);
if (testCaseSource == null) {
throw EntityNotFoundException.byId("test case id: " + testCaseId + " , topology source id: " + sourceId);
}
return WSUtils.respondEntity(testCaseSource, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sources")
public Response listTestRunCaseSource(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId) {
Collection<TopologyTestRunCaseSource> sources = catalogService.listTopologyTestRunCaseSource(topologyId, testCaseId);
if (sources == null) {
throw EntityNotFoundException.byFilter("topologyId: " + topologyId + " / testCaseId: " + testCaseId);
}
return WSUtils.respondEntities(sources, OK);
}
@POST
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks")
public Response addTestRunCaseSink(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
TopologyTestRunCaseSink testRunCaseSink) {
TopologySink topologySink = getAssociatedTopologySink(topologyId, testCaseId, testRunCaseSink.getSinkId());
testRunCaseSink.setVersionId(topologySink.getVersionId());
TopologyTestRunCaseSink addedCaseSink = catalogService.addTopologyTestRunCaseSink(testRunCaseSink);
return WSUtils.respondEntity(addedCaseSink, CREATED);
}
@PUT
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks/{id}")
public Response addOrUpdateTestRunCaseSink(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
@PathParam("id") Long id,
TopologyTestRunCaseSink testRunCaseSink) {
testRunCaseSink.setId(id);
testRunCaseSink.setTestCaseId(testCaseId);
TopologySink topologySink = getAssociatedTopologySink(topologyId, testCaseId, testRunCaseSink.getSinkId());
testRunCaseSink.setVersionId(topologySink.getVersionId());
TopologyTestRunCaseSink updatedCase = catalogService.addOrUpdateTopologyTestRunCaseSink(testRunCaseSink.getId(), testRunCaseSink);
return WSUtils.respondEntity(updatedCase, OK);
}
private TopologySink getAssociatedTopologySink(Long topologyId, Long testCaseId, Long topologySinkId) {
TopologyTestRunCase testCase = catalogService.getTopologyTestRunCase(topologyId, testCaseId);
if (testCase == null) {
throw EntityNotFoundException.byId("Topology test case with topology id " + topologyId +
" and test case id " + testCaseId);
}
TopologySink topologySink = catalogService.getTopologySink(topologyId, topologySinkId,
testCase.getVersionId());
if (topologySink == null) {
throw EntityNotFoundException.byId("Topology sink with topology id " + topologyId +
" and version id " + testCase.getVersionId());
} else if (!testCase.getVersionId().equals(topologySink.getVersionId())) {
throw new IllegalStateException("Test case and topology sink point to the different version id: "
+ "version id of test case: " + testCase.getVersionId() + " / "
+ "version id of topology sink: " + topologySink.getVersionId());
}
return topologySink;
}
@GET
@Path("/topologies/{topologyId}/testcases/{testcaseId}/sinks/{id}")
public Response getTestRunCaseSink(@PathParam("topologyId") Long topologyId,
@PathParam("testcaseId") Long testcaseId,
@PathParam("id") Long id) {
TopologyTestRunCaseSink testCaseSink = catalogService.getTopologyTestRunCaseSink(testcaseId, id);
if (testCaseSink == null) {
throw EntityNotFoundException.byId(Long.toString(id));
}
return WSUtils.respondEntity(testCaseSink, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks/topologysink/{sinkId}")
public Response getTestRunCaseSinkByTopologySink(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId,
@PathParam("sinkId") Long sinkId) {
TopologyTestRunCaseSink testCaseSink = catalogService.getTopologyTestRunCaseSinkBySinkId(testCaseId, sinkId);
if (testCaseSink == null) {
            throw EntityNotFoundException.byId("test case id: " + testCaseId + " , topology sink id: " + sinkId);
}
return WSUtils.respondEntity(testCaseSink, OK);
}
@GET
@Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks")
public Response listTestRunCaseSink(@PathParam("topologyId") Long topologyId,
@PathParam("testCaseId") Long testCaseId) {
Collection<TopologyTestRunCaseSink> sources = catalogService.listTopologyTestRunCaseSink(topologyId, testCaseId);
if (sources == null) {
throw EntityNotFoundException.byFilter("topologyId: " + topologyId + " / testCaseId: " + testCaseId);
}
return WSUtils.respondEntities(sources, OK);
}
private static class SimplifiedTopologyTestRunHistory {
private Long id;
private Long topologyId;
private Long versionId;
private Boolean finished = false;
private Boolean success = false;
private Boolean matched = false;
private Long startTime;
private Long finishTime;
private Long timestamp;
SimplifiedTopologyTestRunHistory(TopologyTestRunHistory history) {
id = history.getId();
topologyId = history.getTopologyId();
versionId = history.getVersionId();
finished = history.getFinished();
success = history.getSuccess();
matched = history.getMatched();
startTime = history.getStartTime();
finishTime = history.getFinishTime();
timestamp = history.getTimestamp();
}
public Long getId() {
return id;
}
public Long getTopologyId() {
return topologyId;
}
public Long getVersionId() {
return versionId;
}
public Boolean getFinished() {
return finished;
}
public Boolean getSuccess() {
return success;
}
public Boolean getMatched() {
return matched;
}
public Long getStartTime() {
return startTime;
}
public Long getFinishTime() {
return finishTime;
}
public Long getTimestamp() {
return timestamp;
}
}
}
| hmcl/Streams | streams/service/src/main/java/com/hortonworks/streamline/streams/service/TopologyTestRunResource.java | Java | apache-2.0 | 26,797 |
package org.jruby.ext.ffi.jna;
import java.util.ArrayList;
import org.jruby.runtime.ThreadContext;
/**
* An invocation session.
* This provides post-invoke cleanup.
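 * <p>
 * Typical use (a sketch): create one instance per native call, register any
 * cleanup work via {@link #addPostInvoke(Runnable)}, and call {@link #finish()}
 * once the call has returned.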
*/
final class Invocation {
private final ThreadContext context;
private ArrayList<Runnable> postInvokeList;
Invocation(ThreadContext context) {
this.context = context;
}
void finish() {
if (postInvokeList != null) {
for (Runnable r : postInvokeList) {
r.run();
}
}
}
void addPostInvoke(Runnable postInvoke) {
if (postInvokeList == null) {
postInvokeList = new ArrayList<Runnable>();
}
postInvokeList.add(postInvoke);
}
ThreadContext getThreadContext() {
return context;
}
}
| google-code/android-scripting | jruby/src/src/org/jruby/ext/ffi/jna/Invocation.java | Java | apache-2.0 | 796 |
package org.onetwo.ext.security.utils;
import java.util.Collection;
import org.onetwo.common.web.userdetails.UserDetail;
import org.onetwo.common.web.userdetails.UserRoot;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.User;
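/**
 * Spring Security {@link User} extended with the application-level user id,
 * nickname and avatar.
 */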
@SuppressWarnings("serial")
public class LoginUserDetails extends User implements UserDetail, /*SsoTokenable,*/ UserRoot {
final private long userId;
// private String token;
private String nickname;
private String avatar;
public LoginUserDetails(long userId, String username, String password,
Collection<? extends GrantedAuthority> authorities) {
super(username, password, authorities);
this.userId = userId;
}
public long getUserId() {
return userId;
}
@Override
public String getUserName() {
return getUsername();
}
@Override
public boolean isSystemRootUser() {
return userId==ROOT_USER_ID;
}
public String getNickname() {
return nickname;
}
public void setNickname(String nickname) {
this.nickname = nickname;
}
public String getAvatar() {
return avatar;
}
public void setAvatar(String avatar) {
this.avatar = avatar;
}
/*public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}*/
}
| wayshall/onetwo | core/modules/security/src/main/java/org/onetwo/ext/security/utils/LoginUserDetails.java | Java | apache-2.0 | 1,289 |
/*
* Copyright (C) 2015 P100 OG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shiftconnects.android.auth.example.util;
import com.google.gson.Gson;
import com.google.gson.JsonParseException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import retrofit.converter.ConversionException;
import retrofit.converter.Converter;
import retrofit.mime.MimeUtil;
import retrofit.mime.TypedInput;
import retrofit.mime.TypedOutput;
/**
* A {@link Converter} which uses GSON for serialization and deserialization of entities.
*
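 * <p>Example wiring (a sketch, assuming Retrofit 1.x's {@code RestAdapter.Builder};
 * the endpoint URL below is a placeholder):</p>
 * <pre>{@code
 * RestAdapter adapter = new RestAdapter.Builder()
 *     .setEndpoint("https://api.example.com")         // hypothetical endpoint
 *     .setConverter(new GsonConverter(new Gson()))     // defaults to UTF-8
 *     .build();
 * }</pre>
 *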
* @author Jake Wharton (jw@squareup.com)
*/
public class GsonConverter implements Converter {
private final Gson gson;
private String charset;
/**
* Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
* decoding from JSON (when no charset is specified by a header) will use UTF-8.
*/
public GsonConverter(Gson gson) {
this(gson, "UTF-8");
}
/**
* Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
* decoding from JSON (when no charset is specified by a header) will use the specified charset.
*/
public GsonConverter(Gson gson, String charset) {
this.gson = gson;
this.charset = charset;
}
@Override public Object fromBody(TypedInput body, Type type) throws ConversionException {
String charset = this.charset;
if (body.mimeType() != null) {
charset = MimeUtil.parseCharset(body.mimeType(), charset);
}
InputStreamReader isr = null;
try {
isr = new InputStreamReader(body.in(), charset);
return gson.fromJson(isr, type);
} catch (IOException e) {
throw new ConversionException(e);
} catch (JsonParseException e) {
throw new ConversionException(e);
} finally {
if (isr != null) {
try {
isr.close();
} catch (IOException ignored) {
}
}
}
}
@Override public TypedOutput toBody(Object object) {
try {
return new JsonTypedOutput(gson.toJson(object).getBytes(charset), charset);
} catch (UnsupportedEncodingException e) {
throw new AssertionError(e);
}
}
private static class JsonTypedOutput implements TypedOutput {
private final byte[] jsonBytes;
private final String mimeType;
JsonTypedOutput(byte[] jsonBytes, String encode) {
this.jsonBytes = jsonBytes;
this.mimeType = "application/json; charset=" + encode;
}
@Override public String fileName() {
return null;
}
@Override public String mimeType() {
return mimeType;
}
@Override public long length() {
return jsonBytes.length;
}
@Override public void writeTo(OutputStream out) throws IOException {
out.write(jsonBytes);
}
}
}
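// --- Usage sketch (editor's addition, not part of the original source) -----------------
// A minimal illustration of how this converter behaves on its own: toBody() turns a POJO
// into a JSON request body and fromBody() parses a response body back. Assumption:
// retrofit.mime.TypedByteArray is Retrofit 1.x's stock TypedInput for wrapping raw bytes;
// in normal use you would simply pass the converter to RestAdapter.Builder#setConverter.
class GsonConverterUsageSketch {
  static void demo() throws Exception {
    GsonConverter converter = new GsonConverter(new Gson());
    // Serialize a simple object to a JSON body and inspect the resulting metadata.
    TypedOutput body = converter.toBody(java.util.Collections.singletonMap("name", "example"));
    System.out.println(body.mimeType() + ", " + body.length() + " bytes");
    // Parse a JSON response body back into a Map.
    TypedInput response = new retrofit.mime.TypedByteArray(
        "application/json; charset=UTF-8", "{\"name\":\"example\"}".getBytes("UTF-8"));
    Object parsed = converter.fromBody(response, java.util.Map.class);
    System.out.println(parsed);
  }
}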
| shiftconnects/android-auth-manager | sample/src/main/java/com/shiftconnects/android/auth/example/util/GsonConverter.java | Java | apache-2.0 | 3,430 |
package acceptance;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import utils.CommandStatus;
import utils.TemporaryDigdagServer;
import java.nio.file.Path;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static utils.TestUtils.copyResource;
import static utils.TestUtils.main;
//
// This file doesn't contain the normal (success) case.
// That case is defined in another test.
//
public class ValidateProjectIT
{
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Rule
public TemporaryDigdagServer server = TemporaryDigdagServer.builder()
.build();
private Path config;
private Path projectDir;
@Before
public void setUp()
throws Exception
{
projectDir = folder.getRoot().toPath().resolve("foobar");
config = folder.newFile().toPath();
}
@Test
public void uploadInvalidTaskProject()
throws Exception
{
// Create new project
CommandStatus initStatus = main("init",
"-c", config.toString(),
projectDir.toString());
assertThat(initStatus.code(), is(0));
copyResource("acceptance/error_task/invalid_at_group.dig", projectDir.resolve("invalid_at_group.dig"));
// Push the project
CommandStatus pushStatus = main(
"push",
"--project", projectDir.toString(),
"foobar",
"-c", config.toString(),
"-e", server.endpoint());
assertThat(pushStatus.code(), is(1));
assertThat(pushStatus.errUtf8(), containsString("A task can't have more than one operator"));
}
@Test
public void uploadInvalidScheduleProject()
throws Exception
{
// Create new project
CommandStatus initStatus = main("init",
"-c", config.toString(),
projectDir.toString());
assertThat(initStatus.code(), is(0));
copyResource("acceptance/schedule/invalid_schedule.dig", projectDir.resolve("invalid_schedule.dig"));
// Push the project
CommandStatus pushStatus = main(
"push",
"--project", projectDir.toString(),
"foobar",
"-c", config.toString(),
"-e", server.endpoint());
assertThat(pushStatus.code(), is(1));
assertThat(pushStatus.errUtf8(), containsString("scheduler requires mm:ss format"));
}
}
| treasure-data/digdag | digdag-tests/src/test/java/acceptance/ValidateProjectIT.java | Java | apache-2.0 | 2,609 |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.api.kafka.model.connect.build;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.strimzi.api.kafka.model.UnknownPropertyPreserving;
import io.strimzi.crdgenerator.annotations.Description;
import lombok.EqualsAndHashCode;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Abstract base class for the different representations of connect build outputs, discriminated by {@link #getType() type}.
*/
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.EXISTING_PROPERTY,
property = "type"
)
@JsonSubTypes(
{
@JsonSubTypes.Type(value = DockerOutput.class, name = Output.TYPE_DOCKER),
@JsonSubTypes.Type(value = ImageStreamOutput.class, name = Output.TYPE_IMAGESTREAM)
}
)
@JsonInclude(JsonInclude.Include.NON_NULL)
@EqualsAndHashCode
public abstract class Output implements UnknownPropertyPreserving, Serializable {
private static final long serialVersionUID = 1L;
public static final String TYPE_DOCKER = "docker";
public static final String TYPE_IMAGESTREAM = "imagestream";
private String image;
private Map<String, Object> additionalProperties = new HashMap<>(0);
@Description("Output type. " +
"Must be either `docker` for pushing the newly build image to Docker compatible registry or `imagestream` for pushing the image to OpenShift ImageStream. " +
"Required.")
public abstract String getType();
@Description("The name of the image which will be built. " +
"Required")
@JsonProperty(required = true)
public String getImage() {
return image;
}
public void setImage(String image) {
this.image = image;
}
@Override
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@Override
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}
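// --- Deserialization sketch (editor's addition, not part of the original source) -------
// Shows what the type discriminator declared above is for: a fragment with "type": "docker"
// resolves to DockerOutput, and "imagestream" to ImageStreamOutput. Assumptions: Jackson's
// ObjectMapper is available at runtime (its annotations already are on the classpath) and
// the concrete subtypes can be instantiated by Jackson like the other model classes.
class OutputDeserializationSketch {
    static Output parseDockerOutput() throws java.io.IOException {
        String json = "{\"type\":\"docker\",\"image\":\"my-registry.example/my-org/my-connect:latest\"}";
        com.fasterxml.jackson.databind.ObjectMapper mapper = new com.fasterxml.jackson.databind.ObjectMapper();
        // The "type" property selects the concrete subclass via the @JsonSubTypes mapping above.
        Output output = mapper.readValue(json, Output.class);
        return output; // output.getType() is "docker"; output.getImage() is the image reference
    }
}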
| scholzj/barnabas | api/src/main/java/io/strimzi/api/kafka/model/connect/build/Output.java | Java | apache-2.0 | 2,326 |
/**
*
*/
package com.transcend.rds.worker;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.hibernate.Session;
import org.slf4j.Logger;
import org.springframework.transaction.annotation.Transactional;
import com.msi.tough.cf.json.DatabagParameter;
import com.msi.tough.core.Appctx;
import com.msi.tough.core.HibernateUtil;
import com.msi.tough.core.JsonUtil;
import com.msi.tough.model.AccountBean;
import com.msi.tough.model.rds.RdsDbinstance;
import com.msi.tough.model.rds.RdsDbparameterGroup;
import com.msi.tough.model.rds.RdsParameter;
import com.msi.tough.query.ErrorResponse;
import com.msi.tough.query.QueryFaults;
import com.msi.tough.query.ServiceRequestContext;
import com.msi.tough.rds.ValidationManager;
import com.msi.tough.rds.json.RDSConfigDatabagItem;
import com.msi.tough.rds.json.RDSDatabag;
import com.msi.tough.rds.json.RDSParameterGroupDatabagItem;
import com.msi.tough.utils.AccountUtil;
import com.msi.tough.utils.ChefUtil;
import com.msi.tough.utils.ConfigurationUtil;
import com.msi.tough.utils.Constants;
import com.msi.tough.utils.RDSQueryFaults;
import com.msi.tough.utils.rds.InstanceEntity;
import com.msi.tough.utils.rds.ParameterGroupEntity;
import com.msi.tough.utils.rds.RDSUtilities;
import com.msi.tough.workflow.core.AbstractWorker;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionRequestMessage;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionResultMessage;
import com.transcend.rds.message.RDSMessage.Parameter;
/**
* @author tdhite
*/
public class ModifyDBParameterGroupActionWorker extends
AbstractWorker<ModifyDBParameterGroupActionRequestMessage, ModifyDBParameterGroupActionResultMessage> {
private final static Logger logger = Appctx
.getLogger(ModifyDBParameterGroupActionWorker.class.getName());
/**
* We need a local copy of this doWork to provide the transactional
* annotation. Transaction management is handled by the annotation, which
* can only be on a concrete class.
	 * @param req the ModifyDBParameterGroup request message
	 * @return the ModifyDBParameterGroup result message
* @throws Exception
*/
@Transactional
public ModifyDBParameterGroupActionResultMessage doWork(
ModifyDBParameterGroupActionRequestMessage req) throws Exception {
logger.debug("Performing work for ModifyDBParameterGroupAction.");
return super.doWork(req, getSession());
}
	/**
	 * modifyDBParameterGroup
	 * <p>
	 * This operation modifies the parameters associated with the named
	 * DBParameterGroup: a parameter that already exists is updated, one that
	 * does not exist yet is inserted.
	 * <p>
	 * Request: DBParameterGroupName (required) and a list of up to 20
	 * Parameter records (required).
	 * Response: DBParameterGroup.
	 * Exceptions: DBParameterGroupNotFound, InvalidDBParameterGroupState.
	 * <p>
	 * Processing:
	 * <ol>
	 * <li>Confirm that the parameter group exists and is in the appropriate state</li>
	 * <li>Update the Parameter records by inserting or updating each parameter</li>
	 * <li>Return the response</li>
	 * </ol>
	 */
@Override
protected ModifyDBParameterGroupActionResultMessage doWork0(ModifyDBParameterGroupActionRequestMessage req,
ServiceRequestContext context) throws Exception {
logger.debug("ModifyDBParameterGroup action is called.");
final Session sess = HibernateUtil.newSession();
final AccountBean ac = context.getAccountBean();
final ModifyDBParameterGroupActionResultMessage.Builder resp = ModifyDBParameterGroupActionResultMessage.newBuilder();
try {
sess.beginTransaction();
final long userId = ac.getId();
final String grpName = ValidationManager.validateIdentifier(
req.getDbParameterGroupName(), 255, true);
final List<Parameter> pList = req.getParametersList();
final int pListLen = pList.size();
logger.info("ModifyDBParameterGroup: " + " UserID = " + userId
+ " ParameterGroupName = " + grpName
+ " Total Number of Listed Parameters = " + pListLen);
if (grpName.equals("default.mysql5.5")) {
throw RDSQueryFaults
.InvalidClientTokenId("You do not have privilege to modify default DBParameterGroup.");
}
// check that DBParameterGroup exists
final RdsDbparameterGroup pGrpRec = ParameterGroupEntity
.getParameterGroup(sess, grpName, ac.getId());
if (pGrpRec == null) {
throw RDSQueryFaults.DBParameterGroupNotFound();
}
final Collection<RdsDbinstance> dbInstances = InstanceEntity
.selectDBInstancesByParameterGroup(sess, grpName, -1, ac);
// make sure that all DBInstances using this DBParameterGroup are in
// available state
for (final RdsDbinstance dbinstance : dbInstances) {
if (!dbinstance.getDbinstanceStatus().equals(
RDSUtilities.STATUS_AVAILABLE)) {
throw RDSQueryFaults
.InvalidDBParameterGroupState("Currently there are DBInstance(s) that use this DBParameterGroup and it"
+ " is not in available state.");
}
}
// reset the parameters in the DB
List<RdsParameter> forRebootPending = new LinkedList<RdsParameter>();
final String paramGrpFamily = pGrpRec.getDbparameterGroupFamily();
final AccountBean sac = AccountUtil.readAccount(sess, 1L);
for (final Parameter p : pList) {
final RdsParameter target = ParameterGroupEntity.getParameter(
sess, grpName, p.getParameterName(), userId);
if (target == null) {
throw RDSQueryFaults.InvalidParameterValue(p
.getParameterName() + " parameter does not exist.");
}
logger.debug("Current target parameter: " + target.toString());
if (!target.getIsModifiable()) {
throw RDSQueryFaults.InvalidParameterValue(p
.getParameterName()
+ " is not modifiable parameter.");
}
// TODO validate p.getParameterValue along with
// p.getParameterName to ensure the value is allowed
else if (p.getApplyMethod().equals(
RDSUtilities.PARM_APPMETHOD_IMMEDIATE)) {
if (target.getApplyType().equals(
RDSUtilities.PARM_APPTYPE_STATIC)) {
throw QueryFaults
.InvalidParameterCombination(target
.getParameterName()
+ " is not dynamic. You can only"
+ " use \"pending-reboot\" as valid ApplyMethod for this parameter.");
}
target.setParameterValue(p.getParameterValue());
target.setSource(Constants.USER);
sess.save(target);
} else if (p.getApplyMethod().equals(
RDSUtilities.PARM_APPMETHOD_PENDING)) {
final RdsParameter temp = new RdsParameter();
temp.setParameterName(p.getParameterName());
temp.setApplyMethod(p.getApplyMethod());
temp.setParameterValue(p.getParameterValue());
forRebootPending.add(temp);
}
}
// Delete and regenerate the Databag
logger.debug("There are " + dbInstances.size()
+ " databags to modify.");
for (final RdsDbinstance instance : dbInstances) {
logger.debug("Currently updating the databag for DBInstance "
+ instance.getDbinstanceId());
final String databagName = "rds-" + ac.getId() + "-"
+ instance.getDbinstanceId();
logger.debug("Deleting the databag " + databagName);
ChefUtil.deleteDatabagItem(databagName, "config");
final String postWaitUrl = (String) ConfigurationUtil
.getConfiguration(Arrays.asList(new String[] {
"TRANSCEND_URL", instance.getAvailabilityZone() }));
final String servletUrl = (String) ConfigurationUtil
.getConfiguration(Arrays.asList(new String[] {
"SERVLET_URL", instance.getAvailabilityZone() }));
final RDSConfigDatabagItem configDataBagItem = new RDSConfigDatabagItem(
"config", instance.getAllocatedStorage().toString(),
instance.getMasterUsername(),
instance.getMasterUserPassword(),
instance.getAutoMinorVersionUpgrade(),
instance.getEngine(), instance.getEngineVersion(),
instance.getDbName(), instance
.getBackupRetentionPeriod().toString(),
instance.getPreferredBackupWindow(),
instance.getPreferredMaintenanceWindow(), instance
.getPort().toString(), postWaitUrl, servletUrl,
instance.getDbinstanceId(), "rds." + ac.getId() + "."
+ instance.getDbinstanceId(), ac.getId(), instance.getDbinstanceClass(), "false");
final RDSParameterGroupDatabagItem parameterGroupDatabagItem = new RDSParameterGroupDatabagItem(
"parameters", pGrpRec);
parameterGroupDatabagItem.getParameters().remove("read_only");
parameterGroupDatabagItem.getParameters().put(
"read_only",
DatabagParameter.factory("boolean",
"" + instance.getRead_only(), true, "dynamic"));
parameterGroupDatabagItem.getParameters().remove("port");
parameterGroupDatabagItem.getParameters().put(
"port",
DatabagParameter.factory("integer",
"" + instance.getPort(), false, "static"));
final RDSDatabag bag = new RDSDatabag(configDataBagItem,
parameterGroupDatabagItem);
logger.debug("Databag: "
+ JsonUtil.toJsonPrettyPrintString(bag));
logger.debug("Regenerating the databag " + databagName);
ChefUtil.createDatabagItem(databagName, "config", bag.toJson());
}
if (forRebootPending != null && forRebootPending.size() > 0) {
// forRebootPending is now a list of static parameters and
// dynamic parameters with pending-reboot ApplyMethod
forRebootPending = ParameterGroupEntity
.modifyParamGroupWithPartialList(sess, pGrpRec,
forRebootPending, userId);
// code below may need to be rewritten for better performance;
// Hibernate may be useful to improve the snippet below
for (final RdsDbinstance instance : dbInstances) {
final List<RdsParameter> alreadyPending = instance
.getPendingRebootParameters();
if (alreadyPending == null || alreadyPending.size() == 0) {
instance.setPendingRebootParameters(forRebootPending);
// instance.setDbinstanceStatus(RDSUtilities.STATUS_MODIFYING);
sess.save(instance);
} else {
for (final RdsParameter newParam : forRebootPending) {
boolean found = false;
int i = 0;
while (!found && i < alreadyPending.size()) {
if (alreadyPending.get(i).getParameterName()
.equals(newParam.getParameterName())) {
alreadyPending.get(i).setParameterValue(
newParam.getParameterValue());
found = true;
}
++i;
}
if (!found) {
alreadyPending.add(newParam);
}
}
}
}
}
// build response document - returns DBParameterGroupName
resp.setDbParameterGroupName(grpName);
logger.debug("Committing all the changes...");
sess.getTransaction().commit();
} catch (final ErrorResponse rde) {
sess.getTransaction().rollback();
throw rde;
} catch (final Exception e) {
e.printStackTrace();
sess.getTransaction().rollback();
final String msg = "CreateInstance: Class: " + e.getClass()
+ "Msg:" + e.getMessage();
logger.error(msg);
throw RDSQueryFaults.InternalFailure();
} finally {
sess.close();
}
return resp.buildPartial();
}
}
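// --- Request-construction sketch (editor's addition, not part of the original source) --
// Illustrates the shape of the message this worker consumes. The builder and setter names
// below are assumptions inferred from the generated getters used above
// (getDbParameterGroupName(), getParametersList(), getParameterName(), getParameterValue(),
// getApplyMethod()), following the usual protobuf code-generation conventions; they are
// not confirmed by this file. Parameter values are hypothetical.
class ModifyDBParameterGroupRequestSketch {
	static ModifyDBParameterGroupActionRequestMessage buildSampleRequest() {
		final Parameter parameter = Parameter.newBuilder()
				.setParameterName("max_connections") // hypothetical parameter name
				.setParameterValue("500")
				.setApplyMethod("immediate") // "pending-reboot" is required for static parameters
				.build();
		return ModifyDBParameterGroupActionRequestMessage.newBuilder()
				.setDbParameterGroupName("my-param-group") // hypothetical group name
				.addParameters(parameter)
				.build();
	}
}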
| TranscendComputing/TopStackRDS | src/com/transcend/rds/worker/ModifyDBParameterGroupActionWorker.java | Java | apache-2.0 | 11,169 |
/*
* Copyright (C) 2014 Johannes Donath <johannesd@evil-co.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.evilco.emulator.extension.chip8;
import org.evilco.emulator.ui_old.extension.AbstractEmulatorExtension;
import org.evilco.emulator.ui_old.extension.InterfaceExtensionManager;
/**
* @author Johannes Donath <johannesd@evil-co.com>
* @copyright Copyright (C) 2014 Evil-Co <http://www.evil-co.com>
*/
public class Chip8Extension extends AbstractEmulatorExtension {
/**
* {@inheritDoc}
*/
@Override
public String getIdentifier () {
return "org.evilco.emulator.extension.chip8";
}
/**
* {@inheritDoc}
*/
@Override
public void onEnable (InterfaceExtensionManager extensionManager) {
super.onEnable (extensionManager);
extensionManager.registerExtension (this, "c8", Chip8Emulator.class);
}
} | Evil-Co-Legacy/CyborgEmulator | extension/chip8/src/main/java/org/evilco/emulator/extension/chip8/Chip8Extension.java | Java | apache-2.0 | 1,398 |
package com.fordprog.matrix.interpreter.type;
public enum Type {
RATIONAL,
MATRIX,
FUNCTION,
VOID
}
| daergoth/MatrixC | src/main/java/com/fordprog/matrix/interpreter/type/Type.java | Java | apache-2.0 | 115 |
/*
* Copyright 2016 peter.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package onl.area51.filesystem.io;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import org.kohsuke.MetaInfServices;
/**
 * A flat FileSystem which locally matches its structure
*/
@MetaInfServices(FileSystemIO.class)
public class Flat
extends LocalFileSystemIO
{
public Flat( Path basePath,
Map<String, ?> env )
{
super( basePath, env );
}
@Override
protected String getPath( char[] path )
throws IOException
{
return String.valueOf( path );
}
}
| peter-mount/filesystem | filesystem-core/src/main/java/onl/area51/filesystem/io/Flat.java | Java | apache-2.0 | 1,156 |
/*
* @author Flavio Keller
*
* Copyright 2014 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.sna.constants;
/**
 * Enumeration of the different analysis classes that can occur
 * when running an SNA algorithm.
*/
public enum SNAClassNames {
DEGREE("Degree"), PAGERANK("PageRank"), CLOSENESS("Closeness"), BETWEENNESS("Betweenness"), PATH("Path"), LOCALCLUSTERCOEFFICIENT(
"LocalClusterCoefficient"), TRIADCENSUS("Triad Census"), LABELPROPAGATION(
"Label Propagation") ;
private final String className;
SNAClassNames(String name) {
this.className = name;
}
}
| fkzrh/signal-collect-sna | src/main/java/com/signalcollect/sna/constants/SNAClassNames.java | Java | apache-2.0 | 1,155 |
/***************************************************************************
* Copyright 2015 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.test.common.junit.record.flow.trace.concurrency.monitor;
import java.nio.ByteBuffer;
import org.junit.Assert;
import org.junit.Test;
import kieker.common.record.flow.trace.concurrency.monitor.MonitorNotifyEvent;
import kieker.common.util.registry.IRegistry;
import kieker.common.util.registry.Registry;
import kieker.test.common.junit.AbstractKiekerTest;
/**
* @author Jan Waller
*
* @since 1.8
*/
public class TestMonitorNotifyEvent extends AbstractKiekerTest {
private static final long TSTAMP = 987998L;
private static final long TRACE_ID = 23444L;
private static final int ORDER_INDEX = 234;
private static final int LOCK_ID = 13;
/**
* Default constructor.
*/
public TestMonitorNotifyEvent() {
// empty default constructor
}
/**
* Tests the constructor and toArray(..) methods of {@link MonitorNotifyEvent}.
*
* Assert that a record instance event1 equals an instance event2 created by serializing event1 to an array event1Array
* and using event1Array to construct event2. This ignores a set loggingTimestamp!
*/
@Test
public void testSerializeDeserializeEquals() {
final MonitorNotifyEvent event1 = new MonitorNotifyEvent(TSTAMP, TRACE_ID, ORDER_INDEX, LOCK_ID);
Assert.assertEquals("Unexpected timestamp", TSTAMP, event1.getTimestamp());
Assert.assertEquals("Unexpected trace ID", TRACE_ID, event1.getTraceId());
Assert.assertEquals("Unexpected order index", ORDER_INDEX, event1.getOrderIndex());
Assert.assertEquals("Unexpected lock id", LOCK_ID, event1.getLockId());
final Object[] event1Array = event1.toArray();
final MonitorNotifyEvent event2 = new MonitorNotifyEvent(event1Array);
Assert.assertEquals(event1, event2);
Assert.assertEquals(0, event1.compareTo(event2));
}
/**
* Tests the constructor and writeBytes(..) methods of {@link MonitorNotifyEvent}.
*/
@Test
public void testSerializeDeserializeBinaryEquals() {
final MonitorNotifyEvent event1 = new MonitorNotifyEvent(TSTAMP, TRACE_ID, ORDER_INDEX, LOCK_ID);
Assert.assertEquals("Unexpected timestamp", TSTAMP, event1.getTimestamp());
Assert.assertEquals("Unexpected trace ID", TRACE_ID, event1.getTraceId());
Assert.assertEquals("Unexpected order index", ORDER_INDEX, event1.getOrderIndex());
Assert.assertEquals("Unexpected lock id", LOCK_ID, event1.getLockId());
final IRegistry<String> stringRegistry = new Registry<String>();
final ByteBuffer buffer = ByteBuffer.allocate(event1.getSize());
event1.writeBytes(buffer, stringRegistry);
buffer.flip();
final MonitorNotifyEvent event2 = new MonitorNotifyEvent(buffer, stringRegistry);
Assert.assertEquals(event1, event2);
Assert.assertEquals(0, event1.compareTo(event2));
}
}
| HaStr/kieker | kieker-common/test/kieker/test/common/junit/record/flow/trace/concurrency/monitor/TestMonitorNotifyEvent.java | Java | apache-2.0 | 3,493 |
/**
* Copyright © 2013 enioka. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.enioka.jqm.api.test;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;
import com.enioka.jqm.api.JqmClientFactory;
import com.enioka.jqm.api.Query;
import com.enioka.jqm.api.Query.Sort;
import com.enioka.jqm.api.State;
import com.enioka.jqm.jdbc.Db;
import com.enioka.jqm.jdbc.DbConn;
import com.enioka.jqm.model.Instruction;
import com.enioka.jqm.model.JobDef;
import com.enioka.jqm.model.JobDef.PathType;
import com.enioka.jqm.model.JobInstance;
import com.enioka.jqm.model.Queue;
/**
* Simple tests for checking query syntax (no data)
*/
public class BasicTest
{
private static Logger jqmlogger = Logger.getLogger(BasicTest.class);
@Test
public void testChain()
{
// No exception allowed!
JqmClientFactory.getClient().getQueues();
jqmlogger.info("q1");
JqmClientFactory.getClient().getQueues();
jqmlogger.info("q2");
}
@Test
public void testQuery()
{
Query q = new Query("toto", null);
q.setInstanceApplication("marsu");
q.setInstanceKeyword2("pouet");
q.setInstanceModule("module");
q.setParentId(12);
q.setJobInstanceId(132);
q.setQueryLiveInstances(true);
q.setJobDefKeyword2("pouet2");
JqmClientFactory.getClient().getJobs(q);
}
@Test
public void testQueryDate()
{
Query q = new Query("toto", null);
q.setInstanceApplication("marsu");
q.setInstanceKeyword2("pouet");
q.setInstanceModule("module");
q.setParentId(12);
q.setJobInstanceId(132);
q.setQueryLiveInstances(true);
q.setEnqueuedBefore(Calendar.getInstance());
q.setEndedAfter(Calendar.getInstance());
q.setBeganRunningAfter(Calendar.getInstance());
q.setBeganRunningBefore(Calendar.getInstance());
q.setEnqueuedAfter(Calendar.getInstance());
q.setEnqueuedBefore(Calendar.getInstance());
q.setJobDefKeyword2("pouet2");
JqmClientFactory.getClient().getJobs(q);
}
@Test
public void testQueryStatusOne()
{
Query q = new Query("toto", null);
q.setQueryLiveInstances(true);
q.setInstanceApplication("marsu");
q.addStatusFilter(State.CRASHED);
JqmClientFactory.getClient().getJobs(q);
}
@Test
public void testQueryStatusTwo()
{
Query q = new Query("toto", null);
q.setQueryLiveInstances(true);
q.setInstanceApplication("marsu");
q.addStatusFilter(State.CRASHED);
q.addStatusFilter(State.HOLDED);
JqmClientFactory.getClient().getJobs(q);
}
@Test
public void testFluentQuery()
{
Query q = new Query("toto", null);
q.setQueryLiveInstances(true);
q.setInstanceApplication("marsu");
q.addStatusFilter(State.CRASHED);
q.addStatusFilter(State.HOLDED);
JqmClientFactory.getClient().getJobs(Query.create().addStatusFilter(State.RUNNING).setApplicationName("MARSU"));
}
@Test
public void testQueryPercent()
{
JqmClientFactory.getClient().getJobs(Query.create().setApplicationName("%TEST"));
}
@Test
public void testQueryNull()
{
JqmClientFactory.getClient().getJobs(new Query("", null));
}
@Test
public void testQueueNameId()
{
Query.create().setQueueName("test").run();
Query.create().setQueueId(12).run();
}
@Test
public void testPaginationWithFilter()
{
Query.create().setQueueName("test").setPageSize(10).run();
Query.create().setQueueId(12).setPageSize(10).run();
}
@Test
public void testUsername()
{
Query.create().setUser("test").setPageSize(10).run();
}
@Test
public void testSortHistory()
{
Query.create().setUser("test").setPageSize(10).addSortAsc(Sort.APPLICATIONNAME).addSortDesc(Sort.DATEATTRIBUTION)
.addSortAsc(Sort.DATEEND).addSortDesc(Sort.DATEENQUEUE).addSortAsc(Sort.ID).addSortDesc(Sort.QUEUENAME)
.addSortAsc(Sort.STATUS).addSortDesc(Sort.USERNAME).addSortAsc(Sort.PARENTID).run();
}
@Test
public void testSortJi()
{
Query.create().setQueryHistoryInstances(false).setQueryLiveInstances(true).setUser("test").addSortAsc(Sort.APPLICATIONNAME)
.addSortDesc(Sort.DATEATTRIBUTION).addSortDesc(Sort.DATEENQUEUE).addSortAsc(Sort.ID).addSortDesc(Sort.QUEUENAME)
.addSortAsc(Sort.STATUS).addSortDesc(Sort.USERNAME).addSortAsc(Sort.PARENTID).run();
}
@Test
public void testOnlyQueue()
{
Query.create().setQueryLiveInstances(true).setQueryHistoryInstances(false).setUser("test").run();
}
@Test
public void testBug159()
{
Query.create().setJobInstanceId(1234).setQueryLiveInstances(true).setQueryHistoryInstances(false).setPageSize(15).setFirstRow(0)
.run();
}
@Test
public void testBug292()
{
Query.create().addSortDesc(Query.Sort.ID).setQueueName("QBATCH").setQueryHistoryInstances(true).setQueryLiveInstances(true).run();
}
@Test
public void testBug305()
{
Properties p = new Properties();
p.putAll(Db.loadProperties());
Db db = new Db(p);
DbConn cnx = null;
try
{
cnx = db.getConn();
int qId = Queue.create(cnx, "q1", "q1 description", true);
int jobDefdId = JobDef.create(cnx, "test description", "class", null, "jar", qId, 1, "appName", null, null, null, null, null,
false, null, PathType.FS);
JobInstance.enqueue(cnx, com.enioka.jqm.model.State.RUNNING, qId, jobDefdId, null, null, null, null, null, null, null, null,
null, false, false, null, 1, Instruction.RUN, new HashMap<String, String>());
JobInstance.enqueue(cnx, com.enioka.jqm.model.State.RUNNING, qId, jobDefdId, null, null, null, null, null, null, null, null,
null, false, false, null, 1, Instruction.RUN, new HashMap<String, String>());
cnx.commit();
Properties p2 = new Properties();
p2.put("com.enioka.jqm.jdbc.contextobject", db);
List<com.enioka.jqm.api.JobInstance> res = JqmClientFactory.getClient("test", p2, false)
.getJobs(Query.create().setQueryHistoryInstances(false).setQueryLiveInstances(true).addSortDesc(Query.Sort.ID)
.setPageSize(1).setApplicationName("appName"));
Assert.assertEquals(1, res.size());
}
finally
{
if (cnx != null)
{
cnx.closeQuietly(cnx);
}
}
}
}
| enioka/jqm | jqm-all/jqm-client/jqm-api-client-jdbc/src/test/java/com/enioka/jqm/api/test/BasicTest.java | Java | apache-2.0 | 7,479 |
/**
* Copyright (c) 2013-2020 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.iterator;
import java.util.AbstractMap;
import java.util.Map;
import java.util.Map.Entry;
/**
*
* @author Nikita Koksharov
*
* @param <V> value type
*/
public abstract class RedissonBaseMapIterator<V> extends BaseIterator<V, Entry<Object, Object>> {
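    /**
     * Wraps the underlying entry in a {@link AbstractMap.SimpleEntry} whose {@code setValue}
     * delegates to {@link #put(Entry, Object)}, so mutating the entry during iteration writes
     * the new value back to the map.
     */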
@SuppressWarnings("unchecked")
protected V getValue(Map.Entry<Object, Object> entry) {
return (V) new AbstractMap.SimpleEntry(entry.getKey(), entry.getValue()) {
@Override
public Object setValue(Object value) {
return put(entry, value);
}
};
}
protected abstract Object put(Entry<Object, Object> entry, Object value);
}
| mrniko/redisson | redisson/src/main/java/org/redisson/iterator/RedissonBaseMapIterator.java | Java | apache-2.0 | 1,297 |
package org.artifactory.ui.rest.resource.home;
import org.artifactory.api.security.AuthorizationService;
import org.artifactory.ui.rest.resource.BaseResource;
import org.artifactory.ui.rest.service.general.GeneralServiceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
* @author Chen keinan
*/
@Path("home")
@RolesAllowed({AuthorizationService.ROLE_ADMIN, AuthorizationService.ROLE_USER})
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class HomeResource extends BaseResource {
@Autowired
GeneralServiceFactory generalFactory;
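    /**
     * Returns the home page data as JSON, delegating to the general service factory.
     */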
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getHomeData()
throws Exception {
return runService(generalFactory.getHomePage());
}
}
| alancnet/artifactory | web/rest-ui/src/main/java/org/artifactory/ui/rest/resource/home/HomeResource.java | Java | apache-2.0 | 1,122 |
/*
* Licensed to Cloudkick, Inc ('Cloudkick') under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Cloudkick licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudkick;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.Toast;
public class LoginActivity extends Activity {
private static final int SETTINGS_ACTIVITY_ID = 0;
RelativeLayout loginView = null;
private String user = null;
private String pass = null;
private ProgressDialog progress = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.login);
setTitle("Cloudkick for Android");
findViewById(R.id.button_login).setOnClickListener(new LoginClickListener());
findViewById(R.id.button_signup).setOnClickListener(new SignupClickListener());
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == SETTINGS_ACTIVITY_ID) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
if (prefs.getString("editKey", "").equals("") && prefs.getString("editSecret", "").equals("")) {
finish();
}
else {
Intent result = new Intent();
result.putExtra("login", true);
setResult(Activity.RESULT_OK, result);
finish();
}
}
}
private class LoginClickListener implements View.OnClickListener {
public void onClick(View v) {
new AccountLister().execute();
}
}
private class SignupClickListener implements View.OnClickListener {
public void onClick(View v) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.cloudkick.com/pricing/")));
}
}
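	/** Background task that lists the Cloudkick accounts available for the entered username and password. */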
private class AccountLister extends AsyncTask<Void, Void, ArrayList<String>>{
private Integer statusCode = null;
@Override
protected void onPreExecute() {
user = ((EditText) findViewById(R.id.input_email)).getText().toString();
pass = ((EditText) findViewById(R.id.input_password)).getText().toString();
progress = ProgressDialog.show(LoginActivity.this, "", "Logging In...", true);
}
@Override
protected ArrayList<String> doInBackground(Void...voids) {
ArrayList<String> accounts = new ArrayList<String>();
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/list_accounts/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = rd.readLine()) != null) {
accounts.add(line);
Log.i("LoginActivity", line);
}
}
catch (Exception e) {
e.printStackTrace();
statusCode = 0;
}
return accounts;
}
@Override
protected void onPostExecute(ArrayList<String> accounts) {
switch (statusCode) {
case 200:
if (accounts.size() == 1) {
new KeyRetriever().execute(accounts.get(0));
}
else {
String[] tmpAccountArray = new String[accounts.size()];
final String[] accountArray = accounts.toArray(tmpAccountArray);
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("Select an Account");
builder.setItems(accountArray, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
new KeyRetriever().execute(accountArray[item]);
}
});
AlertDialog selectAccount = builder.create();
selectAccount.show();
}
break;
case 400:
progress.dismiss();
if (accounts.get(0).equals("You have enabled multi factor authentication for this account. To access the API key list, please visit the website.")) {
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("MFA is Enabled");
String mfaMessage = ("You appear to have multi-factor authentication enabled on your account. "
+ "You will need to manually create an API key with read permissions in the "
+ "web interface, then enter it directly in the settings panel.");
builder.setMessage(mfaMessage);
builder.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Intent settingsActivity = new Intent(getBaseContext(), Preferences.class);
startActivityForResult(settingsActivity, SETTINGS_ACTIVITY_ID);
}
});
AlertDialog mfaDialog = builder.create();
mfaDialog.show();
}
else {
Toast.makeText(LoginActivity.this, "Invalid Username or Password", Toast.LENGTH_LONG).show();
}
break;
default:
progress.dismiss();
Toast.makeText(LoginActivity.this, "An Error Occurred Retrieving Your Accounts", Toast.LENGTH_LONG).show();
};
}
}
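	/** Background task that creates a read-only API consumer key for the chosen account and stores it in the preferences. */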
private class KeyRetriever extends AsyncTask<String, Void, String[]>{
private Integer statusCode = null;
@Override
protected String[] doInBackground(String...accts) {
Log.i("LoginActivity", "Selected Account: " + accts[0]);
String[] creds = new String[2];
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/create_consumer/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
values.add(new BasicNameValuePair("account", accts[0]));
values.add(new BasicNameValuePair("system", "Cloudkick for Android"));
values.add(new BasicNameValuePair("perm_read", "True"));
values.add(new BasicNameValuePair("perm_write", "False"));
values.add(new BasicNameValuePair("perm_execute", "False"));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
Log.i("LoginActivity", "Return Code: " + statusCode);
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
for (int i = 0; i < 2; i++) {
line = rd.readLine();
if (line == null) {
return creds;
}
creds[i] = line;
}
}
catch (Exception e) {
statusCode = 0;
}
return creds;
}
@Override
protected void onPostExecute(String[] creds) {
progress.dismiss();
if (statusCode != 200) {
// Show short error messages - this is a dirty hack
				if (creds[0] != null && creds[0].startsWith("User with role")) {
					Toast.makeText(LoginActivity.this, creds[0], Toast.LENGTH_LONG).show();
					// Return here so the error text below is not stored as API credentials.
					return;
				}
else {
Toast.makeText(LoginActivity.this, "An Error Occurred on Login", Toast.LENGTH_LONG).show();
return;
}
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
SharedPreferences.Editor editor = prefs.edit();
editor.putString("editKey", creds[0]);
editor.putString("editSecret", creds[1]);
editor.commit();
Intent result = new Intent();
result.putExtra("login", true);
setResult(Activity.RESULT_OK, result);
LoginActivity.this.finish();
}
}
}
| cloudkick/cloudkick-android | src/com/cloudkick/LoginActivity.java | Java | apache-2.0 | 9,030 |
package ai.api.test;
/***********************************************************************************************************************
*
* API.AI Java SDK - client-side libraries for API.AI
* =================================================
*
* Copyright (C) 2014 by Speaktoit, Inc. (https://www.speaktoit.com)
* https://www.api.ai
*
***********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
***********************************************************************************************************************/
public class ProtocolProdTest extends ProtocolTestBase {
// Testing keys
protected static final String ACCESS_TOKEN = "3485a96fb27744db83e78b8c4bc9e7b7";
protected String getAccessToken() {
return ACCESS_TOKEN;
}
@Override
protected String getSecondAccessToken() {
return "968235e8e4954cf0bb0dc07736725ecd";
}
protected String getRuAccessToken(){
return "07806228a357411d83064309a279c7fd";
}
protected String getBrAccessToken(){
// TODO
return "";
}
protected String getPtBrAccessToken(){
return "42db6ad6a51c47088318a8104833b66c";
}
@Override
protected String getJaAccessToken() {
// TODO
return "";
}
}
| deternan/Weather-line-bot | libai/src/test/java/ai/api/test/ProtocolProdTest.java | Java | apache-2.0 | 1,887 |
package org.drools.persistence;
import javax.transaction.xa.XAResource;
public interface PersistenceManager {
XAResource getXAResource();
Transaction getTransaction();
void save();
void load();
} | bobmcwhirter/drools | drools-core/src/main/java/org/drools/persistence/PersistenceManager.java | Java | apache-2.0 | 232 |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.serialization;
import java.util.concurrent.Executor;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;
public class OioOioSocketCompatibleObjectStreamEchoTest extends AbstractSocketCompatibleObjectStreamEchoTest {
@Override
protected ChannelFactory newClientSocketChannelFactory(Executor executor) {
return new OioClientSocketChannelFactory(executor);
}
@Override
protected ChannelFactory newServerSocketChannelFactory(Executor executor) {
return new OioServerSocketChannelFactory(executor, executor);
}
}
| CliffYuan/netty | src/test/java/org/jboss/netty/handler/codec/serialization/OioOioSocketCompatibleObjectStreamEchoTest.java | Java | apache-2.0 | 1,368 |
package com.coolweather.android;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.ScrollingView;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class WeatherActivity extends AppCompatActivity {
private ScrollView weatherLayout;
private TextView titleCity;
private TextView titleUpdateTime;
private TextView degreeText;
private TextView weatherInfoText;
private LinearLayout forecastLayout;
private TextView aqiText;
private TextView pm25Text;
private TextView comfortText;
private TextView carWashText;
private TextView sportText;
private ImageView bingPicImg;
public SwipeRefreshLayout swipeRefreshLayout;
private String mWeatherId;
public DrawerLayout drawerLayout;
private Button navButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (Build.VERSION.SDK_INT >= 21) {
View decorView = getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
getWindow().setStatusBarColor(Color.TRANSPARENT);
}
setContentView(R.layout.activity_weather);
        // Initialize the views
weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
titleCity = (TextView) findViewById(R.id.title_city);
titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
degreeText = (TextView) findViewById(R.id.degree_text);
weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
aqiText = (TextView) findViewById(R.id.aqi_text);
pm25Text = (TextView) findViewById(R.id.pm25_text);
comfortText = (TextView) findViewById(R.id.comfort_text);
carWashText = (TextView) findViewById(R.id.car_wash_text);
sportText = (TextView) findViewById(R.id.sport_text);
bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
navButton = (Button) findViewById(R.id.nav_button);
swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);
swipeRefreshLayout.setColorSchemeResources(R.color.colorTopic);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
String weatherString = prefs.getString("weather", null);
if (weatherString != null) {
            // A cache exists, so parse the cached weather data directly
Weather weather = Utility.handleWeatherResponse(weatherString);
mWeatherId = weather.basic.weatherId;
showWeatherInfo(weather);
} else {
            // No cache, so query the server for the weather
mWeatherId = getIntent().getStringExtra("weather_id");
String weatherId = getIntent().getStringExtra("weather_id");
weatherLayout.setVisibility(View.INVISIBLE);
requestWeather(weatherId);
}
navButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
drawerLayout.openDrawer(GravityCompat.START);
}
});
swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
requestWeather(mWeatherId);
}
});
String bingPic = prefs.getString("bing_pic", null);
if (bingPic != null) {
Glide.with(this).load(bingPic).into(bingPicImg);
} else {
loadBingPic();
}
}
    /**
     * Requests the city weather information from the server for the given weather ID.
     */
public void requestWeather(final String weatherId) {
        String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key=04ae9fa43fb341b596f719aa6d6babda";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
@Override
public void onFailure(Call call, IOException e) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
swipeRefreshLayout.setRefreshing(false);
}
});
}
@Override
public void onResponse(Call call, Response response) throws IOException {
final String responseText = response.body().string();
final Weather weather = Utility.handleWeatherResponse(responseText);
runOnUiThread(new Runnable() {
@Override
public void run() {
if (weather != null && "ok".equals(weather.status)) {
SharedPreferences.Editor editor = PreferenceManager
.getDefaultSharedPreferences(WeatherActivity.this).edit();
editor.putString("weather", responseText);
editor.apply();
Toast.makeText(WeatherActivity.this, "成功更新最新天气", Toast.LENGTH_SHORT).show();
showWeatherInfo(weather);
} else {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
}
swipeRefreshLayout.setRefreshing(false);
}
});
}
});
loadBingPic();
}
private void loadBingPic() {
String requestBingPic = "http://guolin.tech/api/bing_pic";
HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
@Override
public void onFailure(Call call, IOException e) {
e.printStackTrace();
}
@Override
public void onResponse(Call call, Response response) throws IOException {
final String bingPic = response.body().string();
SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();
editor.putString("bing_pic", bingPic);
editor.apply();
runOnUiThread(new Runnable() {
@Override
public void run() {
Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
}
});
}
});
}
    /**
     * Processes and displays the data held in the Weather entity.
     */
private void showWeatherInfo(Weather weather) {
String cityName = weather.basic.cityName;
String updateTime = "更新时间: " + weather.basic.update.updateTime.split(" ")[1];
        String degree = weather.now.temperature + "°C";
String weatherInfo = weather.now.more.info;
titleCity.setText(cityName);
titleUpdateTime.setText(updateTime);
degreeText.setText(degree);
weatherInfoText.setText(weatherInfo);
forecastLayout.removeAllViews();
for (Forecast forecast : weather.forecastList) {
View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
TextView dateText = (TextView) view.findViewById(R.id.date_text);
TextView infoText = (TextView) view.findViewById(R.id.info_text);
TextView maxText = (TextView) view.findViewById(R.id.max_text);
TextView minText = (TextView) view.findViewById(R.id.min_text);
dateText.setText(forecast.date);
infoText.setText(forecast.more.info);
maxText.setText(forecast.temperature.max);
minText.setText(forecast.temperature.min);
forecastLayout.addView(view);
}
if (weather.aqi != null) {
aqiText.setText(weather.aqi.city.aqi);
pm25Text.setText(weather.aqi.city.pm25);
}
String comfort = "舒适度:" + weather.suggestion.comfort.info;
String catWash = "洗车指数:" + weather.suggestion.carWash.info;
String sport = "运动指数:" + weather.suggestion.sport.info;
comfortText.setText(comfort);
carWashText.setText(catWash);
sportText.setText(sport);
weatherLayout.setVisibility(View.VISIBLE);
if (weather != null && "ok".equals(weather.status)) {
Intent intent = new Intent(this, AutoUpdateService.class);
startService(intent);
} else {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
}
}
}
| MarkManYUN/coolweather | app/src/main/java/com/coolweather/android/WeatherActivity.java | Java | apache-2.0 | 9,815 |
package com.example.cdm.huntfun.activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.widget.TextView;
import com.example.cdm.huntfun.R;
import com.example.cdm.huntfun.photoView.ImageDetailFragment;
import com.example.cdm.huntfun.widget.HackyViewPager;
import java.util.List;
/**
 * Full-screen image viewer that pages through a list of image URLs.
 */
public class ImagePagerActivity extends FragmentActivity {
private static final String STATE_POSITION = "STATE_POSITION";
public static final String EXTRA_IMAGE_INDEX = "image_index";
public static final String EXTRA_IMAGE_URLS = "image_urls";
private HackyViewPager mPager;
private int pagerPosition;
private TextView indicator;
// public static Drawable DEFAULTDRAWABLE;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.umessage_image_detail_pager);
// DEFAULTDRAWABLE=this.getResources().getDrawable(R.drawable.umessage_load_default);
pagerPosition = getIntent().getIntExtra(EXTRA_IMAGE_INDEX, 0);
List<String> urls = getIntent().getStringArrayListExtra(EXTRA_IMAGE_URLS);
mPager = (HackyViewPager) findViewById(R.id.pager);
ImagePagerAdapter mAdapter = new ImagePagerAdapter(getSupportFragmentManager(), urls);
mPager.setAdapter(mAdapter);
indicator = (TextView) findViewById(R.id.indicator);
CharSequence text = getString(R.string.xq_viewpager_indicator, 1, mPager.getAdapter().getCount());
indicator.setText(text);
        // Update the page indicator as the user swipes
mPager.addOnPageChangeListener(new OnPageChangeListener() {
@Override
public void onPageScrollStateChanged(int arg0) {
}
@Override
public void onPageScrolled(int arg0, float arg1, int arg2) {
}
@Override
public void onPageSelected(int arg0) {
CharSequence text = getString(R.string.xq_viewpager_indicator, arg0 + 1, mPager.getAdapter().getCount());
indicator.setText(text);
}
});
if (savedInstanceState != null) {
pagerPosition = savedInstanceState.getInt(STATE_POSITION);
}
mPager.setCurrentItem(pagerPosition);
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putInt(STATE_POSITION, mPager.getCurrentItem());
}
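    /**
     * Pager adapter that supplies one ImageDetailFragment per image URL.
     */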
private class ImagePagerAdapter extends FragmentStatePagerAdapter {
public List<String> fileList;
public ImagePagerAdapter(FragmentManager fm, List<String> fileList) {
super(fm);
this.fileList = fileList;
}
@Override
public int getCount() {
return fileList == null ? 0 : fileList.size();
}
@Override
public Fragment getItem(int position) {
String url = fileList.get(position);
return ImageDetailFragment.newInstance(url);
}
}
}
| skycdm/HuntFun | app/src/main/java/com/example/cdm/huntfun/activity/ImagePagerActivity.java | Java | apache-2.0 | 2,872 |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.version.impl;
import net.sf.mmm.util.version.api.DevelopmentPhase;
import net.sf.mmm.util.version.api.VersionIdentifier;
/**
* This is the implementation of {@link net.sf.mmm.util.lang.api.Formatter} for the {@link DevelopmentPhase#getValue()
* value} of the {@link VersionIdentifier#getPhase() phase}.
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 3.0.0
*/
public class VersionIdentifierFormatterPhaseValue extends AbstractVersionIdentifierFormatterString {
/**
* The constructor.
*
* @param prefix is the static prefix to append before the {@link VersionIdentifier#getPhase() phase}. Will be omitted
* if {@link VersionIdentifier#getPhase() phase} is {@code null}.
* @param maximumLength is the maximum number of letters for the {@link VersionIdentifier#getPhase() phase}. The
* default is {@link Integer#MAX_VALUE}.
*/
public VersionIdentifierFormatterPhaseValue(String prefix, int maximumLength) {
super(prefix, maximumLength);
}
@Override
protected String getString(VersionIdentifier value) {
DevelopmentPhase phase = value.getPhase();
if (phase != null) {
return phase.getValue();
}
return null;
}
}
| m-m-m/util | version/src/main/java/net/sf/mmm/util/version/impl/VersionIdentifierFormatterPhaseValue.java | Java | apache-2.0 | 1,372 |
package Escape;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.awt.Insets;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import Escape.Controller.Controller;
import Escape.Model.Arena;
import Escape.Service.Service;
import Escape.View.Rank;
import Escape.View.View;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.LogManager;
/**
* The main class of the program.
*/
public class Escape extends JFrame {
static {
InputStream in = Escape.class.getResourceAsStream("/logging.properties");
if (in != null) {
try {
LogManager.getLogManager().readConfiguration(in);
} catch(IOException e) {
e.printStackTrace();
}
}
}
/**
* The <code>serialVersionUID</code> of the class.
*/
private static final long serialVersionUID = -3689415169655758824L;
/**
* The main JPanel of the <code>frame</code>.
*/
private JPanel contentPane;
/**
* The main <code>Arena</code> object of the program.
*/
private Arena arena;
/**
* Part of the Game tab, the main <code>View</code> object.
*/
private View view;
/**
* The main <code>Controller</code> object of the program.
*/
private Controller control;
/**
* Part of the Rank tab, the main <code>Rank</code> object.
*/
private Rank rank;
/**
* The name of the player.
* Default is "Guest".
*/
private String username = "Guest";
/**
* The password for the database.
*/
private String DAOpassword = "pwd";
/**
* Main method of the program.
* Creates the main JFrame object and asks the user to set <code>DAOpassword</code>
     * and <code>username</code> before starting the game.
*
* @param args command-line parameters
*/
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
Escape frame = new Escape();
frame.setVisible(true);
                    do {
                        frame.DAOpassword = JOptionPane.showInputDialog(frame, "Enter password for database!");
                    } while (frame.DAOpassword == null || frame.DAOpassword.equals("pwd"));
                    do {
                        frame.username = JOptionPane.showInputDialog(frame, "Enter your in-game name!");
                    } while (frame.username == null || frame.username.equals(""));
frame.rank.setDAOpassword(frame.DAOpassword);
frame.rank.refreshRank();
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Constructor for the main JFrame object.
     * Sets up the <code>frame</code> and initializes the <code>arena</code>, <code>view</code>,
     * <code>control</code> and <code>rank</code> variables, and adds the tabs.
     * Calls <code>createMenuBar</code> to add the menu.
*/
public Escape() {
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setTitle("Escape");
setBounds(300, 0, 0, 0);
pack();
Insets insets = getInsets();
setSize(new Dimension(insets.left + insets.right + 600,
insets.top + insets.bottom + 630));
contentPane = new JPanel();
contentPane.setBackground(Color.WHITE);
arena = new Arena(6, 600);
view = new View(arena);
control = new Controller(arena, view);
view.setControl(control);
rank = new Rank();
setContentPane(contentPane);
contentPane.setLayout(new BorderLayout());
createMenuBar();
JTabbedPane tabbedPane = new JTabbedPane();
tabbedPane.addTab("Game", view);
tabbedPane.addTab("Rank", rank);
tabbedPane.setFocusable(false);
contentPane.add(tabbedPane);
setLocationRelativeTo(view.getPlayer());
}
/**
     * Creates the menu bar and adds it to the main JFrame.
     * Creates the "New Game", "Save Game" and "Exit" items and
     * adds an ActionListener to each for control actions.
*/
private void createMenuBar() {
JMenuBar menubar = new JMenuBar();
JMenu file = new JMenu("File");
file.setMnemonic(KeyEvent.VK_F);
JMenuItem newGameMenuItem = new JMenuItem("New Game");
newGameMenuItem.setMnemonic(KeyEvent.VK_E);
newGameMenuItem.setToolTipText("Start a new game");
newGameMenuItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
Service.newGame(arena, control, view);
}
});
JMenuItem saveGameMenuItem = new JMenuItem("Save Game");
saveGameMenuItem.setMnemonic(KeyEvent.VK_E);
saveGameMenuItem.setToolTipText("Save the actual score and start a new game!");
saveGameMenuItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
System.out.println(control.getPlayerScore()+control.getEnemyScore());
Service.saveGame(control, username, DAOpassword);
Service.newGame(arena, control, view);
}
});
JMenuItem exitMenuItem = new JMenuItem("Exit");
exitMenuItem.setMnemonic(KeyEvent.VK_E);
exitMenuItem.setToolTipText("Exit application");
exitMenuItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
System.exit(0);
}
});
file.add(newGameMenuItem);
file.add(saveGameMenuItem);
file.add(exitMenuItem);
menubar.add(file);
setJMenuBar(menubar);
}
}
| Zakemi/Escape | src/main/java/Escape/Escape.java | Java | apache-2.0 | 5,644 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.influxdb;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.influxdb.InfluxDB;
/**
* The influxdb component allows you to interact with <a href="https://influxdata.com/time-series-platform/influxdb/">InfluxDB</a>, a time series database.
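 * <p>
 * For illustration only (the connection bean name and database below are hypothetical), a producer
 * endpoint URI built from the options declared in this class might look like
 * {@code influxdb:myInfluxConnection?databaseName=mySensorDb&retentionPolicy=default&operation=insert}.
 * </p>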
*/
@UriEndpoint(firstVersion = "2.18.0", scheme = "influxdb", title = "InfluxDB", syntax = "influxdb:connectionBean", label = "database", producerOnly = true)
public class InfluxDbEndpoint extends DefaultEndpoint {
private InfluxDB influxDB;
@UriPath
@Metadata(required = "true")
private String connectionBean;
@UriParam
private String databaseName;
@UriParam(defaultValue = "default")
private String retentionPolicy = "default";
@UriParam(defaultValue = "false")
private boolean batch;
@UriParam(defaultValue = InfluxDbOperations.INSERT)
private String operation = InfluxDbOperations.INSERT;
@UriParam
private String query;
public InfluxDbEndpoint(String uri, InfluxDbComponent component) {
super(uri, component);
}
@Override
public Producer createProducer() throws Exception {
return new InfluxDbProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
}
@Override
protected void doStart() throws Exception {
influxDB = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, InfluxDB.class);
log.debug("Resolved the connection with the name {} as {}", connectionBean, influxDB);
super.doStart();
}
@Override
protected void doStop() throws Exception {
super.doStop();
}
@Override
public boolean isSingleton() {
return true;
}
public InfluxDB getInfluxDB() {
return influxDB;
}
/**
* The Influx DB to use
*/
public void setInfluxDB(InfluxDB influxDB) {
this.influxDB = influxDB;
}
public String getDatabaseName() {
return databaseName;
}
/**
* The name of the database where the time series will be stored
*/
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName;
}
public String getRetentionPolicy() {
return retentionPolicy;
}
/**
* The string that defines the retention policy to the data created by the endpoint
*/
public void setRetentionPolicy(String retentionPolicy) {
this.retentionPolicy = retentionPolicy;
}
public String getConnectionBean() {
return connectionBean;
}
/**
* Connection to the influx database, of class InfluxDB.class
*/
public void setConnectionBean(String connectionBean) {
this.connectionBean = connectionBean;
}
public boolean isBatch() {
return batch;
}
/**
* Define if this operation is a batch operation or not
*/
public void setBatch(boolean batch) {
this.batch = batch;
}
public String getOperation() {
return operation;
}
/**
* Define if this operation is an insert or a query
*/
public void setOperation(String operation) {
this.operation = operation;
}
public String getQuery() {
return query;
}
/**
* Define the query in case of operation query
*/
public void setQuery(String query) {
this.query = query;
}
}
| kevinearls/camel | components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java | Java | apache-2.0 | 4,672 |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xiaomi.smarthome.common.ui.dialog;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import java.lang.ref.WeakReference;
import android.content.Context;
import android.content.DialogInterface;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.CursorAdapter;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import com.xiaomi.common.R;
public class MLAlertController {
private static final int BIT_BUTTON_POSITIVE = 1;
private static final int BIT_BUTTON_NEGATIVE = 2;
private static final int BIT_BUTTON_NEUTRAL = 4;
private final Context mContext;
private final DialogInterface mDialogInterface;
private final Window mWindow;
private CharSequence mTitle;
private CharSequence mMessage;
private ListView mListView;
private View mView;
private int mViewSpacingLeft;
private int mViewSpacingTop;
private int mViewSpacingRight;
private int mViewSpacingBottom;
private boolean mViewSpacingSpecified = false;
private Button mButtonPositive;
private CharSequence mButtonPositiveText;
private Message mButtonPositiveMessage;
private Button mButtonNegative;
private CharSequence mButtonNegativeText;
private Message mButtonNegativeMessage;
private Button mButtonNeutral;
private CharSequence mButtonNeutralText;
private Message mButtonNeutralMessage;
private ScrollView mScrollView;
private int mIconId = -1;
private Drawable mIcon;
private ImageView mIconView;
private TextView mTitleView;
private TextView mMessageView;
private View mCustomTitleView;
private boolean mForceInverseBackground;
private ListAdapter mAdapter;
private int mCheckedItem = -1;
private int mAlertDialogLayout;
private int mListLayout;
private int mListLayoutWithTitle;
private int mMultiChoiceItemLayout;
private int mSingleChoiceItemLayout;
private int mListItemLayout;
    // Added by afei: progress dialogs use Gravity.TOP, normal dialogs use Gravity.BOTTOM
private int mGravity;
private Handler mHandler;
private boolean mTransplantBg = false;
    private boolean mAutoDismiss = true; // whether the dialog is dismissed automatically after a button is clicked
private boolean mCustomBgTransplant = false;
View.OnClickListener mButtonHandler = new View.OnClickListener() {
public void onClick(View v) {
Message m = null;
if (v == mButtonPositive && mButtonPositiveMessage != null) {
m = Message.obtain(mButtonPositiveMessage);
} else if (v == mButtonNegative && mButtonNegativeMessage != null) {
m = Message.obtain(mButtonNegativeMessage);
} else if (v == mButtonNeutral && mButtonNeutralMessage != null) {
m = Message.obtain(mButtonNeutralMessage);
}
if (m != null) {
m.sendToTarget();
}
if (mAutoDismiss) {
// Post a message so we dismiss after the above handlers are
// executed
mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface)
.sendToTarget();
}
}
};
private static final class ButtonHandler extends Handler {
// Button clicks have Message.what as the BUTTON{1,2,3} constant
private static final int MSG_DISMISS_DIALOG = 1;
private WeakReference<DialogInterface> mDialog;
public ButtonHandler(DialogInterface dialog) {
mDialog = new WeakReference<DialogInterface>(dialog);
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case DialogInterface.BUTTON_POSITIVE:
case DialogInterface.BUTTON_NEGATIVE:
case DialogInterface.BUTTON_NEUTRAL:
((DialogInterface.OnClickListener) msg.obj).onClick(mDialog.get(), msg.what);
break;
case MSG_DISMISS_DIALOG:
((DialogInterface) msg.obj).dismiss();
}
}
}
public void sendDismissMessage() {
mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface).sendToTarget();
}
public MLAlertController(Context context, DialogInterface di, Window window) {
this(context, di, window, Gravity.BOTTOM);
}
public MLAlertController(Context context, DialogInterface di, Window window, int gravity) {
mContext = context;
mDialogInterface = di;
mWindow = window;
mHandler = new ButtonHandler(di);
mAlertDialogLayout = R.layout.ml_alert_dialog;
mListLayout = R.layout.ml_select_dialog;
mListLayoutWithTitle = R.layout.ml_select_dialog_center;
mMultiChoiceItemLayout = R.layout.ml_select_dialog_multichoice;
mSingleChoiceItemLayout = R.layout.ml_select_dialog_singlechoice;
mListItemLayout = R.layout.ml_select_dialog_item;
mGravity = gravity;
}
static boolean canTextInput(View v) {
if (v.onCheckIsTextEditor()) {
return true;
}
if (!(v instanceof ViewGroup)) {
return false;
}
ViewGroup vg = (ViewGroup) v;
int i = vg.getChildCount();
while (i > 0) {
i--;
v = vg.getChildAt(i);
if (canTextInput(v)) {
return true;
}
}
return false;
}
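    /**
     * Installs the dialog content into the window: requests a window without a title, applies the
     * configured gravity, blocks the soft keyboard when the custom view cannot take text input,
     * sets the alert dialog layout and then wires the views up via {@link #setupView()}.
     */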
public void installContent() {
/* We use a custom title so never request a window title */
mWindow.requestFeature(Window.FEATURE_NO_TITLE);
mWindow.setGravity(mGravity);
if (mView == null || !canTextInput(mView)) {
mWindow.setFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM,
WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
}
mWindow.setContentView(mAlertDialogLayout);
setupView();
}
public void setTitle(CharSequence title) {
mTitle = title;
if (mTitleView != null) {
mTitleView.setText(title);
}
}
/**
* @see android.app.AlertDialog.Builder#setCustomTitle(View)
*/
public void setCustomTitle(View customTitleView) {
mCustomTitleView = customTitleView;
}
public void setAudoDismiss(boolean autoDismiss) {
mAutoDismiss = autoDismiss;
}
public void setMessage(CharSequence message) {
mMessage = message;
if (mMessageView != null) {
mMessageView.setText(message);
}
}
/**
* Set the view to display in the dialog.
*/
public void setView(View view) {
mView = view;
mViewSpacingSpecified = false;
}
public void setCustomTransplant(boolean b) {
mCustomBgTransplant = b;
}
/**
* Set the view to display in the dialog along with the spacing around that
* view
*/
public void setView(View view, int viewSpacingLeft, int viewSpacingTop, int viewSpacingRight,
int viewSpacingBottom) {
mView = view;
mViewSpacingSpecified = true;
mViewSpacingLeft = viewSpacingLeft;
mViewSpacingTop = viewSpacingTop;
mViewSpacingRight = viewSpacingRight;
mViewSpacingBottom = viewSpacingBottom;
}
/**
* Sets a click listener or a message to be sent when the button is clicked.
* You only need to pass one of {@code listener} or {@code msg}.
*
* @param whichButton Which button, can be one of
* {@link DialogInterface#BUTTON_POSITIVE},
* {@link DialogInterface#BUTTON_NEGATIVE}, or
* {@link DialogInterface#BUTTON_NEUTRAL}
* @param text The text to display in positive button.
* @param listener The
* {@link DialogInterface.OnClickListener} to
* use.
* @param msg The {@link Message} to be sent when clicked.
*/
public void setButton(int whichButton, CharSequence text,
DialogInterface.OnClickListener listener, Message msg) {
if (msg == null && listener != null) {
msg = mHandler.obtainMessage(whichButton, listener);
}
switch (whichButton) {
case DialogInterface.BUTTON_POSITIVE:
mButtonPositiveText = text;
mButtonPositiveMessage = msg;
break;
case DialogInterface.BUTTON_NEGATIVE:
mButtonNegativeText = text;
mButtonNegativeMessage = msg;
break;
case DialogInterface.BUTTON_NEUTRAL:
mButtonNeutralText = text;
mButtonNeutralMessage = msg;
break;
default:
throw new IllegalArgumentException("Button does not exist");
}
}
/**
* Set resId to 0 if you don't want an icon.
*
* @param resId the resourceId of the drawable to use as the icon or 0 if
* you don't want an icon.
*/
public void setIcon(int resId) {
mIconId = resId;
if (mIconView != null) {
if (resId > 0) {
mIconView.setImageResource(mIconId);
} else if (resId == 0) {
mIconView.setVisibility(View.GONE);
}
}
}
public void setIcon(Drawable icon) {
mIcon = icon;
if ((mIconView != null) && (mIcon != null)) {
mIconView.setImageDrawable(icon);
}
}
public void setInverseBackgroundForced(boolean forceInverseBackground) {
mForceInverseBackground = forceInverseBackground;
}
public ListView getListView() {
return mListView;
}
public View getView() {
return mView;
}
public Button getButton(int whichButton) {
switch (whichButton) {
case DialogInterface.BUTTON_POSITIVE:
return mButtonPositive;
case DialogInterface.BUTTON_NEGATIVE:
return mButtonNegative;
case DialogInterface.BUTTON_NEUTRAL:
return mButtonNeutral;
default:
return null;
}
}
@SuppressWarnings({
"UnusedDeclaration"
})
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_MENU && mListView != null
&& mListView.getVisibility() == View.VISIBLE) {
this.mDialogInterface.dismiss();
}
return mScrollView != null && mScrollView.executeKeyEvent(event);
}
@SuppressWarnings({
"UnusedDeclaration"
})
public boolean onKeyUp(int keyCode, KeyEvent event) {
return mScrollView != null && mScrollView.executeKeyEvent(event);
}
private void setupView() {
LinearLayout contentPanel = (LinearLayout) mWindow.findViewById(R.id.contentPanel);
setupContent(contentPanel);
boolean hasButtons = setupButtons();
LinearLayout topPanel = (LinearLayout) mWindow.findViewById(R.id.topPanel);
boolean hasTitle = setupTitle(topPanel);
View buttonPanel = mWindow.findViewById(R.id.buttonPanel);
if (!hasButtons) {
buttonPanel.setVisibility(View.GONE);
}
FrameLayout customPanel = (FrameLayout) mWindow.findViewById(R.id.customPanel);
if (mView != null) {
            // Transparent background for the custom dialog view
// mWindow.findViewById(R.id.parentPanel).setBackgroundColor(mContext.getResources().getColor(android.R.color.transparent));
FrameLayout custom = (FrameLayout) mWindow.findViewById(R.id.custom);
custom.addView(mView);
if (mViewSpacingSpecified) {
custom.setPadding(mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
mViewSpacingBottom);
if (mCustomBgTransplant)
mTransplantBg = true;
}
if (mListView != null) {
((LinearLayout.LayoutParams) customPanel.getLayoutParams()).weight = 0;
}
} else {
customPanel.setVisibility(View.GONE);
}
if (mTransplantBg) {
mWindow.findViewById(R.id.parentPanel).setBackgroundColor(
mContext.getResources().getColor(android.R.color.transparent));
} else {
// mWindow.findViewById(R.id.parentPanel).setBackgroundColor(0xffffffff);
}
if (mListView != null) {
            // The ListView has its own item dividers, so show the title divider lines above and below it
mWindow.findViewById(R.id.title_divider_line).setVisibility(View.VISIBLE);
mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.VISIBLE);
} else {
mWindow.findViewById(R.id.title_divider_line).setVisibility(View.GONE);
mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.GONE);
}
/**
* Add margin top for the button panel if we have not any panel
*/
if (topPanel.getVisibility() == View.GONE && contentPanel.getVisibility() == View.GONE
&& customPanel.getVisibility() == View.GONE && hasButtons) {
buttonPanel.setPadding(buttonPanel.getPaddingLeft(), buttonPanel.getPaddingBottom(),
buttonPanel.getPaddingRight(), buttonPanel.getPaddingBottom());
}
/*
* Only display the divider if we have a title and a custom view or a
* message.
*/
if (hasTitle) {
// View divider = null;
// if (mMessage != null || mView != null || mListView != null) {
// divider = mWindow.findViewById(R.id.titleDivider);
// } else {
// divider = mWindow.findViewById(R.id.titleDividerTop);
// }
//
// if (divider != null) {
// divider.setVisibility(View.VISIBLE);
// }
}
setBackground(topPanel, contentPanel, customPanel, hasButtons, hasTitle, buttonPanel);
if (TextUtils.isEmpty(mTitle) && TextUtils.isEmpty(mMessage)) {
mWindow.findViewById(R.id.empty_view).setVisibility(View.GONE);
}
}
private boolean setupTitle(LinearLayout topPanel) {
boolean hasTitle = true;
if (mCustomTitleView != null) {
// Add the custom title view directly to the topPanel layout
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
topPanel.addView(mCustomTitleView, 0, lp);
// Hide the title template
View titleTemplate = mWindow.findViewById(R.id.title_template);
titleTemplate.setVisibility(View.GONE);
} else {
final boolean hasTextTitle = !TextUtils.isEmpty(mTitle);
mIconView = (ImageView) mWindow.findViewById(R.id.icon);
if (hasTextTitle) {
/* Display the title if a title is supplied, else hide it */
mTitleView = (TextView) mWindow.findViewById(R.id.alertTitle);
mTitleView.setText(mTitle);
/*
* Do this last so that if the user has supplied any icons we
* use them instead of the default ones. If the user has
* specified 0 then make it disappear.
*/
if (mIconId > 0) {
mIconView.setImageResource(mIconId);
} else if (mIcon != null) {
mIconView.setImageDrawable(mIcon);
} else if (mIconId == 0) {
/*
* Apply the padding from the icon to ensure the title is
* aligned correctly.
*/
mTitleView.setPadding(mIconView.getPaddingLeft(),
mIconView.getPaddingTop(),
mIconView.getPaddingRight(),
mIconView.getPaddingBottom());
mIconView.setVisibility(View.GONE);
}
} else {
// Hide the title template
View titleTemplate = mWindow.findViewById(R.id.title_template);
titleTemplate.setVisibility(View.GONE);
mIconView.setVisibility(View.GONE);
topPanel.setVisibility(View.GONE);
hasTitle = false;
}
}
return hasTitle;
}
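    /**
     * Binds the message area: shows the message text when one is supplied; otherwise removes the
     * scroll view and, if a list was configured, puts the ListView in its place.
     */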
private void setupContent(LinearLayout contentPanel) {
mScrollView = (ScrollView) mWindow.findViewById(R.id.scrollView);
mScrollView.setFocusable(false);
// Special case for users that only want to display a String
mMessageView = (TextView) mWindow.findViewById(R.id.message);
if (mMessageView == null) {
return;
}
if (mMessage != null) {
mMessageView.setText(mMessage);
} else {
mMessageView.setVisibility(View.GONE);
mScrollView.removeView(mMessageView);
if (mListView != null) {
contentPanel.removeView(mWindow.findViewById(R.id.scrollView));
contentPanel.addView(mListView,
new LinearLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT));
contentPanel.setLayoutParams(new LinearLayout.LayoutParams(MATCH_PARENT, 0, 1.0f));
} else {
contentPanel.setVisibility(View.GONE);
}
}
}
private boolean setupButtons() {
int whichButtons = 0;
mButtonPositive = (Button) mWindow.findViewById(R.id.button1);
mButtonPositive.setOnClickListener(mButtonHandler);
if (TextUtils.isEmpty(mButtonPositiveText)) {
mButtonPositive.setVisibility(View.GONE);
} else {
mButtonPositive.setText(mButtonPositiveText);
mButtonPositive.setVisibility(View.VISIBLE);
whichButtons = whichButtons | BIT_BUTTON_POSITIVE;
}
mButtonNegative = (Button) mWindow.findViewById(R.id.button2);
mButtonNegative.setOnClickListener(mButtonHandler);
if (TextUtils.isEmpty(mButtonNegativeText)) {
mButtonNegative.setVisibility(View.GONE);
} else {
mButtonNegative.setText(mButtonNegativeText);
mButtonNegative.setVisibility(View.VISIBLE);
whichButtons = whichButtons | BIT_BUTTON_NEGATIVE;
}
mButtonNeutral = (Button) mWindow.findViewById(R.id.button3);
mButtonNeutral.setOnClickListener(mButtonHandler);
if (TextUtils.isEmpty(mButtonNeutralText)) {
mButtonNeutral.setVisibility(View.GONE);
} else {
mButtonNeutral.setText(mButtonNeutralText);
mButtonNeutral.setVisibility(View.VISIBLE);
whichButtons = whichButtons | BIT_BUTTON_NEUTRAL;
}
if (shouldCenterSingleButton(whichButtons)) {
if (whichButtons == BIT_BUTTON_POSITIVE) {
centerButton(mButtonPositive);
} else if (whichButtons == BIT_BUTTON_NEGATIVE) {
centerButton(mButtonNegative);
} else if (whichButtons == BIT_BUTTON_NEUTRAL) {
centerButton(mButtonNeutral);
}
}
return whichButtons != 0;
}
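    /**
     * Returns true when exactly one button bit is set, i.e. only a single button is visible and
     * should be centered by {@link #centerButton(TextView)}.
     */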
private static boolean shouldCenterSingleButton(int whichButton) {
return whichButton == BIT_BUTTON_POSITIVE
|| whichButton == BIT_BUTTON_NEGATIVE
|| whichButton == BIT_BUTTON_NEUTRAL;
}
private void centerButton(TextView button) {
LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) button.getLayoutParams();
params.gravity = Gravity.CENTER_HORIZONTAL;
params.weight = 0.5f;
button.setLayoutParams(params);
button.setBackgroundResource(R.drawable.common_button);
}
private void setBackground(LinearLayout topPanel, LinearLayout contentPanel,
View customPanel, boolean hasButtons, boolean hasTitle,
View buttonPanel) {
if (mTransplantBg) {
/* Get all the different background required */
int fullDark = mContext.getResources().getColor(android.R.color.transparent);
int topDark = mContext.getResources().getColor(android.R.color.transparent);
int centerDark = mContext.getResources().getColor(android.R.color.transparent);
int bottomDark = mContext.getResources().getColor(android.R.color.transparent);
int fullBright = mContext.getResources().getColor(android.R.color.transparent);
int topBright = mContext.getResources().getColor(android.R.color.transparent);
int centerBright = mContext.getResources().getColor(android.R.color.transparent);
int bottomBright = mContext.getResources().getColor(android.R.color.transparent);
int bottomMedium = mContext.getResources().getColor(android.R.color.transparent);
/*
* We now set the background of all of the sections of the alert.
* First collect together each section that is being displayed along
* with whether it is on a light or dark background, then run
* through them setting their backgrounds. This is complicated
* because we need to correctly use the full, top, middle, and
* bottom graphics depending on how many views they are and where
* they appear.
*/
View[] views = new View[4];
boolean[] light = new boolean[4];
View lastView = null;
boolean lastLight = false;
int pos = 0;
if (hasTitle) {
views[pos] = topPanel;
light[pos] = false;
pos++;
}
/*
* The contentPanel displays either a custom text message or a
* ListView. If it's text we should use the dark background for
* ListView we should use the light background. If neither are there
* the contentPanel will be hidden so set it as null.
*/
views[pos] = (contentPanel.getVisibility() == View.GONE)
? null : contentPanel;
light[pos] = mListView != null;
pos++;
if (customPanel != null) {
views[pos] = customPanel;
light[pos] = mForceInverseBackground;
pos++;
}
if (hasButtons) {
views[pos] = buttonPanel;
light[pos] = true;
}
boolean setView = false;
for (pos = 0; pos < views.length; pos++) {
View v = views[pos];
if (v == null) {
continue;
}
if (lastView != null) {
if (!setView) {
lastView.setBackgroundResource(lastLight ? topBright : topDark);
} else {
lastView.setBackgroundResource(lastLight ? centerBright : centerDark);
}
setView = true;
}
lastView = v;
lastLight = light[pos];
}
if (lastView != null) {
if (setView) {
/*
* ListViews will use the Bright background but buttons use
* the Medium background.
*/
lastView.setBackgroundResource(
lastLight ? (hasButtons ? bottomMedium : bottomBright) : bottomDark);
} else {
lastView.setBackgroundResource(lastLight ? fullBright : fullDark);
}
}
}
if ((mListView != null) && (mAdapter != null)) {
mListView.setAdapter(mAdapter);
if (mCheckedItem > -1) {
mListView.setItemChecked(mCheckedItem, true);
mListView.setSelection(mCheckedItem);
}
}
}
public static class RecycleListView extends ListView {
boolean mRecycleOnMeasure = true;
public RecycleListView(Context context) {
super(context);
}
public RecycleListView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public RecycleListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
protected boolean recycleOnMeasure() {
return mRecycleOnMeasure;
}
}
public static class AlertParams {
public final Context mContext;
public final LayoutInflater mInflater;
public int mIconId = 0;
public Drawable mIcon;
public CharSequence mTitle;
public View mCustomTitleView;
public CharSequence mMessage;
public CharSequence mPositiveButtonText;
public DialogInterface.OnClickListener mPositiveButtonListener;
public CharSequence mNegativeButtonText;
public DialogInterface.OnClickListener mNegativeButtonListener;
public CharSequence mNeutralButtonText;
public DialogInterface.OnClickListener mNeutralButtonListener;
public boolean mCancelable;
public DialogInterface.OnCancelListener mOnCancelListener;
public DialogInterface.OnKeyListener mOnKeyListener;
public CharSequence[] mItems;
public ListAdapter mAdapter;
public DialogInterface.OnClickListener mOnClickListener;
public View mView;
public int mViewSpacingLeft;
public int mViewSpacingTop;
public int mViewSpacingRight;
public int mViewSpacingBottom;
public boolean mViewSpacingSpecified = false;
public boolean[] mCheckedItems;
public boolean mIsMultiChoice;
public boolean mIsSingleChoice;
public int mCheckedItem = -1;
public DialogInterface.OnMultiChoiceClickListener mOnCheckboxClickListener;
public Cursor mCursor;
public String mLabelColumn;
public String mIsCheckedColumn;
public boolean mForceInverseBackground;
public AdapterView.OnItemSelectedListener mOnItemSelectedListener;
public OnPrepareListViewListener mOnPrepareListViewListener;
public boolean mRecycleOnMeasure = true;
public boolean mAutoDismiss = true;
public MLAlertDialog.DismissCallBack mDismissCallBack;
public CharSequence mCustomTitle;
public boolean mCustomBgTransplant = false;
/**
* Interface definition for a callback to be invoked before the ListView
* will be bound to an adapter.
*/
public interface OnPrepareListViewListener {
/**
* Called before the ListView is bound to an adapter.
*
* @param listView The ListView that will be shown in the dialog.
*/
void onPrepareListView(ListView listView);
}
public AlertParams(Context context) {
mContext = context;
mCancelable = true;
mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
}
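        /**
         * Copies the collected parameters (title, icon, message, buttons, list content and custom
         * view) onto the given controller, choosing the centered or the bottom list layout based on
         * the dialog gravity.
         */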
public void apply(MLAlertController dialog) {
if (mCustomTitleView != null) {
dialog.setCustomTitle(mCustomTitleView);
} else {
if (mTitle != null) {
dialog.setTitle(mTitle);
}
if (mIcon != null) {
dialog.setIcon(mIcon);
}
if (mIconId >= 0) {
dialog.setIcon(mIconId);
}
}
if (mMessage != null) {
dialog.setMessage(mMessage);
}
if (mPositiveButtonText != null) {
dialog.setButton(DialogInterface.BUTTON_POSITIVE, mPositiveButtonText,
mPositiveButtonListener, null);
}
if (mNegativeButtonText != null) {
dialog.setButton(DialogInterface.BUTTON_NEGATIVE, mNegativeButtonText,
mNegativeButtonListener, null);
}
if (mNeutralButtonText != null) {
dialog.setButton(DialogInterface.BUTTON_NEUTRAL, mNeutralButtonText,
mNeutralButtonListener, null);
}
if (mForceInverseBackground) {
dialog.setInverseBackgroundForced(true);
}
// For a list, the client can either supply an array of items or an
// adapter or a cursor
dialog.mTransplantBg = false;
if ((mItems != null) || (mCursor != null) || (mAdapter != null)) {
if (dialog.mGravity == Gravity.CENTER) {
createCenterListView(dialog);
} else {
createListView(dialog);
}
}
if (mView != null) {
if (mViewSpacingSpecified) {
dialog.setView(mView, mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
mViewSpacingBottom);
} else {
dialog.setView(mView);
}
}
dialog.setAudoDismiss(mAutoDismiss);
dialog.setCustomTransplant(mCustomBgTransplant);
}
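        /**
         * Builds the list content for a dialog shown with {@link Gravity#CENTER}: inflates the
         * centered list layout with an optional custom title, installs the adapter and the
         * click/selection listeners, and hands the resulting view to the dialog.
         */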
private void createCenterListView(final MLAlertController dialog) {
final LinearLayout customView = (LinearLayout)
mInflater.inflate(dialog.mListLayoutWithTitle, null);
final RecycleListView listView = (RecycleListView) customView
.findViewById(R.id.select_dialog_listview);
ListAdapter adapter;
int layout = R.layout.ml_center_item;
if (mCursor == null) {
adapter = (mAdapter != null) ? mAdapter
: new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
} else {
adapter = new SimpleCursorAdapter(mContext, layout,
mCursor, new String[] {
mLabelColumn
}, new int[] {
R.id.text1
});
}
if (mCustomTitle != null) {
((TextView) (customView.findViewById(R.id.title))).setText(mCustomTitle);
}
if (mOnPrepareListViewListener != null) {
mOnPrepareListViewListener.onPrepareListView(listView);
}
/*
* Don't directly set the adapter on the ListView as we might want
* to add a footer to the ListView later.
*/
dialog.mAdapter = adapter;
listView.setAdapter(adapter);
dialog.mCheckedItem = mCheckedItem;
if (mOnClickListener != null) {
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView parent, View v, int position, long id) {
mOnClickListener.onClick(dialog.mDialogInterface, position);
if (!mIsSingleChoice) {
dialog.mDialogInterface.dismiss();
}
}
});
} else if (mOnCheckboxClickListener != null) {
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView parent, View v, int position, long id) {
if (mCheckedItems != null) {
mCheckedItems[position] = listView.isItemChecked(position);
}
mOnCheckboxClickListener.onClick(
dialog.mDialogInterface, position, listView.isItemChecked(position));
}
});
}
// Attach a given OnItemSelectedListener to the ListView
if (mOnItemSelectedListener != null) {
listView.setOnItemSelectedListener(mOnItemSelectedListener);
}
if (mIsSingleChoice) {
listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
} else if (mIsMultiChoice) {
listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
}
listView.mRecycleOnMeasure = mRecycleOnMeasure;
dialog.mView = customView;
dialog.mTransplantBg = true;
dialog.setCustomTransplant(mCustomBgTransplant);
}
private void createListView(final MLAlertController dialog) {
final RecycleListView listView = (RecycleListView)
mInflater.inflate(dialog.mListLayout, null);
ListAdapter adapter;
if (mIsMultiChoice) {
if (mCursor == null) {
adapter = new ArrayAdapter<CharSequence>(
mContext, dialog.mMultiChoiceItemLayout, R.id.text1, mItems) {
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View view = super.getView(position, convertView, parent);
if (mCheckedItems != null) {
boolean isItemChecked = mCheckedItems[position];
if (isItemChecked) {
listView.setItemChecked(position, true);
}
}
return view;
}
};
} else {
adapter = new CursorAdapter(mContext, mCursor, false) {
private final int mLabelIndex;
private final int mIsCheckedIndex;
{
final Cursor cursor = getCursor();
mLabelIndex = cursor.getColumnIndexOrThrow(mLabelColumn);
mIsCheckedIndex = cursor.getColumnIndexOrThrow(mIsCheckedColumn);
}
@Override
public void bindView(View view, Context context, Cursor cursor) {
CheckedTextView text = (CheckedTextView) view.findViewById(R.id.text1);
text.setText(cursor.getString(mLabelIndex));
listView.setItemChecked(cursor.getPosition(),
cursor.getInt(mIsCheckedIndex) == 1);
}
@Override
public View newView(Context context, Cursor cursor, ViewGroup parent) {
return mInflater.inflate(dialog.mMultiChoiceItemLayout,
parent, false);
}
};
}
} else {
int layout = mIsSingleChoice
? dialog.mSingleChoiceItemLayout : dialog.mListItemLayout;
if (mCursor == null) {
adapter = (mAdapter != null) ? mAdapter
: new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
} else {
adapter = new SimpleCursorAdapter(mContext, layout,
mCursor, new String[] {
mLabelColumn
}, new int[] {
R.id.text1
});
}
}
if (mOnPrepareListViewListener != null) {
mOnPrepareListViewListener.onPrepareListView(listView);
}
/*
* Don't directly set the adapter on the ListView as we might want
* to add a footer to the ListView later.
*/
dialog.mAdapter = adapter;
dialog.mCheckedItem = mCheckedItem;
if (mOnClickListener != null) {
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView parent, View v, int position, long id) {
mOnClickListener.onClick(dialog.mDialogInterface, position);
if (!mIsSingleChoice) {
dialog.mDialogInterface.dismiss();
}
}
});
} else if (mOnCheckboxClickListener != null) {
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView parent, View v, int position, long id) {
if (mCheckedItems != null) {
mCheckedItems[position] = listView.isItemChecked(position);
}
mOnCheckboxClickListener.onClick(
dialog.mDialogInterface, position, listView.isItemChecked(position));
}
});
}
// Attach a given OnItemSelectedListener to the ListView
if (mOnItemSelectedListener != null) {
listView.setOnItemSelectedListener(mOnItemSelectedListener);
}
if (mIsSingleChoice) {
listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
} else if (mIsMultiChoice) {
listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
}
listView.mRecycleOnMeasure = mRecycleOnMeasure;
dialog.mListView = listView;
dialog.setCustomTransplant(mCustomBgTransplant);
}
}
}
| Liyueyang/NewXmPluginSDK | common_ui/src/main/java/com/xiaomi/smarthome/common/ui/dialog/MLAlertController.java | Java | apache-2.0 | 39,780 |
/**
* Copyright 2011 Micheal Swiggs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.reddcoin.net.discovery;
import com.google.reddcoin.params.MainNetParams;
import org.junit.Test;
import java.net.InetSocketAddress;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
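/**
 * Sanity checks for {@link SeedPeers}: peers can be fetched one by one until the hard-coded seed
 * list is exhausted, and {@code getPeers} returns one address per seed entry.
 */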
public class SeedPeersTest {
@Test
public void getPeer_one() throws Exception{
SeedPeers seedPeers = new SeedPeers(MainNetParams.get());
assertThat(seedPeers.getPeer(), notNullValue());
}
@Test
public void getPeer_all() throws Exception{
SeedPeers seedPeers = new SeedPeers(MainNetParams.get());
for(int i = 0; i < SeedPeers.seedAddrs.length; ++i){
assertThat("Failed on index: "+i, seedPeers.getPeer(), notNullValue());
}
assertThat(seedPeers.getPeer(), equalTo(null));
}
@Test
public void getPeers_length() throws Exception{
SeedPeers seedPeers = new SeedPeers(MainNetParams.get());
InetSocketAddress[] addresses = seedPeers.getPeers(0, TimeUnit.SECONDS);
assertThat(addresses.length, equalTo(SeedPeers.seedAddrs.length));
}
}
| reddcoin-project/reddcoinj-pow | core/src/test/java/com/google/reddcoin/net/discovery/SeedPeersTest.java | Java | apache-2.0 | 1,795 |
/*
* Copyright 2007 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.InternalWorkingMemoryEntryPoint;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.util.Iterator;
import org.drools.core.util.ObjectHashSet.ObjectEntry;
import org.drools.core.reteoo.LeftInputAdapterNode.LiaNodeMemory;
import org.drools.core.reteoo.ObjectTypeNode.ObjectTypeNodeMemory;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.ObjectType;
import org.drools.core.spi.PropagationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A node that is an entry point into the Rete network.
*
* As we move the design to support network partitions and concurrent processing
* of parts of the network, we also need to support multiple, independent entry
* points and this class represents that.
*
* It replaces the function of the Rete Node class in previous designs.
*
* @see ObjectTypeNode
*/
public class EntryPointNode extends ObjectSource
implements
Externalizable,
ObjectSink {
// ------------------------------------------------------------
// Instance members
// ------------------------------------------------------------
private static final long serialVersionUID = 510l;
protected static transient Logger log = LoggerFactory.getLogger(EntryPointNode.class);
/**
* The entry point ID for this node
*/
private EntryPointId entryPoint;
/**
* The object type nodes under this node
*/
private Map<ObjectType, ObjectTypeNode> objectTypeNodes;
private ObjectTypeNode queryNode;
private ObjectTypeNode activationNode;
// ------------------------------------------------------------
// Constructors
// ------------------------------------------------------------
public EntryPointNode() {
}
public EntryPointNode(final int id,
final ObjectSource objectSource,
final BuildContext context) {
this( id,
context.getPartitionId(),
context.getKnowledgeBase().getConfiguration().isMultithreadEvaluation(),
objectSource,
context.getCurrentEntryPoint() ); // irrelevant for this node, since it overrides sink management
}
public EntryPointNode(final int id,
final RuleBasePartitionId partitionId,
final boolean partitionsEnabled,
final ObjectSource objectSource,
final EntryPointId entryPoint) {
super( id,
partitionId,
partitionsEnabled,
objectSource,
999 ); // irrelevant for this node, since it overrides sink management
this.entryPoint = entryPoint;
this.objectTypeNodes = new ConcurrentHashMap<ObjectType, ObjectTypeNode>();
}
// ------------------------------------------------------------
// Instance methods
// ------------------------------------------------------------
@SuppressWarnings("unchecked")
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
super.readExternal( in );
entryPoint = (EntryPointId) in.readObject();
objectTypeNodes = (Map<ObjectType, ObjectTypeNode>) in.readObject();
}
public void writeExternal(ObjectOutput out) throws IOException {
super.writeExternal( out );
out.writeObject( entryPoint );
out.writeObject( objectTypeNodes );
}
public short getType() {
return NodeTypeEnums.EntryPointNode;
}
/**
* @return the entryPoint
*/
public EntryPointId getEntryPoint() {
return entryPoint;
}
void setEntryPoint(EntryPointId entryPoint) {
this.entryPoint = entryPoint;
}
public void assertQuery(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( queryNode == null ) {
this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
}
if ( queryNode != null ) {
// There may be no queries defined
this.queryNode.assertObject( factHandle, context, workingMemory );
}
}
public void retractQuery(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( queryNode == null ) {
this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
}
if ( queryNode != null ) {
// There may be no queries defined
this.queryNode.retractObject( factHandle, context, workingMemory );
}
}
public void modifyQuery(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( queryNode == null ) {
this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
}
if ( queryNode != null ) {
ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this );
factHandle.clearLeftTuples();
factHandle.clearRightTuples();
// There may be no queries defined
this.queryNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory );
modifyPreviousTuples.retractTuples( context, workingMemory );
}
}
public ObjectTypeNode getQueryNode() {
if ( queryNode == null ) {
this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
}
return this.queryNode;
}
public void assertActivation(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( activationNode == null ) {
this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
}
if ( activationNode != null ) {
// There may be no queries defined
this.activationNode.assertObject( factHandle, context, workingMemory );
}
}
public void retractActivation(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( activationNode == null ) {
this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
}
if ( activationNode != null ) {
// There may be no queries defined
this.activationNode.retractObject( factHandle, context, workingMemory );
}
}
public void modifyActivation(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
if ( activationNode == null ) {
this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
}
if ( activationNode != null ) {
ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this );
factHandle.clearLeftTuples();
factHandle.clearRightTuples();
// There may be no queries defined
this.activationNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory );
modifyPreviousTuples.retractTuples( context, workingMemory );
}
}
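    /**
     * Propagates an inserted fact to every <code>ObjectTypeNode</code> cached in the given
     * <code>ObjectTypeConf</code> for this entry point.
     */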
public void assertObject(final InternalFactHandle handle,
final PropagationContext context,
final ObjectTypeConf objectTypeConf,
final InternalWorkingMemory workingMemory) {
if ( log.isTraceEnabled() ) {
log.trace( "Insert {}", handle.toString() );
}
ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
for ( int i = 0, length = cachedNodes.length; i < length; i++ ) {
cachedNodes[i].assertObject( handle,
context,
workingMemory );
}
}
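    /**
     * Propagates a fact update to every cached <code>ObjectTypeNode</code>, first detaching the
     * tuples from the previous propagation and retracting those that are no longer matched by the
     * node currently being visited.
     */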
public void modifyObject(final InternalFactHandle handle,
final PropagationContext pctx,
final ObjectTypeConf objectTypeConf,
final InternalWorkingMemory wm) {
if ( log.isTraceEnabled() ) {
log.trace( "Update {}", handle.toString() );
}
ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
// make a reference to the previous tuples, then null then on the handle
ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(handle.getFirstLeftTuple(), handle.getFirstRightTuple(), this );
handle.clearLeftTuples();
handle.clearRightTuples();
for ( int i = 0, length = cachedNodes.length; i < length; i++ ) {
cachedNodes[i].modifyObject( handle,
modifyPreviousTuples,
pctx, wm );
// remove any right tuples that matches the current OTN before continue the modify on the next OTN cache entry
if (i < cachedNodes.length - 1) {
RightTuple rightTuple = modifyPreviousTuples.peekRightTuple();
while ( rightTuple != null &&
(( BetaNode ) rightTuple.getRightTupleSink()).getObjectTypeNode() == cachedNodes[i] ) {
modifyPreviousTuples.removeRightTuple();
doRightDelete(pctx, wm, rightTuple);
rightTuple = modifyPreviousTuples.peekRightTuple();
}
LeftTuple leftTuple;
ObjectTypeNode otn;
while ( true ) {
leftTuple = modifyPreviousTuples.peekLeftTuple();
otn = null;
if (leftTuple != null) {
LeftTupleSink leftTupleSink = leftTuple.getLeftTupleSink();
if (leftTupleSink instanceof LeftTupleSource) {
otn = ((LeftTupleSource)leftTupleSink).getLeftTupleSource().getObjectTypeNode();
} else if (leftTupleSink instanceof RuleTerminalNode) {
otn = ((RuleTerminalNode)leftTupleSink).getObjectTypeNode();
}
}
if ( otn == null || otn == cachedNodes[i+1] ) break;
modifyPreviousTuples.removeLeftTuple();
doDeleteObject(pctx, wm, leftTuple);
}
}
}
modifyPreviousTuples.retractTuples( pctx, wm );
}
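    /**
     * Deletes a left tuple left over from a modify, going through the
     * <code>LeftInputAdapterNode</code> that created it and that node's segment memory.
     */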
public void doDeleteObject(PropagationContext pctx, InternalWorkingMemory wm, LeftTuple leftTuple) {
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) leftTuple.getLeftTupleSink().getLeftTupleSource();
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory( liaNode );
LeftInputAdapterNode.doDeleteObject( leftTuple, pctx, lm.getSegmentMemory(), wm, liaNode, true, lm );
}
public void doRightDelete(PropagationContext pctx, InternalWorkingMemory wm, RightTuple rightTuple) {
rightTuple.setPropagationContext( pctx );
rightTuple.getRightTupleSink().retractRightTuple( rightTuple, pctx, wm );
}
public void modifyObject(InternalFactHandle factHandle,
ModifyPreviousTuples modifyPreviousTuples,
PropagationContext context,
InternalWorkingMemory workingMemory) {
// this method was silently failing, so I am now throwing an exception to make
// sure no one calls it by mistake
throw new UnsupportedOperationException( "This method should NEVER EVER be called" );
}
/**
* This is the entry point into the network for all asserted Facts. Iterates a cache
     * of matching <code>ObjectTypeNode</code>s asserting the Fact. If the cache does not
* exist it first iterates and builds the cache.
*
* @param factHandle
* The FactHandle of the fact to assert
* @param context
* The <code>PropagationContext</code> of the <code>WorkingMemory</code> action
* @param workingMemory
* The working memory session.
*/
public void assertObject(final InternalFactHandle factHandle,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
// this method was silently failing, so I am now throwing an exception to make
// sure no one calls it by mistake
throw new UnsupportedOperationException( "This method should NEVER EVER be called" );
}
/**
* Retract a fact object from this <code>RuleBase</code> and the specified
* <code>WorkingMemory</code>.
*
* @param handle
* The handle of the fact to retract.
* @param workingMemory
* The working memory session.
*/
public void retractObject(final InternalFactHandle handle,
final PropagationContext context,
final ObjectTypeConf objectTypeConf,
final InternalWorkingMemory workingMemory) {
if ( log.isTraceEnabled() ) {
log.trace( "Delete {}", handle.toString() );
}
ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
if ( cachedNodes == null ) {
// it is possible that there are no ObjectTypeNodes for an object being retracted
return;
}
for ( int i = 0; i < cachedNodes.length; i++ ) {
cachedNodes[i].retractObject( handle,
context,
workingMemory );
}
}
/**
* Adds the <code>ObjectSink</code> so that it may receive
* <code>Objects</code> propagated from this <code>ObjectSource</code>.
*
* @param objectSink
* The <code>ObjectSink</code> to receive propagated
* <code>Objects</code>. Rete only accepts <code>ObjectTypeNode</code>s
* as parameters to this method, though.
*/
public void addObjectSink(final ObjectSink objectSink) {
final ObjectTypeNode node = (ObjectTypeNode) objectSink;
this.objectTypeNodes.put( node.getObjectType(),
node );
}
public void removeObjectSink(final ObjectSink objectSink) {
final ObjectTypeNode node = (ObjectTypeNode) objectSink;
this.objectTypeNodes.remove( node.getObjectType() );
}
public void attach( BuildContext context ) {
this.source.addObjectSink( this );
if (context == null ) {
return;
}
if ( context.getKnowledgeBase().getConfiguration().isPhreakEnabled() ) {
for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) {
workingMemory.updateEntryPointsCache();
}
return;
}
for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) {
workingMemory.updateEntryPointsCache();
PropagationContextFactory pctxFactory = workingMemory.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory();
final PropagationContext propagationContext = pctxFactory.createPropagationContext(workingMemory.getNextPropagationIdCounter(), PropagationContext.RULE_ADDITION, null, null, null);
this.source.updateSink( this,
propagationContext,
workingMemory );
}
}
protected void doRemove(final RuleRemovalContext context,
final ReteooBuilder builder,
final InternalWorkingMemory[] workingMemories) {
}
public Map<ObjectType, ObjectTypeNode> getObjectTypeNodes() {
return this.objectTypeNodes;
}
public int hashCode() {
return this.entryPoint.hashCode();
}
public boolean equals(final Object object) {
if ( object == this ) {
return true;
}
if ( object == null || !(object instanceof EntryPointNode) ) {
return false;
}
final EntryPointNode other = (EntryPointNode) object;
return this.entryPoint.equals( other.entryPoint );
}
public void updateSink(final ObjectSink sink,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
// @todo
// JBRULES-612: the cache MUST be invalidated when a new node type is added to the network, so iterate and reset all caches.
final ObjectTypeNode node = (ObjectTypeNode) sink;
final ObjectType newObjectType = node.getObjectType();
InternalWorkingMemoryEntryPoint wmEntryPoint = (InternalWorkingMemoryEntryPoint) workingMemory.getWorkingMemoryEntryPoint( this.entryPoint.getEntryPointId() );
for ( ObjectTypeConf objectTypeConf : wmEntryPoint.getObjectTypeConfigurationRegistry().values() ) {
if ( newObjectType.isAssignableFrom( objectTypeConf.getConcreteObjectTypeNode().getObjectType() ) ) {
objectTypeConf.resetCache();
ObjectTypeNode sourceNode = objectTypeConf.getConcreteObjectTypeNode();
Iterator it = ((ObjectTypeNodeMemory) workingMemory.getNodeMemory( sourceNode )).memory.iterator();
for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
sink.assertObject( (InternalFactHandle) entry.getValue(),
context,
workingMemory );
}
}
}
}
public boolean isObjectMemoryEnabled() {
return false;
}
public void setObjectMemoryEnabled(boolean objectMemoryEnabled) {
throw new UnsupportedOperationException( "Entry Point Node has no Object memory" );
}
public String toString() {
return "[EntryPointNode(" + this.id + ") " + this.entryPoint + " ]";
}
public void byPassModifyToBetaNode(InternalFactHandle factHandle,
ModifyPreviousTuples modifyPreviousTuples,
PropagationContext context,
InternalWorkingMemory workingMemory) {
throw new UnsupportedOperationException();
}
@Override
public long calculateDeclaredMask(List<String> settableProperties) {
throw new UnsupportedOperationException();
}
}
| bxf12315/drools | drools-core/src/main/java/org/drools/core/reteoo/EntryPointNode.java | Java | apache-2.0 | 20,815 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
 * A complex type that contains information about the VPCs that can be associated with your hosted zone.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53-2013-04-01/ListVPCAssociationAuthorizations"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListVPCAssociationAuthorizationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
* </p>
*/
private String hostedZoneId;
/**
* <p>
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and include
* the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another
* <code>ListVPCAssociationAuthorizations</code> request.
* </p>
*/
private String nextToken;
/**
* <p>
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If
* you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page.
* </p>
*/
private String maxResults;
/**
* <p>
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
* </p>
*
* @param hostedZoneId
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
*/
public void setHostedZoneId(String hostedZoneId) {
this.hostedZoneId = hostedZoneId;
}
/**
* <p>
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
* </p>
*
* @return The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
*/
public String getHostedZoneId() {
return this.hostedZoneId;
}
/**
* <p>
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
* </p>
*
* @param hostedZoneId
* The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListVPCAssociationAuthorizationsRequest withHostedZoneId(String hostedZoneId) {
setHostedZoneId(hostedZoneId);
return this;
}
/**
* <p>
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and include
* the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another
* <code>ListVPCAssociationAuthorizations</code> request.
* </p>
*
* @param nextToken
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and
* include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in
* another <code>ListVPCAssociationAuthorizations</code> request.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and include
* the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another
* <code>ListVPCAssociationAuthorizations</code> request.
* </p>
*
* @return <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and
* include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in
* another <code>ListVPCAssociationAuthorizations</code> request.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and include
* the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another
* <code>ListVPCAssociationAuthorizations</code> request.
* </p>
*
* @param nextToken
* <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be
* associated with the specified hosted zone. To get the next page of results, submit another request, and
* include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in
* another <code>ListVPCAssociationAuthorizations</code> request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListVPCAssociationAuthorizationsRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* <p>
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If
* you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page.
* </p>
*
* @param maxResults
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to
* return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs
* per page.
*/
public void setMaxResults(String maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If
* you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page.
* </p>
*
* @return <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to
* return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs
* per page.
*/
public String getMaxResults() {
return this.maxResults;
}
/**
* <p>
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If
* you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page.
* </p>
*
* @param maxResults
* <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to
* return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs
* per page.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListVPCAssociationAuthorizationsRequest withMaxResults(String maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getHostedZoneId() != null)
sb.append("HostedZoneId: ").append(getHostedZoneId()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListVPCAssociationAuthorizationsRequest == false)
return false;
ListVPCAssociationAuthorizationsRequest other = (ListVPCAssociationAuthorizationsRequest) obj;
if (other.getHostedZoneId() == null ^ this.getHostedZoneId() == null)
return false;
if (other.getHostedZoneId() != null && other.getHostedZoneId().equals(this.getHostedZoneId()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getHostedZoneId() == null) ? 0 : getHostedZoneId().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
return hashCode;
}
@Override
public ListVPCAssociationAuthorizationsRequest clone() {
return (ListVPCAssociationAuthorizationsRequest) super.clone();
}
}
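/*
 * Usage sketch (illustrative only): paginating through the VPCs authorized for
 * association with a hosted zone. It assumes an AmazonRoute53 client instance
 * named "route53" plus the SDK's listVPCAssociationAuthorizations(...)
 * operation, whose ListVPCAssociationAuthorizationsResult is expected to
 * expose getVPCs() and getNextToken(); the hosted zone id is hypothetical.
 *
 *   ListVPCAssociationAuthorizationsRequest request =
 *           new ListVPCAssociationAuthorizationsRequest()
 *                   .withHostedZoneId("Z1EXAMPLE")
 *                   .withMaxResults("50");
 *   String nextToken = null;
 *   do {
 *       ListVPCAssociationAuthorizationsResult result =
 *               route53.listVPCAssociationAuthorizations(request.withNextToken(nextToken));
 *       for (VPC vpc : result.getVPCs()) {
 *           System.out.println(vpc);
 *       }
 *       nextToken = result.getNextToken();
 *   } while (nextToken != null);
 */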
| dagnir/aws-sdk-java | aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/ListVPCAssociationAuthorizationsRequest.java | Java | apache-2.0 | 11,109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.invertedindex.index;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.roaringbitmap.RoaringBitmap;
/**
* @author yangli9
*/
public interface ColumnValueContainer {
void append(ImmutableBytesWritable valueBytes);
void closeForChange();
int getSize();
// works only after closeForChange()
void getValueAt(int i, ImmutableBytesWritable valueBytes);
RoaringBitmap getBitMap(Integer startId, Integer endId);
int getMaxValueId();
}
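/*
 * Usage sketch (hypothetical caller; "columnValues" stands in for any
 * Iterable<ImmutableBytesWritable>, and the container comes from whichever
 * concrete implementation is in use): values are appended while the container
 * is open, closeForChange() freezes it, and only then may positional reads and
 * bitmap lookups be used, as the comment on getValueAt() above notes.
 *
 *   ColumnValueContainer container = ...;
 *   for (ImmutableBytesWritable value : columnValues) {
 *       container.append(value);
 *   }
 *   container.closeForChange();
 *   ImmutableBytesWritable buffer = new ImmutableBytesWritable();
 *   for (int i = 0; i < container.getSize(); i++) {
 *       container.getValueAt(i, buffer);
 *   }
 *   RoaringBitmap rows = container.getBitMap(0, container.getMaxValueId());
 */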
| lemire/incubator-kylin | invertedindex/src/main/java/org/apache/kylin/invertedindex/index/ColumnValueContainer.java | Java | apache-2.0 | 1,361 |
package org.valuereporter.observation;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.valuereporter.QueryOperations;
import org.valuereporter.WriteOperations;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
/**
* @author <a href="mailto:erik-dev@fjas.no">Erik Drolshammer</a>
*/
@Component
@Path("/observedmethods")
public class ObservedMethodsResouce {
private static final Logger log = LoggerFactory.getLogger(ObservedMethodsResouce.class);
private final QueryOperations queryOperations;
private final WriteOperations writeOperations;
private final ObjectMapper mapper;
/**
@Autowired
public ObservedMethodsResouce(QueryOperations queryOperations, WriteOperations writeOperations, ObjectMapper mapper) {
this.queryOperations = queryOperations;
this.writeOperations = writeOperations;
this.mapper = mapper;
}
**/
@Autowired
public ObservedMethodsResouce(ObservationsService observationsService, ObjectMapper mapper) {
this.queryOperations = observationsService;
this.writeOperations = observationsService;
this.mapper = mapper;
}
//http://localhost:4901/reporter/observe/observedmethods/{prefix}/{name}
/**
* A request with no filtering parameters should return a list of all observations.
*
* @param prefix prefix used to identify running process
* @param name package.classname.method
* @return List of observations
*/
@GET
@Path("/{prefix}/{name}")
@Produces(MediaType.APPLICATION_JSON)
public Response findObservationsByName(@PathParam("prefix") String prefix,@PathParam("name") String name) {
final List<ObservedMethod> observedMethods;
//Should also support no queryParams -> findAll
if (name != null ) {
log.trace("findObservationsByName name={}", name);
observedMethods = queryOperations.findObservationsByName(prefix, name);
} else {
throw new UnsupportedOperationException("You must supply a name. <package.classname.method>");
}
Writer strWriter = new StringWriter();
try {
mapper.writeValue(strWriter, observedMethods);
} catch (IOException e) {
log.error("Could not convert {} ObservedMethod to JSON.", observedMethods.size(), e);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
}
return Response.ok(strWriter.toString()).build();
}
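    //Example request (hypothetical prefix and method name), following the URL
    //pattern noted above:
    //  curl http://localhost:4901/reporter/observe/observedmethods/myapp/com.example.OrderService.placeOrder
    //The response is a JSON array of ObservedMethod entries for that prefix/name.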
//http://localhost:4901/reporter/observe/observedmethods/{prefix}
@POST
@Path("/{prefix}")
@Produces(MediaType.APPLICATION_JSON)
public Response addObservationMethod(@PathParam("prefix") String prefix, String jsonBody){
log.trace("addObservationMethod prefix {} , jsonBody {}.", prefix, jsonBody);
List<ObservedMethod> observedMethods = null;
try {
observedMethods = mapper.readValue(jsonBody, new TypeReference<ArrayList<ObservedMethodJson>>(){ });
if (observedMethods != null) {
for (ObservedMethod observedMethod : observedMethods) {
observedMethod.setPrefix(prefix);
}
}
} catch (IOException e) {
log.warn("Unexpected error trying to produce list of ObservedMethod from \n prefix {} \n json {}, \n Reason {}",prefix, jsonBody, e.getMessage());
return Response.status(Response.Status.NOT_ACCEPTABLE).entity("Error converting to requested format.").build();
}
long updatedCount = writeOperations.addObservations(prefix,observedMethods);
String message = "added " + updatedCount + " observedMethods.";
Writer strWriter = new StringWriter();
try {
mapper.writeValue(strWriter, message);
} catch (IOException e) {
log.error("Could not convert {} to JSON.", updatedCount, e);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
}
return Response.ok(strWriter.toString()).build();
}
}
| altran/Valuereporter | src/main/java/org/valuereporter/observation/ObservedMethodsResouce.java | Java | apache-2.0 | 4,690 |
package com.lee.game;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import com.lee.base.activity.BaseActivity;
import com.lee.base.application.PackageNameContainer;
import com.noobyang.log.LogUtil;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Main Activity
* <p/>
* Created by LiYang on 2019/4/8.
*/
public class MainActivity extends BaseActivity {
private static final String ACTION_SAMPLE_CODE = "com.lee.main.action.SAMPLE_CODE_GAME";
private static final String EXTRA_NAME_PATH = "com.lee.main.Path";
private static final String PATH_DIVIDED_SYMBOLS = ".";
private static final String PATH_DIVIDED_SYMBOLS_REGEX = "\\.";
@BindView(R.id.tv_path)
TextView tvPath;
@BindView(R.id.rv_sample_code)
RecyclerView rvSampleCode;
private PackageManager packageManager;
private List<SampleCodeEntity> sampleCodeEntities;
private SampleCodeAdapter sampleCodeAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ButterKnife.bind(this);
initData();
initView();
}
@Override
protected void onNewIntent(Intent intent) {
super.onNewIntent(intent);
setIntent(intent);
updateSampleCodes();
}
@Override
protected void onDestroy() {
super.onDestroy();
}
private void initData() {
packageManager = getPackageManager();
sampleCodeAdapter = new SampleCodeAdapter(this, sampleCodeEntities, itemClickListener);
}
private void initView() {
LinearLayoutManager layoutManager = new LinearLayoutManager(this);
layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
rvSampleCode.setLayoutManager(layoutManager);
rvSampleCode.setAdapter(sampleCodeAdapter);
updateSampleCodes();
}
private void updateSampleCodes() {
String path = getIntent().getStringExtra(EXTRA_NAME_PATH);
initSampleCodes(path);
sampleCodeAdapter.setData(sampleCodeEntities);
sampleCodeAdapter.notifyDataSetChanged();
setPathText(path);
}
private void setPathText(String path) {
if (TextUtils.isEmpty(path)) {
tvPath.setText(R.string.app_name);
} else {
tvPath.setText(path);
}
}
protected void initSampleCodes(String path) {
if (sampleCodeEntities == null) {
sampleCodeEntities = new ArrayList<>();
} else {
sampleCodeEntities.clear();
}
List<ResolveInfo> sampleCodeResolveInfoList = getSampleCodeResolveInfoList();
if (sampleCodeResolveInfoList == null || sampleCodeResolveInfoList.size() == 0) {
return;
}
String[] prefixPaths;
String currentPrefixPath;
Map<String, Boolean> folderLabel = new HashMap<>();
String label;
String[] labelPath;
String sampleCodeLabel;
for (ResolveInfo sampleCodeResolveInfo : sampleCodeResolveInfoList) {
if (TextUtils.isEmpty(path)) {
prefixPaths = null;
currentPrefixPath = null;
} else {
path = getRelativeName(path);
prefixPaths = path.split(PATH_DIVIDED_SYMBOLS_REGEX);
currentPrefixPath = path + PATH_DIVIDED_SYMBOLS;
}
label = getRelativeName(sampleCodeResolveInfo.activityInfo.name);
LogUtil.d("getData currentPrefixPath = " + currentPrefixPath + "---label = " + label);
if (TextUtils.isEmpty(currentPrefixPath) || label.startsWith(currentPrefixPath)) {
labelPath = label.split(PATH_DIVIDED_SYMBOLS_REGEX);
int prefixPathsLen = prefixPaths == null ? 0 : prefixPaths.length;
sampleCodeLabel = labelPath[prefixPathsLen];
if (prefixPathsLen == labelPath.length - 1) {
// activity
addActivityItem(sampleCodeEntities, sampleCodeLabel,
sampleCodeResolveInfo.activityInfo.applicationInfo.packageName,
sampleCodeResolveInfo.activityInfo.name);
} else {
// folder
if (folderLabel.get(sampleCodeLabel) == null) {
addFolderItem(sampleCodeEntities, sampleCodeLabel, currentPrefixPath);
folderLabel.put(sampleCodeLabel, true);
}
}
}
}
Collections.sort(sampleCodeEntities, comparator);
}
private String getRelativeName(String className) {
if (TextUtils.isEmpty(className)) {
return className;
}
for (String packageName : PackageNameContainer.getPackageNames()) {
if (className.startsWith(packageName + PATH_DIVIDED_SYMBOLS)) {
return className.substring(packageName.length() + 1);
}
}
return className;
}
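    // Example (hypothetical registration): if PackageNameContainer holds
    // "com.lee.base", then getRelativeName("com.lee.base.game2048.MainActivity")
    // yields "game2048.MainActivity", which initSampleCodes() lists as the
    // folder "game2048" at the top level and the activity "MainActivity" one
    // level below it.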
private List<ResolveInfo> getSampleCodeResolveInfoList() {
Intent sampleCodeIntent = new Intent(ACTION_SAMPLE_CODE, null);
sampleCodeIntent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
return packageManager.queryIntentActivities(sampleCodeIntent, 0);
}
private final static Comparator<SampleCodeEntity> comparator =
new Comparator<SampleCodeEntity>() {
private final Collator collator = Collator.getInstance();
public int compare(SampleCodeEntity entity1, SampleCodeEntity entity2) {
return collator.compare(entity1.getTitle(), entity2.getTitle());
}
};
private void addActivityItem(List<SampleCodeEntity> data, String sampleCodeLabel,
String packageName, String className) {
Intent activityIntent = new Intent();
activityIntent.setClassName(packageName, className);
addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_ACTIVITY, sampleCodeLabel, activityIntent);
}
private void addFolderItem(List<SampleCodeEntity> data, String sampleCodeLabel,
String currentPrefixPath) {
Intent folderIntent = new Intent();
folderIntent.setClass(this, MainActivity.class);
String path = TextUtils.isEmpty(currentPrefixPath) ? sampleCodeLabel : currentPrefixPath + sampleCodeLabel;
folderIntent.putExtra(EXTRA_NAME_PATH, path);
addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_FOLDER, sampleCodeLabel, folderIntent);
}
protected void addItem(List<SampleCodeEntity> data, int type, String title, Intent intent) {
SampleCodeEntity entity = new SampleCodeEntity(type, title, intent);
data.add(entity);
}
private SampleCodeAdapter.OnItemClickListener itemClickListener =
new SampleCodeAdapter.OnItemClickListener() {
@Override
public void onItemClick(View view, int position) {
SampleCodeEntity entity = sampleCodeEntities.get(position);
if (entity != null) {
Intent intent = entity.getIntent();
intent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
startActivity(intent);
}
}
};
}
| noobyang/AndroidStudy | game/src/main/java/com/lee/game/MainActivity.java | Java | apache-2.0 | 7,946 |
/*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.bither.util;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import net.bither.BitherApplication;
import net.bither.bitherj.utils.Utils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
public class FileUtil {
// old tickerName file
private static final String HUOBI_TICKER_NAME = "huobi.ticker";
private static final String BITSTAMP_TICKER_NAME = "bitstamp.ticker";
private static final String BTCE_TICKER_NAME = "btce.ticker";
private static final String OKCOIN_TICKER_NAME = "okcoin.ticker";
private static final String CHBTC_TICKER_NAME = "chbtc.ticker";
private static final String BTCCHINA_TICKER_NAME = "btcchina.ticker";
private static final String BITHER_BACKUP_SDCARD_DIR = "BitherBackup";
private static final String BITHER_BACKUP_ROM_DIR = "backup";
private static final String BITHER_BACKUP_HOT_FILE_NAME = "keys";
private static final String EXCAHNGE_TICKER_NAME = "exchange.ticker";
private static final String EXCHANGE_KLINE_NAME = "exchange.kline";
private static final String EXCHANGE_DEPTH_NAME = "exchange.depth";
private static final String PRICE_ALERT = "price.alert";
private static final String EXCHANGERATE = "exchangerate";
private static final String CURRENCIES_RATE = "currencies_rate";
private static final String MARKET_CAHER = "mark";
private static final String IMAGE_CACHE_DIR = "image";
private static final String IMAGE_SHARE_FILE_NAME = "share.jpg";
private static final String IMAGE_CACHE_UPLOAD = IMAGE_CACHE_DIR + "/upload";
private static final String IMAGE_CACHE_612 = IMAGE_CACHE_DIR + "/612";
private static final String IMAGE_CACHE_150 = IMAGE_CACHE_DIR + "/150";
private static final String AD_CACHE = "ad";
private static final String AD_NAME = "ad.json";
private static final String AD_IMAGE_EN_CACHE = AD_CACHE + "/img_en";
private static final String AD_IMAGE_ZH_CN_CACHE = AD_CACHE + "/img_zh_CN";
private static final String AD_IMAGE_ZH_TW_CACHE = AD_CACHE + "/img_zh_TW";
/**
     * Returns true when external storage (the SD card) is mounted.
*/
public static boolean existSdCardMounted() {
String storageState = android.os.Environment.getExternalStorageState();
if (Utils.isEmpty(storageState)) {
return false;
}
return Utils.compareString(storageState,
android.os.Environment.MEDIA_MOUNTED);
}
public static File getSDPath() {
File sdDir = Environment.getExternalStorageDirectory();
return sdDir;
}
public static File getBackupSdCardDir() {
File backupDir = new File(getSDPath(), BITHER_BACKUP_SDCARD_DIR);
if (!backupDir.exists()) {
backupDir.mkdirs();
}
return backupDir;
}
public static File getBackupFileOfCold() {
File file = new File(getBackupSdCardDir(),
DateTimeUtil.getNameForFile(System.currentTimeMillis())
+ ".bak"
);
return file;
}
public static List<File> getBackupFileListOfCold() {
File dir = getBackupSdCardDir();
List<File> fileList = new ArrayList<File>();
File[] files = dir.listFiles();
if (files != null && files.length > 0) {
files = orderByDateDesc(files);
for (File file : files) {
if (StringUtil.checkBackupFileOfCold(file.getName())) {
fileList.add(file);
}
}
}
return fileList;
}
private static File getBackupRomDir() {
File backupDir = new File(Utils.getWalletRomCache(), BITHER_BACKUP_ROM_DIR);
if (!backupDir.exists()) {
backupDir.mkdirs();
}
return backupDir;
}
public static File getBackupKeyOfHot() {
File backupDir = getBackupRomDir();
return new File(backupDir, BITHER_BACKUP_HOT_FILE_NAME);
}
public static File getDiskDir(String dirName, Boolean createNomedia) {
File dir = getDiskCacheDir(BitherApplication.mContext, dirName);
if (!dir.exists()) {
dir.mkdirs();
if (createNomedia) {
try {
File noMediaFile = new File(dir, ".nomedia");
noMediaFile.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return dir;
}
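    // Example: getDiskDir(IMAGE_CACHE_DIR, true) resolves to "<cache root>/image"
    // and drops a ".nomedia" marker so the gallery skips cached images; the cache
    // root comes from getDiskCacheDir() below and depends on whether external
    // storage is mounted.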
public static Uri saveShareImage(Bitmap bmp) {
File dir = getDiskDir(IMAGE_CACHE_DIR, true);
File jpg = new File(dir, IMAGE_SHARE_FILE_NAME);
NativeUtil.compressBitmap(bmp, 85, jpg.getAbsolutePath(), true);
return Uri.fromFile(jpg);
}
public static File getExternalCacheDir(Context context) {
// if (SdkUtils.hasFroyo()) {
//
// return context.getCacheDir();
// }
// Before Froyo we need to construct the external cache dir ourselves
final String cacheDir = "/Android/data/" + context.getPackageName()
+ "/cache/";
return new File(Environment.getExternalStorageDirectory().getPath()
+ cacheDir);
}
public static File getDiskCacheDir(Context context, String uniqueName) {
File extCacheDir = getExternalCacheDir(context);
final String cachePath = (Environment.MEDIA_MOUNTED.equals(Environment
.getExternalStorageState()) || !isExternalStorageRemovable())
&& extCacheDir != null ? extCacheDir.getPath() : context
.getCacheDir().getPath();
return new File(cachePath + File.separator + uniqueName);
}
@TargetApi(9)
public static boolean isExternalStorageRemovable() {
if (SdkUtils.hasGingerbread()) {
return Environment.isExternalStorageRemovable();
}
return true;
}
private static File getMarketCache() {
return getDiskDir(MARKET_CAHER, false);
}
public static File getAdImageEnDir() {
return getDiskDir(AD_IMAGE_EN_CACHE, true);
}
public static File getAdImagZhCnDir() {
return getDiskDir(AD_IMAGE_ZH_CN_CACHE, true);
}
public static File getAdImagZhTwDir() {
return getDiskDir(AD_IMAGE_ZH_TW_CACHE, true);
}
private static File getAdDir() {
return getDiskDir(AD_CACHE, false);
}
public static File getUploadImageDir() {
return getDiskDir(IMAGE_CACHE_UPLOAD, true);
}
public static File getAvatarDir() {
return getDiskDir(IMAGE_CACHE_612, true);
}
public static File getSmallAvatarDir() {
return getDiskDir(IMAGE_CACHE_150, true);
}
public static File getExchangeRateFile() {
File file = getDiskDir("", false);
return new File(file, EXCHANGERATE);
}
public static File getCurrenciesRateFile() {
File file = getDiskDir("", false);
return new File(file, CURRENCIES_RATE);
}
public static File getTickerFile() {
File file = getMarketCache();
file = new File(file, EXCAHNGE_TICKER_NAME);
return file;
}
public static File getPriceAlertFile() {
File marketDir = getMarketCache();
return new File(marketDir, PRICE_ALERT);
}
public static File getKlineFile() {
File file = getMarketCache();
file = new File(file, EXCHANGE_KLINE_NAME);
return file;
}
public static File getDepthFile() {
File file = getMarketCache();
file = new File(file, EXCHANGE_DEPTH_NAME);
return file;
}
public static File getAdFile() {
File file = getAdDir();
file = new File(file, AD_NAME);
return file;
}
@SuppressWarnings("resource")
public static Object deserialize(File file) {
FileInputStream fos = null;
try {
if (!file.exists()) {
return null;
}
fos = new FileInputStream(file);
ObjectInputStream ois;
ois = new ObjectInputStream(fos);
Object object = ois.readObject();
return object;
} catch (Exception e) {
e.printStackTrace();
return null;
} finally {
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void serializeObject(File file, Object object) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(object);
oos.flush();
fos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
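    // Usage sketch ("Ticker" is a stand-in for any Serializable type):
    //   File cache = FileUtil.getTickerFile();
    //   FileUtil.serializeObject(cache, ticker);                 // persist snapshot
    //   Ticker restored = (Ticker) FileUtil.deserialize(cache);  // null when absent or unreadable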
public static File[] orderByDateDesc(File[] fs) {
Arrays.sort(fs, new Comparator<File>() {
public int compare(File f1, File f2) {
long diff = f1.lastModified() - f2.lastModified();
if (diff > 0) {
return -1;//-1 f1 before f2
} else if (diff == 0) {
return 0;
} else {
return 1;
}
}
public boolean equals(Object obj) {
return true;
}
});
return fs;
}
public static void copyFile(File src, File tar) throws Exception {
if (src.isFile()) {
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
try {
InputStream is = new FileInputStream(src);
bis = new BufferedInputStream(is);
OutputStream op = new FileOutputStream(tar);
bos = new BufferedOutputStream(op);
byte[] bt = new byte[8192];
int len = bis.read(bt);
while (len != -1) {
bos.write(bt, 0, len);
len = bis.read(bt);
}
bis.close();
bos.close();
} catch (Exception e) {
e.printStackTrace();
} finally {
}
} else if (src.isDirectory()) {
File[] files = src.listFiles();
tar.mkdir();
            for (int i = 0; i < files.length; i++) {
copyFile(files[i].getAbsoluteFile(),
new File(tar.getAbsoluteFile() + File.separator
+ files[i].getName())
);
}
} else {
throw new FileNotFoundException();
}
}
public static void delFolder(String folderPath) {
try {
delAllFile(folderPath);
            File myFilePath = new File(folderPath);
            myFilePath.delete();
} catch (Exception e) {
e.printStackTrace();
}
}
private static void delAllFile(String path) {
File file = new File(path);
if (!file.exists()) {
return;
}
if (!file.isDirectory()) {
return;
}
String[] tempList = file.list();
if (tempList == null) {
return;
}
File temp = null;
        for (int i = 0; i < tempList.length; i++) {
if (path.endsWith(File.separator)) {
temp = new File(path + tempList[i]);
} else {
temp = new File(path + File.separator + tempList[i]);
}
if (temp.isFile()) {
temp.delete();
}
if (temp.isDirectory()) {
delAllFile(path + "/" + tempList[i]);
delFolder(path + "/" + tempList[i]);
}
}
}
public static void upgradeTickerFile() {
File marketDir = getMarketCache();
File file = new File(marketDir, BITSTAMP_TICKER_NAME);
fileExistAndDelete(file);
file = new File(marketDir, BTCE_TICKER_NAME);
fileExistAndDelete(file);
file = new File(marketDir, HUOBI_TICKER_NAME);
fileExistAndDelete(file);
file = new File(marketDir, OKCOIN_TICKER_NAME);
fileExistAndDelete(file);
file = new File(marketDir, CHBTC_TICKER_NAME);
fileExistAndDelete(file);
file = new File(marketDir, BTCCHINA_TICKER_NAME);
fileExistAndDelete(file);
}
public static boolean fileExistAndDelete(File file) {
return file.exists() && file.delete();
}
public static File convertUriToFile(Activity activity, Uri uri) {
File file = null;
try {
String[] proj = {MediaStore.Images.Media.DATA};
@SuppressWarnings("deprecation")
Cursor actualimagecursor = activity.managedQuery(uri, proj, null,
null, null);
if (actualimagecursor != null) {
int actual_image_column_index = actualimagecursor
.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
actualimagecursor.moveToFirst();
String img_path = actualimagecursor
.getString(actual_image_column_index);
if (!Utils.isEmpty(img_path)) {
file = new File(img_path);
}
} else {
file = new File(new URI(uri.toString()));
if (file.exists()) {
return file;
}
}
} catch (Exception e) {
}
return file;
}
public static int getOrientationOfFile(String fileName) {
int orientation = 0;
try {
ExifInterface exif = new ExifInterface(fileName);
String orientationString = exif
.getAttribute(ExifInterface.TAG_ORIENTATION);
if (Utils.isNubmer(orientationString)) {
int orc = Integer.valueOf(orientationString);
switch (orc) {
case ExifInterface.ORIENTATION_ROTATE_90:
orientation = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
orientation = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
orientation = 270;
break;
default:
break;
}
}
} catch (IOException e) {
e.printStackTrace();
}
return orientation;
}
}
| bither/bither-android | bither-android/src/net/bither/util/FileUtil.java | Java | apache-2.0 | 16,209 |
package org.cobbzilla.util.jdbc;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DbUrlUtil {
public static final Pattern JDBC_URL_REGEX = Pattern.compile("^jdbc:postgresql://[\\.\\w]+:\\d+/(.+)$");
public static String setDbName(String url, String dbName) {
final Matcher matcher = JDBC_URL_REGEX.matcher(url);
if (!matcher.find()) return url;
        // replace only the captured database-name group so the
        // jdbc:postgresql://host:port/ prefix is preserved
        final String renamed = url.substring(0, matcher.start(1)) + dbName;
        return renamed;
}
}
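/*
 * Usage sketch of the intended behavior (illustrative values):
 *   DbUrlUtil.setDbName("jdbc:postgresql://localhost:5432/app_db", "app_test")
 *   // -> "jdbc:postgresql://localhost:5432/app_test"
 * URLs that do not match the postgresql pattern are returned unchanged.
 */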
| cobbzilla/cobbzilla-utils | src/main/java/org/cobbzilla/util/jdbc/DbUrlUtil.java | Java | apache-2.0 | 495 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.ozShell;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdds.cli.MissingSubcommandException;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneAcl.OzoneACLRights;
import org.apache.hadoop.ozone.OzoneAcl.OzoneACLType;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneKey;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.client.VolumeArgs;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
import org.apache.hadoop.ozone.client.rest.OzoneException;
import org.apache.hadoop.ozone.client.rest.RestClient;
import org.apache.hadoop.ozone.client.rpc.RpcClient;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.apache.hadoop.ozone.web.request.OzoneQuota;
import org.apache.hadoop.ozone.web.response.BucketInfo;
import org.apache.hadoop.ozone.web.response.KeyInfo;
import org.apache.hadoop.ozone.web.response.VolumeInfo;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import com.google.common.base.Strings;
import org.apache.commons.lang3.RandomStringUtils;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_REPLICATION;
import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_ADDRESS_KEY;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.ExecutionException;
import picocli.CommandLine.IExceptionHandler2;
import picocli.CommandLine.ParameterException;
import picocli.CommandLine.ParseResult;
import picocli.CommandLine.RunLast;
/**
 * Test class for the Ozone shell commands.
*/
@RunWith(value = Parameterized.class)
public class TestOzoneShell {
private static final Logger LOG =
LoggerFactory.getLogger(TestOzoneShell.class);
/**
* Set the timeout for every test.
*/
@Rule
public Timeout testTimeout = new Timeout(300000);
private static String url;
private static File baseDir;
private static OzoneConfiguration conf = null;
private static MiniOzoneCluster cluster = null;
private static ClientProtocol client = null;
private static Shell shell = null;
private final ByteArrayOutputStream out = new ByteArrayOutputStream();
private final ByteArrayOutputStream err = new ByteArrayOutputStream();
private static final PrintStream OLD_OUT = System.out;
private static final PrintStream OLD_ERR = System.err;
@Parameterized.Parameters
public static Collection<Object[]> clientProtocol() {
Object[][] params = new Object[][] {
{RpcClient.class},
{RestClient.class}};
return Arrays.asList(params);
}
@Parameterized.Parameter
public Class clientProtocol;
/**
   * Create a MiniOzoneCluster for testing with the distributed Ozone
   * handler type.
*
* @throws Exception
*/
@BeforeClass
public static void init() throws Exception {
conf = new OzoneConfiguration();
String path = GenericTestUtils.getTempPath(
TestOzoneShell.class.getSimpleName());
baseDir = new File(path);
baseDir.mkdirs();
shell = new Shell();
cluster = MiniOzoneCluster.newBuilder(conf)
.setNumDatanodes(3)
.build();
conf.setInt(OZONE_REPLICATION, ReplicationFactor.THREE.getValue());
conf.setQuietMode(false);
client = new RpcClient(conf);
cluster.waitForClusterToBeReady();
}
/**
   * Shut down the MiniOzoneCluster.
*/
@AfterClass
public static void shutdown() {
if (cluster != null) {
cluster.shutdown();
}
if (baseDir != null) {
FileUtil.fullyDelete(baseDir, true);
}
}
@Before
public void setup() {
System.setOut(new PrintStream(out));
System.setErr(new PrintStream(err));
if(clientProtocol.equals(RestClient.class)) {
String hostName = cluster.getOzoneManager().getHttpServer()
.getHttpAddress().getHostName();
int port = cluster
.getOzoneManager().getHttpServer().getHttpAddress().getPort();
url = String.format("http://" + hostName + ":" + port);
} else {
List<ServiceInfo> services = null;
try {
services = cluster.getOzoneManager().getServiceList();
} catch (IOException e) {
LOG.error("Could not get service list from OM");
}
String hostName = services.stream().filter(
a -> a.getNodeType().equals(HddsProtos.NodeType.OM))
.collect(Collectors.toList()).get(0).getHostname();
String port = cluster.getOzoneManager().getRpcPort();
url = String.format("o3://" + hostName + ":" + port);
}
}
@After
public void reset() {
// reset stream after each unit test
out.reset();
err.reset();
// restore system streams
System.setOut(OLD_OUT);
System.setErr(OLD_ERR);
}
@Test
public void testCreateVolume() throws Exception {
LOG.info("Running testCreateVolume");
String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
testCreateVolume(volumeName, "");
volumeName = "volume" + RandomStringUtils.randomNumeric(5);
testCreateVolume("/////" + volumeName, "");
testCreateVolume("/////", "Volume name is required");
testCreateVolume("/////vol/123",
"Invalid volume name. Delimiters (/) not allowed in volume name");
}
private void testCreateVolume(String volumeName, String errorMsg)
throws Exception {
err.reset();
String userName = "bilbo";
String[] args = new String[] {"volume", "create", url + "/" + volumeName,
"--user", userName, "--root"};
if (Strings.isNullOrEmpty(errorMsg)) {
execute(shell, args);
} else {
executeWithError(shell, args, errorMsg);
return;
}
String truncatedVolumeName =
volumeName.substring(volumeName.lastIndexOf('/') + 1);
OzoneVolume volumeInfo = client.getVolumeDetails(truncatedVolumeName);
assertEquals(truncatedVolumeName, volumeInfo.getName());
assertEquals(userName, volumeInfo.getOwner());
}
private void execute(Shell ozoneShell, String[] args) {
List<String> arguments = new ArrayList(Arrays.asList(args));
LOG.info("Executing shell command with args {}", arguments);
CommandLine cmd = ozoneShell.getCmd();
IExceptionHandler2<List<Object>> exceptionHandler =
new IExceptionHandler2<List<Object>>() {
@Override
public List<Object> handleParseException(ParameterException ex,
String[] args) {
throw ex;
}
@Override
public List<Object> handleExecutionException(ExecutionException ex,
ParseResult parseResult) {
throw ex;
}
};
cmd.parseWithHandlers(new RunLast(),
exceptionHandler, args);
}
/**
* Test to create volume without specifying --user or -u.
* @throws Exception
*/
@Test
public void testCreateVolumeWithoutUser() throws Exception {
String volumeName = "volume" + RandomStringUtils.randomNumeric(1);
String[] args = new String[] {"volume", "create", url + "/" + volumeName,
"--root"};
execute(shell, args);
String truncatedVolumeName =
volumeName.substring(volumeName.lastIndexOf('/') + 1);
OzoneVolume volumeInfo = client.getVolumeDetails(truncatedVolumeName);
assertEquals(truncatedVolumeName, volumeInfo.getName());
assertEquals(UserGroupInformation.getCurrentUser().getUserName(),
volumeInfo.getOwner());
}
@Test
public void testDeleteVolume() throws Exception {
LOG.info("Running testDeleteVolume");
String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
VolumeArgs volumeArgs = VolumeArgs.newBuilder()
.setOwner("bilbo")
.setQuota("100TB")
.build();
client.createVolume(volumeName, volumeArgs);
OzoneVolume volume = client.getVolumeDetails(volumeName);
assertNotNull(volume);
String[] args = new String[] {"volume", "delete", url + "/" + volumeName};
execute(shell, args);
String output = out.toString();
assertTrue(output.contains("Volume " + volumeName + " is deleted"));
// verify if volume has been deleted
try {
client.getVolumeDetails(volumeName);
fail("Get volume call should have thrown.");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains(
"Info Volume failed, error:VOLUME_NOT_FOUND", e);
}
volumeName = "volume" + RandomStringUtils.randomNumeric(5);
volumeArgs = VolumeArgs.newBuilder()
.setOwner("bilbo")
.setQuota("100TB")
.build();
client.createVolume(volumeName, volumeArgs);
volume = client.getVolumeDetails(volumeName);
assertNotNull(volume);
//volumeName prefixed with /
String volumeNameWithSlashPrefix = "/" + volumeName;
args = new String[] {"volume", "delete",
url + "/" + volumeNameWithSlashPrefix};
execute(shell, args);
output = out.toString();
assertTrue(output.contains("Volume " + volumeName + " is deleted"));
// verify if volume has been deleted
try {
client.getVolumeDetails(volumeName);
fail("Get volume call should have thrown.");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains(
"Info Volume failed, error:VOLUME_NOT_FOUND", e);
}
}
@Test
public void testInfoVolume() throws Exception {
LOG.info("Running testInfoVolume");
String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
VolumeArgs volumeArgs = VolumeArgs.newBuilder()
.setOwner("bilbo")
.setQuota("100TB")
.build();
client.createVolume(volumeName, volumeArgs);
//volumeName supplied as-is
String[] args = new String[] {"volume", "info", url + "/" + volumeName};
execute(shell, args);
String output = out.toString();
assertTrue(output.contains(volumeName));
assertTrue(output.contains("createdOn")
&& output.contains(OzoneConsts.OZONE_TIME_ZONE));
//volumeName prefixed with /
String volumeNameWithSlashPrefix = "/" + volumeName;
args = new String[] {"volume", "info",
url + "/" + volumeNameWithSlashPrefix};
execute(shell, args);
output = out.toString();
assertTrue(output.contains(volumeName));
assertTrue(output.contains("createdOn")
&& output.contains(OzoneConsts.OZONE_TIME_ZONE));
// test infoVolume with invalid volume name
args = new String[] {"volume", "info",
url + "/" + volumeName + "/invalid-name"};
executeWithError(shell, args, "Invalid volume name. " +
"Delimiters (/) not allowed in volume name");
// get info for non-exist volume
args = new String[] {"volume", "info", url + "/invalid-volume"};
executeWithError(shell, args, "VOLUME_NOT_FOUND");
}
@Test
public void testShellIncompleteCommand() throws Exception {
LOG.info("Running testShellIncompleteCommand");
String expectedError = "Incomplete command";
String[] args = new String[] {}; //executing 'ozone sh'
executeWithError(shell, args, expectedError,
"Usage: ozone sh [-hV] [--verbose] [-D=<String=String>]..." +
" [COMMAND]");
args = new String[] {"volume"}; //executing 'ozone sh volume'
executeWithError(shell, args, expectedError,
"Usage: ozone sh volume [-hV] [COMMAND]");
args = new String[] {"bucket"}; //executing 'ozone sh bucket'
executeWithError(shell, args, expectedError,
"Usage: ozone sh bucket [-hV] [COMMAND]");
args = new String[] {"key"}; //executing 'ozone sh key'
executeWithError(shell, args, expectedError,
"Usage: ozone sh key [-hV] [COMMAND]");
}
@Test
public void testUpdateVolume() throws Exception {
LOG.info("Running testUpdateVolume");
String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
String userName = "bilbo";
VolumeArgs volumeArgs = VolumeArgs.newBuilder()
.setOwner("bilbo")
.setQuota("100TB")
.build();
client.createVolume(volumeName, volumeArgs);
OzoneVolume vol = client.getVolumeDetails(volumeName);
assertEquals(userName, vol.getOwner());
assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(), vol.getQuota());
String[] args = new String[] {"volume", "update", url + "/" + volumeName,
"--quota", "500MB"};
execute(shell, args);
vol = client.getVolumeDetails(volumeName);
assertEquals(userName, vol.getOwner());
assertEquals(OzoneQuota.parseQuota("500MB").sizeInBytes(), vol.getQuota());
String newUser = "new-user";
args = new String[] {"volume", "update", url + "/" + volumeName,
"--user", newUser};
execute(shell, args);
vol = client.getVolumeDetails(volumeName);
assertEquals(newUser, vol.getOwner());
//volume with / prefix
String volumeWithPrefix = "/" + volumeName;
String newUser2 = "new-user2";
args = new String[] {"volume", "update", url + "/" + volumeWithPrefix,
"--user", newUser2};
execute(shell, args);
vol = client.getVolumeDetails(volumeName);
assertEquals(newUser2, vol.getOwner());
// test error conditions
args = new String[] {"volume", "update", url + "/invalid-volume",
"--user", newUser};
executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND");
err.reset();
args = new String[] {"volume", "update", url + "/invalid-volume",
"--quota", "500MB"};
executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND");
}
/**
   * Execute the command and assert that the thrown exception message
   * contains the expected error.
*/
private void executeWithError(Shell ozoneShell, String[] args,
String expectedError) {
if (Strings.isNullOrEmpty(expectedError)) {
execute(ozoneShell, args);
} else {
try {
execute(ozoneShell, args);
fail("Exception is expected from command execution " + Arrays
.asList(args));
} catch (Exception ex) {
if (!Strings.isNullOrEmpty(expectedError)) {
Throwable exceptionToCheck = ex;
if (exceptionToCheck.getCause() != null) {
exceptionToCheck = exceptionToCheck.getCause();
}
Assert.assertTrue(
String.format(
"Error of shell code doesn't contain the " +
"exception [%s] in [%s]",
expectedError, exceptionToCheck.getMessage()),
exceptionToCheck.getMessage().contains(expectedError));
}
}
}
}
/**
   * Execute the command and assert that the thrown exception message
   * contains the expected error and the specified usage string.
*/
private void executeWithError(Shell ozoneShell, String[] args,
String expectedError, String usage) {
if (Strings.isNullOrEmpty(expectedError)) {
execute(ozoneShell, args);
} else {
try {
execute(ozoneShell, args);
fail("Exception is expected from command execution " + Arrays
.asList(args));
} catch (Exception ex) {
if (!Strings.isNullOrEmpty(expectedError)) {
Throwable exceptionToCheck = ex;
if (exceptionToCheck.getCause() != null) {
exceptionToCheck = exceptionToCheck.getCause();
}
Assert.assertTrue(
String.format(
"Error of shell code doesn't contain the " +
"exception [%s] in [%s]",
expectedError, exceptionToCheck.getMessage()),
exceptionToCheck.getMessage().contains(expectedError));
Assert.assertTrue(
exceptionToCheck instanceof MissingSubcommandException);
Assert.assertTrue(
((MissingSubcommandException)exceptionToCheck)
.getUsage().contains(usage));
}
}
}
}
@Test
public void testListVolume() throws Exception {
LOG.info("Running testListVolume");
String protocol = clientProtocol.getName().toLowerCase();
String commandOutput, commandError;
List<VolumeInfo> volumes;
final int volCount = 20;
final String user1 = "test-user-a-" + protocol;
final String user2 = "test-user-b-" + protocol;
// Create 20 volumes, 10 for user1 and another 10 for user2.
for (int x = 0; x < volCount; x++) {
String volumeName;
String userName;
if (x % 2 == 0) {
// create volume [test-vol0, test-vol2, ..., test-vol18] for user1
userName = user1;
volumeName = "test-vol-" + protocol + x;
} else {
// create volume [test-vol1, test-vol3, ..., test-vol19] for user2
userName = user2;
volumeName = "test-vol-" + protocol + x;
}
VolumeArgs volumeArgs = VolumeArgs.newBuilder()
.setOwner(userName)
.setQuota("100TB")
.build();
client.createVolume(volumeName, volumeArgs);
OzoneVolume vol = client.getVolumeDetails(volumeName);
assertNotNull(vol);
}
String[] args = new String[] {"volume", "list", url + "/abcde", "--user",
user1, "--length", "100"};
executeWithError(shell, args, "Invalid URI");
err.reset();
// test -length option
args = new String[] {"volume", "list", url + "/", "--user",
user1, "--length", "100"};
execute(shell, args);
commandOutput = out.toString();
volumes = (List<VolumeInfo>) JsonUtils
.toJsonList(commandOutput, VolumeInfo.class);
assertEquals(10, volumes.size());
for (VolumeInfo volume : volumes) {
assertEquals(volume.getOwner().getName(), user1);
assertTrue(volume.getCreatedOn().contains(OzoneConsts.OZONE_TIME_ZONE));
}
out.reset();
args = new String[] {"volume", "list", url + "/", "--user",
user1, "--length", "2"};
execute(shell, args);
commandOutput = out.toString();
volumes = (List<VolumeInfo>) JsonUtils
.toJsonList(commandOutput, VolumeInfo.class);
assertEquals(2, volumes.size());
// test --prefix option
out.reset();
args =
new String[] {"volume", "list", url + "/", "--user", user1, "--length",
"100", "--prefix", "test-vol-" + protocol + "1"};
execute(shell, args);
commandOutput = out.toString();
volumes = (List<VolumeInfo>) JsonUtils
.toJsonList(commandOutput, VolumeInfo.class);
assertEquals(5, volumes.size());
// return volume names should be [test-vol10, test-vol12, ..., test-vol18]
for (int i = 0; i < volumes.size(); i++) {
assertEquals(volumes.get(i).getVolumeName(),
"test-vol-" + protocol + ((i + 5) * 2));
assertEquals(volumes.get(i).getOwner().getName(), user1);
}
// test -start option
out.reset();
args =
new String[] {"volume", "list", url + "/", "--user", user2, "--length",
"100", "--start", "test-vol-" + protocol + "15"};
execute(shell, args);
commandOutput = out.toString();
volumes = (List<VolumeInfo>) JsonUtils
.toJsonList(commandOutput, VolumeInfo.class);
assertEquals(2, volumes.size());
assertEquals(volumes.get(0).getVolumeName(), "test-vol-" + protocol + "17");
assertEquals(volumes.get(1).getVolumeName(), "test-vol-" + protocol + "19");
assertEquals(volumes.get(0).getOwner().getName(), user2);
assertEquals(volumes.get(1).getOwner().getName(), user2);
// test error conditions
err.reset();
args = new String[] {"volume", "list", url + "/", "--user",
user2, "--length", "-1"};
executeWithError(shell, args, "the length should be a positive number");
err.reset();
args = new String[] {"volume", "list", url + "/", "--user",
user2, "--length", "invalid-length"};
executeWithError(shell, args, "For input string: \"invalid-length\"");
}
@Test
public void testCreateBucket() throws Exception {
LOG.info("Running testCreateBucket");
OzoneVolume vol = creatVolume();
String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
String[] args = new String[] {"bucket", "create",
url + "/" + vol.getName() + "/" + bucketName};
execute(shell, args);
OzoneBucket bucketInfo = vol.getBucket(bucketName);
assertEquals(vol.getName(),
bucketInfo.getVolumeName());
assertEquals(bucketName, bucketInfo.getName());
// test create a bucket in a non-exist volume
args = new String[] {"bucket", "create",
url + "/invalid-volume/" + bucketName};
executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND");
// test createBucket with invalid bucket name
args = new String[] {"bucket", "create",
url + "/" + vol.getName() + "/" + bucketName + "/invalid-name"};
executeWithError(shell, args,
"Invalid bucket name. Delimiters (/) not allowed in bucket name");
}
@Test
public void testDeleteBucket() throws Exception {
LOG.info("Running testDeleteBucket");
OzoneVolume vol = creatVolume();
String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
vol.createBucket(bucketName);
OzoneBucket bucketInfo = vol.getBucket(bucketName);
assertNotNull(bucketInfo);
String[] args = new String[] {"bucket", "delete",
url + "/" + vol.getName() + "/" + bucketName};
execute(shell, args);
// verify if bucket has been deleted in volume
try {
vol.getBucket(bucketName);
fail("Get bucket should have thrown.");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains(
"Info Bucket failed, error: BUCKET_NOT_FOUND", e);
}
// test delete bucket in a non-exist volume
args = new String[] {"bucket", "delete",
url + "/invalid-volume" + "/" + bucketName};
executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND");
err.reset();
// test delete non-exist bucket
args = new String[] {"bucket", "delete",
url + "/" + vol.getName() + "/invalid-bucket"};
executeWithError(shell, args,
"Delete Bucket failed, error:BUCKET_NOT_FOUND");
}
@Test
public void testInfoBucket() throws Exception {
LOG.info("Running testInfoBucket");
OzoneVolume vol = creatVolume();
String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
vol.createBucket(bucketName);
String[] args = new String[] {"bucket", "info",
url + "/" + vol.getName() + "/" + bucketName};
execute(shell, args);
String output = out.toString();
assertTrue(output.contains(bucketName));
assertTrue(output.contains("createdOn")
&& output.contains(OzoneConsts.OZONE_TIME_ZONE));
// test infoBucket with invalid bucket name
args = new String[] {"bucket", "info",
url + "/" + vol.getName() + "/" + bucketName + "/invalid-name"};
executeWithError(shell, args,
"Invalid bucket name. Delimiters (/) not allowed in bucket name");
// test get info from a non-exist bucket
args = new String[] {"bucket", "info",
url + "/" + vol.getName() + "/invalid-bucket" + bucketName};
executeWithError(shell, args,
"Info Bucket failed, error: BUCKET_NOT_FOUND");
}
@Test
public void testUpdateBucket() throws Exception {
LOG.info("Running testUpdateBucket");
OzoneVolume vol = creatVolume();
String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
vol.createBucket(bucketName);
OzoneBucket bucket = vol.getBucket(bucketName);
int aclSize = bucket.getAcls().size();
String[] args = new String[] {"bucket", "update",
url + "/" + vol.getName() + "/" + bucketName, "--addAcl",
"user:frodo:rw,group:samwise:r"};
execute(shell, args);
String output = out.toString();
assertTrue(output.contains("createdOn")
&& output.contains(OzoneConsts.OZONE_TIME_ZONE));
bucket = vol.getBucket(bucketName);
assertEquals(2 + aclSize, bucket.getAcls().size());
OzoneAcl acl = bucket.getAcls().get(aclSize);
assertTrue(acl.getName().equals("frodo")
&& acl.getType() == OzoneACLType.USER
&& acl.getRights()== OzoneACLRights.READ_WRITE);
args = new String[] {"bucket", "update",
url + "/" + vol.getName() + "/" + bucketName, "--removeAcl",
"user:frodo:rw"};
execute(shell, args);
bucket = vol.getBucket(bucketName);
acl = bucket.getAcls().get(aclSize);
assertEquals(1 + aclSize, bucket.getAcls().size());
assertTrue(acl.getName().equals("samwise")
&& acl.getType() == OzoneACLType.GROUP
&& acl.getRights()== OzoneACLRights.READ);
    // test updating a non-existent bucket
args = new String[] {"bucket", "update",
url + "/" + vol.getName() + "/invalid-bucket", "--addAcl",
"user:frodo:rw"};
executeWithError(shell, args,
"Info Bucket failed, error: BUCKET_NOT_FOUND");
}
@Test
public void testListBucket() throws Exception {
LOG.info("Running testListBucket");
List<BucketInfo> buckets;
String commandOutput;
int bucketCount = 11;
OzoneVolume vol = creatVolume();
List<String> bucketNames = new ArrayList<>();
// create bucket from test-bucket0 to test-bucket10
for (int i = 0; i < bucketCount; i++) {
String name = "test-bucket" + i;
bucketNames.add(name);
vol.createBucket(name);
OzoneBucket bucket = vol.getBucket(name);
assertNotNull(bucket);
}
// test listBucket with invalid volume name
String[] args = new String[] {"bucket", "list",
url + "/" + vol.getName() + "/invalid-name"};
executeWithError(shell, args, "Invalid volume name. " +
"Delimiters (/) not allowed in volume name");
    // test --length option
args = new String[] {"bucket", "list",
url + "/" + vol.getName(), "--length", "100"};
execute(shell, args);
commandOutput = out.toString();
buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
BucketInfo.class);
assertEquals(11, buckets.size());
    // sort bucket names since the returned buckets aren't in creation order
Collections.sort(bucketNames);
// return bucket names should be [test-bucket0, test-bucket1,
    // test-bucket10, test-bucket2, ..., test-bucket9]
for (int i = 0; i < buckets.size(); i++) {
assertEquals(buckets.get(i).getBucketName(), bucketNames.get(i));
assertEquals(buckets.get(i).getVolumeName(), vol.getName());
assertTrue(buckets.get(i).getCreatedOn()
.contains(OzoneConsts.OZONE_TIME_ZONE));
}
out.reset();
args = new String[] {"bucket", "list", url + "/" + vol.getName(),
"--length", "3"};
execute(shell, args);
commandOutput = out.toString();
buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
BucketInfo.class);
assertEquals(3, buckets.size());
// return bucket names should be [test-bucket0,
// test-bucket1, test-bucket10]
assertEquals(buckets.get(0).getBucketName(), "test-bucket0");
assertEquals(buckets.get(1).getBucketName(), "test-bucket1");
assertEquals(buckets.get(2).getBucketName(), "test-bucket10");
// test --prefix option
out.reset();
args = new String[] {"bucket", "list", url + "/" + vol.getName(),
"--length", "100", "--prefix", "test-bucket1"};
execute(shell, args);
commandOutput = out.toString();
buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
BucketInfo.class);
assertEquals(2, buckets.size());
// return bucket names should be [test-bucket1, test-bucket10]
assertEquals(buckets.get(0).getBucketName(), "test-bucket1");
assertEquals(buckets.get(1).getBucketName(), "test-bucket10");
    // test --start option
out.reset();
args = new String[] {"bucket", "list", url + "/" + vol.getName(),
"--length", "100", "--start", "test-bucket7"};
execute(shell, args);
commandOutput = out.toString();
buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
BucketInfo.class);
assertEquals(2, buckets.size());
assertEquals(buckets.get(0).getBucketName(), "test-bucket8");
assertEquals(buckets.get(1).getBucketName(), "test-bucket9");
// test error conditions
err.reset();
args = new String[] {"bucket", "list", url + "/" + vol.getName(),
"--length", "-1"};
executeWithError(shell, args, "the length should be a positive number");
}
@Test
public void testPutKey() throws Exception {
LOG.info("Running testPutKey");
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String keyName = "key" + RandomStringUtils.randomNumeric(5);
String[] args = new String[] {"key", "put",
url + "/" + volumeName + "/" + bucketName + "/" + keyName,
createTmpFile()};
execute(shell, args);
OzoneKey keyInfo = bucket.getKey(keyName);
assertEquals(keyName, keyInfo.getName());
    // test putting a key in a non-existent bucket
args = new String[] {"key", "put",
url + "/" + volumeName + "/invalid-bucket/" + keyName,
createTmpFile()};
executeWithError(shell, args,
"Info Bucket failed, error: BUCKET_NOT_FOUND");
}
@Test
public void testGetKey() throws Exception {
LOG.info("Running testGetKey");
String keyName = "key" + RandomStringUtils.randomNumeric(5);
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String dataStr = "test-data";
OzoneOutputStream keyOutputStream =
bucket.createKey(keyName, dataStr.length());
keyOutputStream.write(dataStr.getBytes());
keyOutputStream.close();
String tmpPath = baseDir.getAbsolutePath() + "/testfile-"
+ UUID.randomUUID().toString();
String[] args = new String[] {"key", "get",
url + "/" + volumeName + "/" + bucketName + "/" + keyName,
tmpPath};
execute(shell, args);
byte[] dataBytes = new byte[dataStr.length()];
try (FileInputStream randFile = new FileInputStream(new File(tmpPath))) {
randFile.read(dataBytes);
}
assertEquals(dataStr, DFSUtil.bytes2String(dataBytes));
tmpPath = baseDir.getAbsolutePath() + File.separatorChar + keyName;
args = new String[] {"key", "get",
url + "/" + volumeName + "/" + bucketName + "/" + keyName,
baseDir.getAbsolutePath()};
execute(shell, args);
dataBytes = new byte[dataStr.length()];
try (FileInputStream randFile = new FileInputStream(new File(tmpPath))) {
randFile.read(dataBytes);
}
assertEquals(dataStr, DFSUtil.bytes2String(dataBytes));
}
@Test
public void testDeleteKey() throws Exception {
LOG.info("Running testDeleteKey");
String keyName = "key" + RandomStringUtils.randomNumeric(5);
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String dataStr = "test-data";
OzoneOutputStream keyOutputStream =
bucket.createKey(keyName, dataStr.length());
keyOutputStream.write(dataStr.getBytes());
keyOutputStream.close();
OzoneKey keyInfo = bucket.getKey(keyName);
assertEquals(keyName, keyInfo.getName());
String[] args = new String[] {"key", "delete",
url + "/" + volumeName + "/" + bucketName + "/" + keyName};
execute(shell, args);
// verify if key has been deleted in the bucket
try {
bucket.getKey(keyName);
fail("Get key should have thrown.");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains(
"Lookup key failed, error:KEY_NOT_FOUND", e);
}
    // test deleting a key in a non-existent bucket
args = new String[] {"key", "delete",
url + "/" + volumeName + "/invalid-bucket/" + keyName};
executeWithError(shell, args,
"Info Bucket failed, error: BUCKET_NOT_FOUND");
err.reset();
    // test deleting a non-existent key in the bucket
args = new String[] {"key", "delete",
url + "/" + volumeName + "/" + bucketName + "/invalid-key"};
executeWithError(shell, args, "Delete key failed, error:KEY_NOT_FOUND");
}
@Test
public void testInfoKeyDetails() throws Exception {
LOG.info("Running testInfoKey");
String keyName = "key" + RandomStringUtils.randomNumeric(5);
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String dataStr = "test-data";
OzoneOutputStream keyOutputStream =
bucket.createKey(keyName, dataStr.length());
keyOutputStream.write(dataStr.getBytes());
keyOutputStream.close();
String[] args = new String[] {"key", "info",
url + "/" + volumeName + "/" + bucketName + "/" + keyName};
// verify the response output
execute(shell, args);
String output = out.toString();
assertTrue(output.contains(keyName));
assertTrue(
output.contains("createdOn") && output.contains("modifiedOn") && output
.contains(OzoneConsts.OZONE_TIME_ZONE));
assertTrue(
output.contains("containerID") && output.contains("localID") && output
.contains("length") && output.contains("offset"));
// reset stream
out.reset();
err.reset();
    // get the info of a non-existent key
args = new String[] {"key", "info",
url + "/" + volumeName + "/" + bucketName + "/invalid-key"};
// verify the response output
    // getting info for a non-existent key should fail
executeWithError(shell, args, "Lookup key failed, error:KEY_NOT_FOUND");
}
@Test
public void testInfoDirKey() throws Exception {
LOG.info("Running testInfoKey for Dir Key");
String dirKeyName = "test/";
String keyNameOnly = "test";
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String dataStr = "test-data";
OzoneOutputStream keyOutputStream =
bucket.createKey(dirKeyName, dataStr.length());
keyOutputStream.write(dataStr.getBytes());
keyOutputStream.close();
String[] args = new String[] {"key", "info",
url + "/" + volumeName + "/" + bucketName + "/" + dirKeyName};
// verify the response output
execute(shell, args);
String output = out.toString();
assertTrue(output.contains(dirKeyName));
assertTrue(output.contains("createdOn") &&
output.contains("modifiedOn") &&
output.contains(OzoneConsts.OZONE_TIME_ZONE));
args = new String[] {"key", "info",
url + "/" + volumeName + "/" + bucketName + "/" + keyNameOnly};
executeWithError(shell, args, "Lookup key failed, error:KEY_NOT_FOUND");
out.reset();
err.reset();
}
@Test
public void testListKey() throws Exception {
LOG.info("Running testListKey");
String commandOutput;
List<KeyInfo> keys;
int keyCount = 11;
OzoneBucket bucket = creatBucket();
String volumeName = bucket.getVolumeName();
String bucketName = bucket.getName();
String keyName;
List<String> keyNames = new ArrayList<>();
for (int i = 0; i < keyCount; i++) {
keyName = "test-key" + i;
keyNames.add(keyName);
String dataStr = "test-data";
OzoneOutputStream keyOutputStream =
bucket.createKey(keyName, dataStr.length());
keyOutputStream.write(dataStr.getBytes());
keyOutputStream.close();
}
// test listKey with invalid bucket name
String[] args = new String[] {"key", "list",
url + "/" + volumeName + "/" + bucketName + "/invalid-name"};
executeWithError(shell, args, "Invalid bucket name. " +
"Delimiters (/) not allowed in bucket name");
    // test --length option
args = new String[] {"key", "list",
url + "/" + volumeName + "/" + bucketName, "--length", "100"};
execute(shell, args);
commandOutput = out.toString();
keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
KeyInfo.class);
assertEquals(11, keys.size());
    // sort key names since the returned keys aren't in creation order
Collections.sort(keyNames);
// return key names should be [test-key0, test-key1,
    // test-key10, test-key2, ..., test-key9]
for (int i = 0; i < keys.size(); i++) {
assertEquals(keys.get(i).getKeyName(), keyNames.get(i));
// verify the creation/modification time of key
assertTrue(keys.get(i).getCreatedOn()
.contains(OzoneConsts.OZONE_TIME_ZONE));
assertTrue(keys.get(i).getModifiedOn()
.contains(OzoneConsts.OZONE_TIME_ZONE));
}
out.reset();
args =
new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName,
"--length", "3"};
execute(shell, args);
commandOutput = out.toString();
keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
KeyInfo.class);
assertEquals(3, keys.size());
// return key names should be [test-key0, test-key1, test-key10]
assertEquals(keys.get(0).getKeyName(), "test-key0");
assertEquals(keys.get(1).getKeyName(), "test-key1");
assertEquals(keys.get(2).getKeyName(), "test-key10");
// test --prefix option
out.reset();
args =
new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName,
"--length", "100", "--prefix", "test-key1"};
execute(shell, args);
commandOutput = out.toString();
keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
KeyInfo.class);
assertEquals(2, keys.size());
// return key names should be [test-key1, test-key10]
assertEquals(keys.get(0).getKeyName(), "test-key1");
assertEquals(keys.get(1).getKeyName(), "test-key10");
    // test --start option
out.reset();
args =
new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName,
"--length", "100", "--start", "test-key7"};
execute(shell, args);
commandOutput = out.toString();
keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
KeyInfo.class);
assertEquals(keys.get(0).getKeyName(), "test-key8");
assertEquals(keys.get(1).getKeyName(), "test-key9");
// test error conditions
err.reset();
args =
new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName,
"--length", "-1"};
executeWithError(shell, args, "the length should be a positive number");
}
@Test
public void testS3BucketMapping() throws IOException {
List<ServiceInfo> services =
cluster.getOzoneManager().getServiceList();
String omHostName = services.stream().filter(
a -> a.getNodeType().equals(HddsProtos.NodeType.OM))
.collect(Collectors.toList()).get(0).getHostname();
String omPort = cluster.getOzoneManager().getRpcPort();
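    // build a generic "--set=<config-key>=<value>" override so the shell command below talks to
    // this test cluster's OzoneManager instead of whatever address the default configuration holds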
String setOmAddress =
"--set=" + OZONE_OM_ADDRESS_KEY + "=" + omHostName + ":" + omPort;
String s3Bucket = "bucket1";
String commandOutput;
createS3Bucket("ozone", s3Bucket);
//WHEN
String[] args =
new String[] {setOmAddress, "bucket",
"path", s3Bucket};
execute(shell, args);
//THEN
commandOutput = out.toString();
String volumeName = client.getOzoneVolumeName(s3Bucket);
assertTrue(commandOutput.contains("Volume name for S3Bucket is : " +
volumeName));
assertTrue(commandOutput.contains(OzoneConsts.OZONE_URI_SCHEME + "://" +
s3Bucket + "." + volumeName));
out.reset();
//Trying to get map for an unknown bucket
args = new String[] {setOmAddress, "bucket", "path",
"unknownbucket"};
executeWithError(shell, args, "S3_BUCKET_NOT_FOUND");
// No bucket name
args = new String[] {setOmAddress, "bucket", "path"};
executeWithError(shell, args, "Missing required parameter");
// Invalid bucket name
args = new String[] {setOmAddress, "bucket", "path", "/asd/multipleslash"};
executeWithError(shell, args, "S3_BUCKET_NOT_FOUND");
}
private void createS3Bucket(String userName, String s3Bucket) {
try {
client.createS3Bucket("ozone", s3Bucket);
} catch (IOException ex) {
GenericTestUtils.assertExceptionContains("S3_BUCKET_ALREADY_EXISTS", ex);
}
}
private OzoneVolume creatVolume() throws OzoneException, IOException {
String volumeName = RandomStringUtils.randomNumeric(5) + "volume";
VolumeArgs volumeArgs = VolumeArgs.newBuilder()
.setOwner("bilbo")
.setQuota("100TB")
.build();
try {
client.createVolume(volumeName, volumeArgs);
} catch (Exception ex) {
Assert.assertEquals("PartialGroupNameException",
ex.getCause().getClass().getSimpleName());
}
OzoneVolume volume = client.getVolumeDetails(volumeName);
return volume;
}
private OzoneBucket creatBucket() throws OzoneException, IOException {
OzoneVolume vol = creatVolume();
String bucketName = RandomStringUtils.randomNumeric(5) + "bucket";
vol.createBucket(bucketName);
OzoneBucket bucketInfo = vol.getBucket(bucketName);
return bucketInfo;
}
/**
* Create a temporary file used for putting key.
* @return the created file's path string
* @throws Exception
*/
private String createTmpFile() throws Exception {
// write a new file that used for putting key
File tmpFile = new File(baseDir,
"/testfile-" + UUID.randomUUID().toString());
FileOutputStream randFile = new FileOutputStream(tmpFile);
Random r = new Random();
for (int x = 0; x < 10; x++) {
char c = (char) (r.nextInt(26) + 'a');
randFile.write(c);
}
randFile.close();
return tmpFile.getAbsolutePath();
}
}
| xiao-chen/hadoop | hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java | Java | apache-2.0 | 43,840 |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core.services.jdbc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import org.junit.Test;
import com.adaptris.core.CoreException;
import com.adaptris.core.jdbc.JdbcConnection;
import com.adaptris.core.util.JdbcUtil;
import com.adaptris.core.util.LifecycleHelper;
import com.adaptris.util.KeyValuePair;
import com.adaptris.util.KeyValuePairSet;
public abstract class JdbcMapInsertCase {
protected static final String CONTENT =
"firstname=alice\n" +
"lastname=smith\n" +
"dob=2017-01-01";
protected static final String INVALID_COLUMN =
"fi$rstname=alice\n" + "la$stname=smith\n" + "dob=2017-01-01";
protected static final String JDBC_DRIVER = "org.apache.derby.jdbc.EmbeddedDriver";
protected static final String JDBC_URL = "jdbc:derby:memory:JDCB_OBJ_DB;create=true";
protected static final String TABLE_NAME = "people";
protected static final String DROP_STMT = String.format("DROP TABLE %s", TABLE_NAME);
protected static final String CREATE_STMT = String.format("CREATE TABLE %s (firstname VARCHAR(128) NOT NULL, "
+ "lastname VARCHAR(128) NOT NULL, "
+ "dob DATE)",
TABLE_NAME);
protected static final String CREATE_QUOTED = String.format(
"CREATE TABLE %s (\"firstname\" VARCHAR(128) NOT NULL, \"lastname\" VARCHAR(128) NOT NULL, \"dob\" DATE)", TABLE_NAME);
@Test
public void testService_Init() throws Exception {
JdbcMapInsert service = createService();
try {
LifecycleHelper.init(service);
fail();
} catch (CoreException expected) {
}
service.setTable("hello");
LifecycleHelper.init(service);
}
protected abstract JdbcMapInsert createService();
protected static void doAssert(int expectedCount) throws Exception {
Connection c = null;
PreparedStatement p = null;
try {
c = createConnection();
p = c.prepareStatement(String.format("SELECT * FROM %s", TABLE_NAME));
ResultSet rs = p.executeQuery();
int count = 0;
while (rs.next()) {
count++;
assertEquals("smith", rs.getString("lastname"));
}
assertEquals(expectedCount, count);
JdbcUtil.closeQuietly(rs);
} finally {
JdbcUtil.closeQuietly(p);
JdbcUtil.closeQuietly(c);
}
}
protected static Connection createConnection() throws Exception {
Connection c = null;
Class.forName(JDBC_DRIVER);
c = DriverManager.getConnection(JDBC_URL);
c.setAutoCommit(true);
return c;
}
protected static void createDatabase() throws Exception {
createDatabase(CREATE_STMT);
}
protected static void createDatabase(String createStmt) throws Exception {
Connection c = null;
Statement s = null;
try {
c = createConnection();
s = c.createStatement();
executeQuietly(s, DROP_STMT);
s.execute(createStmt);
}
finally {
JdbcUtil.closeQuietly(s);
JdbcUtil.closeQuietly(c);
}
}
protected static void executeQuietly(Statement s, String sql) {
try {
s.execute(sql);
} catch (Exception e) {
;
}
}
protected static <T extends JdbcMapInsert> T configureForTests(T t) {
JdbcMapInsert service = t;
JdbcConnection connection = new JdbcConnection();
connection.setConnectUrl(JDBC_URL);
connection.setDriverImp(JDBC_DRIVER);
service.setConnection(connection);
KeyValuePairSet mappings = new KeyValuePairSet();
mappings.add(new KeyValuePair("dob", JdbcMapInsert.BasicType.Date.name()));
service.withTable(TABLE_NAME).withMappings(mappings);
return t;
}
}
| adaptris/interlok | interlok-core/src/test/java/com/adaptris/core/services/jdbc/JdbcMapInsertCase.java | Java | apache-2.0 | 4,375 |
/*
* Project Scelight
*
* Copyright (c) 2013 Andras Belicza <iczaaa@gmail.com>
*
* This software is the property of Andras Belicza.
* Copying, modifying, distributing, refactoring without the author's permission
* is prohibited and protected by Law.
*/
package hu.scelight.gui.page.replist.column.impl;
import hu.scelight.gui.icon.Icons;
import hu.scelight.gui.page.replist.column.BaseColumn;
import hu.scelight.sc2.rep.repproc.RepProcessor;
import java.util.Date;
/**
* Replay date column.
*
* @author Andras Belicza
*/
public class DateColumn extends BaseColumn< Date > {
/**
* Creates a new {@link DateColumn}.
*/
public DateColumn() {
super( "Date", Icons.F_CALENDAR_BLUE, "Replay date", Date.class, true );
}
@Override
public Date getData( final RepProcessor repProc ) {
return repProc.replay.details.getTime();
}
}
| icza/scelight | src-app/hu/scelight/gui/page/replist/column/impl/DateColumn.java | Java | apache-2.0 | 896 |
/*
* #%L
* FlatPack serialization code
* %%
* Copyright (C) 2012 Perka Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.getperka.flatpack.codex;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import org.junit.Test;
import com.getperka.flatpack.FlatPackTest;
import com.getperka.flatpack.HasUuid;
import com.getperka.flatpack.codexes.ArrayCodex;
import com.getperka.flatpack.codexes.ListCodex;
import com.getperka.flatpack.codexes.SetCodex;
import com.getperka.flatpack.domain.Employee;
import com.getperka.flatpack.domain.Person;
import com.getperka.flatpack.util.FlatPackCollections;
import com.google.inject.TypeLiteral;
/**
* Tests serializing collections of things.
*/
public class CollectionCodexTest extends FlatPackTest {
@Inject
private TypeLiteral<ArrayCodex<Person>> arrayPerson;
@Inject
private TypeLiteral<ArrayCodex<String>> arrayString;
@Inject
private TypeLiteral<ListCodex<Person>> listPerson;
@Inject
private TypeLiteral<ListCodex<String>> listString;
@Inject
private TypeLiteral<SetCodex<String>> setString;
@Inject
private Employee employee;
@Test
public void testArray() {
String[] in = { "Hello", " ", "", null, "World!" };
String[] out = testCodex(arrayString, in);
assertArrayEquals(in, out);
Set<HasUuid> scanned = FlatPackCollections.setForIteration();
Employee[] in2 = { employee, null, employee };
Person[] out2 = testCodex(arrayPerson, in2, scanned);
assertEquals(Collections.singleton(employee), scanned);
/*
* Because we're testing without a full flatpack structure, all we can expect is that a HasUuid
* is created with the same UUID. The concrete type would normally be specified in the data
* section, however it is missing, so we expect the configured type of the codex instead.
*/
Person p = out2[0];
assertNotNull(p);
assertEquals(Person.class, p.getClass());
assertEquals(employee.getUuid(), p.getUuid());
}
@Test
public void testList() {
List<String> in = Arrays.asList("Hello", " ", "", null, "World!");
Collection<String> out = testCodex(listString, in);
assertEquals(in, out);
Set<HasUuid> scanned = FlatPackCollections.setForIteration();
List<Person> in2 = Arrays.<Person> asList(employee, null, employee);
Collection<Person> out2 = testCodex(listPerson, in2, scanned);
assertEquals(Collections.singleton(employee), scanned);
/*
* Because we're testing without a full flatpack structure, all we can expect is that a HasUuid
* is created with the same UUID. The concrete type would normally be specified in the data
* section, however it is missing, so we expect the configured type of the codex instead.
*/
Person p = ((List<Person>) out2).get(0);
assertNotNull(p);
assertEquals(Person.class, p.getClass());
assertEquals(employee.getUuid(), p.getUuid());
}
@Test
public void testNull() {
assertNull(testCodex(arrayString, null));
assertNull(testCodex(listString, null));
assertNull(testCodex(setString, null));
}
@Test
public void testSet() {
Set<String> in = new LinkedHashSet<String>(Arrays.asList("Hello", " ", "", null, "World!"));
Set<String> out = testCodex(setString, in);
assertEquals(in, out);
}
}
| perka/flatpack-java | core/src/test/java/com/getperka/flatpack/codex/CollectionCodexTest.java | Java | apache-2.0 | 4,155 |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.location.suplclient.asn1.supl2.lpp;
// Copyright 2008 Google Inc. All Rights Reserved.
/*
* This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
*/
//
//
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;
/**
*
*/
public class GNSS_RealTimeIntegrityReq extends Asn1Sequence {
//
private static final Asn1Tag TAG_GNSS_RealTimeIntegrityReq
= Asn1Tag.fromClassAndNumber(-1, -1);
public GNSS_RealTimeIntegrityReq() {
super();
}
@Override
@Nullable
protected Asn1Tag getTag() {
return TAG_GNSS_RealTimeIntegrityReq;
}
@Override
protected boolean isTagImplicit() {
return true;
}
public static Collection<Asn1Tag> getPossibleFirstTags() {
if (TAG_GNSS_RealTimeIntegrityReq != null) {
return ImmutableList.of(TAG_GNSS_RealTimeIntegrityReq);
} else {
return Asn1Sequence.getPossibleFirstTags();
}
}
/**
* Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
*/
public static GNSS_RealTimeIntegrityReq fromPerUnaligned(byte[] encodedBytes) {
GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
result.decodePerUnaligned(new BitStreamReader(encodedBytes));
return result;
}
/**
* Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
*/
public static GNSS_RealTimeIntegrityReq fromPerAligned(byte[] encodedBytes) {
GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
result.decodePerAligned(new BitStreamReader(encodedBytes));
return result;
}
@Override protected boolean isExtensible() {
return true;
}
@Override public boolean containsExtensionValues() {
for (SequenceComponent extensionComponent : getExtensionComponents()) {
if (extensionComponent.isExplicitlySet()) return true;
}
return false;
}
@Override public Iterable<? extends SequenceComponent> getComponents() {
ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
return builder.build();
}
@Override public Iterable<? extends SequenceComponent>
getExtensionComponents() {
ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
return builder.build();
}
@Override public Iterable<BitStream> encodePerUnaligned() {
return super.encodePerUnaligned();
}
@Override public Iterable<BitStream> encodePerAligned() {
return super.encodePerAligned();
}
@Override public void decodePerUnaligned(BitStreamReader reader) {
super.decodePerUnaligned(reader);
}
@Override public void decodePerAligned(BitStreamReader reader) {
super.decodePerAligned(reader);
}
@Override public String toString() {
return toIndentedString("");
}
public String toIndentedString(String indent) {
StringBuilder builder = new StringBuilder();
builder.append("GNSS_RealTimeIntegrityReq = {\n");
final String internalIndent = indent + " ";
for (SequenceComponent component : getComponents()) {
if (component.isExplicitlySet()) {
builder.append(internalIndent)
.append(component.toIndentedString(internalIndent));
}
}
if (isExtensible()) {
builder.append(internalIndent).append("...\n");
for (SequenceComponent component : getExtensionComponents()) {
if (component.isExplicitlySet()) {
builder.append(internalIndent)
.append(component.toIndentedString(internalIndent));
}
}
}
builder.append(indent).append("};\n");
return builder.toString();
}
}
| google/supl-client | src/main/java/com/google/location/suplclient/asn1/supl2/lpp/GNSS_RealTimeIntegrityReq.java | Java | apache-2.0 | 4,587 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package modelo.formularios;
import controlador.dbConnection;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.swing.JOptionPane;
/**
*
* @author Eisner López Acevedo <eisner.lopez at gmail.com>
*/
public class Interfaz_Factura {
private final dbConnection myLink = new dbConnection();
private final Connection conexion = dbConnection.getConnection();
private String querySQL = "";
ResultSet rs = null;
PreparedStatement pst = null;
public boolean mostrarFactura(String Buscar) {
        String[] registro = new String[8];
        boolean found = false;
querySQL
= "SELECT `factura_cabina`.`factura_id`, "
+ "`factura_cabina`.`cant_dia`, "
+ "`factura_cabina`.`fecha`, "
+ "`factura_cabina`.`impuesto_cabina`, "
+ "`factura_cabina`.`precio_total_cabina`, "
+ "`factura_cabina`.`cabina_cabina_id`, "
+ "`factura_cabina`.`colaborador_empleado_id`, "
+ "`factura_cabina`.`numero_factura`"
+ "FROM `pct3`.`factura_cabina`"
+ "WHERE "
+ "`factura_cabina`.`numero_factura` = '" + Buscar + "'"
+ "order by `factura_cabina`.`numero_factura`;";
try {
Statement st = conexion.createStatement();
rs = st.executeQuery(querySQL);
            while (rs.next()) {
                found = true;
registro[0] = rs.getString(1);
registro[1] = rs.getString(2);
registro[2] = rs.getString(3);
registro[3] = rs.getString(4);
registro[4] = rs.getString(5);
registro[5] = rs.getString(6);
registro[6] = rs.getString(7);
registro[7] = rs.getString(8);
}
} catch (SQLException sqle) {
JOptionPane.showConfirmDialog(null, sqle);
}
        return found;
}
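    /*
     * Editor's sketch (illustrative, not part of the original source): the same lookup written with
     * the already-imported PreparedStatement so that the "Buscar" value is bound as a parameter
     * instead of being concatenated into the SQL string. The column list is shortened to "*" purely
     * for brevity here.
     *
     *   String sql = "SELECT * FROM `pct3`.`factura_cabina` "
     *       + "WHERE `factura_cabina`.`numero_factura` = ? "
     *       + "ORDER BY `factura_cabina`.`numero_factura`";
     *   try (PreparedStatement ps = conexion.prepareStatement(sql)) {
     *       ps.setString(1, Buscar);
     *       try (ResultSet rset = ps.executeQuery()) {
     *           while (rset.next()) {
     *               // read the columns exactly as in mostrarFactura above
     *           }
     *       }
     *   }
     */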
}
| eisnerh/PCT_315 | TropiCabinas/src/modelo/formularios/Interfaz_Factura.java | Java | apache-2.0 | 2,200 |
// ***************************************************************************
// * Copyright 2014 Joseph Molnar
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// ***************************************************************************
package com.talvish.tales.samples.userclient;
import java.time.LocalDate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.talvish.tales.businessobjects.ObjectId;
import com.talvish.tales.client.http.ResourceClient;
import com.talvish.tales.client.http.ResourceConfiguration;
import com.talvish.tales.client.http.ResourceMethod;
import com.talvish.tales.client.http.ResourceResult;
import com.talvish.tales.communication.HttpVerb;
import com.talvish.tales.parts.ArgumentParser;
import com.talvish.tales.system.configuration.ConfigurationManager;
import com.talvish.tales.system.configuration.MapSource;
import com.talvish.tales.system.configuration.PropertyFileSource;
/**
* The client for talking to the UserService.
* @author jmolnar
*
*/
public class UserClient extends ResourceClient {
private static final Logger logger = LoggerFactory.getLogger( UserClient.class );
/**
* This main is really just to demonstrate calling and would not exist in an actual client.
*/
public static void main( String[ ] theArgs ) throws Exception {
// get the configuration system up and running
ConfigurationManager configurationManager = new ConfigurationManager( );
// we prepare two sources for configurations
// first the command line source
configurationManager.addSource( new MapSource( "command-line", ArgumentParser.parse( theArgs ) ) );
// second the file source, if the command-line indicates a file is to be used
String filename = configurationManager.getStringValue( "settings.file", null ); // we will store config in a file ideally
if( !Strings.isNullOrEmpty( filename ) ) {
configurationManager.addSource( new PropertyFileSource( filename ) );
}
UserClient client = new UserClient( configurationManager.getValues( "user_service", ResourceConfiguration.class ), "sample_user_client/1.0" );
// client.setHeaderOverride( "Authorization", "random" ); //<= for testing, perhaps want to override this value, assuming server allows overrides
// next we see what mode we are in, setup or not setup
String operation = configurationManager.getStringValue( "operation", "update_user" );
ResourceResult<User> result;
switch( operation ) {
case "update_user":
result = client.getUser( new ObjectId( 1, 1, 100 ) );
if( result.getResult() != null ) {
logger.debug( "Found user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
result.getResult().setFirstName( "Bilbo" );
result.getResult().getAliases( ).add( "billy" );
result.getResult().getSettings().put( "favourite_category", "games" );
result = client.updateUser( result.getResult() );
logger.debug( "Updated user: '{}'", result.getResult().getFirstName( ) );
} else {
logger.debug( "Did not find user." );
}
break;
case "create_user":
//for( int i = 0; i < 1; i += 1 ) {
User user = new User( );
user.setFirstName( "Jimmy" );
user.setMiddleName( "Scott" );
user.setLastName( "McWhalter" );
user.setBirthdate( LocalDate.of( 1992, 1, 31 ) );
user.getAliases().add( "alias1" );
result = client.createUser( user );
if( result.getResult() != null ) {
logger.debug( "Created user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
} else {
logger.debug( "Did not create user." );
}
//}
break;
default:
break;
}
// TODO: this doesn't exit at the end of the main here, need to understand why
		// (which is why I added the System.exit(0))
		// TODO: one time when this ran it threw some form of SSL EOF related error that
// I need to track down (this happened on the server too)
System.console().writer().print( "Please <Enter> to quit ..." );
System.console().writer().flush();
System.console().readLine();
System.exit( 0 );
}
private String authToken = "Sample key=\"42349840984\"";
/**
* The constructor used to create the client.
* @param theConfiguration the configuration needed to talk to the service
* @param theUserAgent the user agent to use while talking to the service
*/
public UserClient( ResourceConfiguration theConfiguration, String theUserAgent ) {
super( theConfiguration, "/user", "20140124", theUserAgent );
// we now define the methods that we are going to expose for calling
this.methods = new ResourceMethod[ 3 ];
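		// index 0 = get_user, 1 = update_user, 2 = create_user; these positions are relied upon by
		// getUser(), updateUser() and createUser() below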
this.methods[ 0 ] = this.defineMethod( "get_user", User.class, HttpVerb.GET, "users/{id}" )
.definePathParameter("id", ObjectId.class )
.defineHeaderParameter( "Authorization", String.class );
this.methods[ 1 ] = this.defineMethod( "update_user", User.class, HttpVerb.POST, "users/{id}/update" )
.definePathParameter( "id", ObjectId.class )
.defineBodyParameter( "user", User.class )
.defineHeaderParameter( "Authorization", String.class );
this.methods[ 2 ] = this.defineMethod( "create_user", User.class, HttpVerb.POST, "users/create" )
.defineBodyParameter( "user", User.class )
.defineHeaderParameter( "Authorization", String.class );
}
/**
* Requests a particular user.
* @param theUserId the id of the user being requested
* @return the requested user, if found, null otherwise
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> getUser( ObjectId theUserId ) throws InterruptedException {
Preconditions.checkNotNull( theUserId, "need a user id to retrieve a user" );
return this.createRequest( this.methods[ 0 ], theUserId )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
/**
* A call to save the values of a user on the server.
* @param theUser the user to save
* @return the server returned version of the saved user
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> updateUser( User theUser ) throws InterruptedException {
Preconditions.checkNotNull( theUser, "need a user to be able to update" );
return this.createRequest( this.methods[ 1 ], theUser.getId() )
.setBodyParameter( "user", theUser )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
/**
* A call to create a new user
* @param theFirstName the first name of the user
* @param theLastName the last name of the user
* @return the freshly created user
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> createUser( User theUser) throws InterruptedException {
Preconditions.checkNotNull( theUser, "need a user" );
Preconditions.checkArgument( theUser.getId( ) == null, "user's id must be null" );
Preconditions.checkArgument( !Strings.isNullOrEmpty( theUser.getFirstName() ), "to create a user you need a first name" );
return this.createRequest( this.methods[ 2 ] )
.setBodyParameter( "user", theUser )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
}
| Talvish/Tales-Samples | user_client/src/main/java/com/talvish/tales/samples/userclient/UserClient.java | Java | apache-2.0 | 7,943 |
package com.ihtsdo.snomed.model.xml;
import java.sql.Date;
import javax.xml.bind.annotation.XmlRootElement;
import com.google.common.base.Objects;
import com.google.common.primitives.Longs;
import com.ihtsdo.snomed.dto.refset.RefsetDto;
import com.ihtsdo.snomed.model.refset.Refset;
@XmlRootElement(name="refset")
public class RefsetDtoShort {
private long id;
private XmlRefsetConcept concept;
private String publicId;
private String title;
private String description;
private Date created;
private Date lastModified;
private int memberSize;
private String snomedExtension;
private String snomedReleaseDate;
private boolean pendingChanges;
public RefsetDtoShort(Refset r){
setId(r.getId());
setConcept(new XmlRefsetConcept(r.getRefsetConcept()));
setPublicId(r.getPublicId());
setTitle(r.getTitle());
setDescription(r.getDescription());
setCreated(r.getCreationTime());
setLastModified(r.getModificationTime());
setPendingChanges(r.isPendingChanges());
setMemberSize(r.getMemberSize());
setSnomedExtension(r.getOntologyVersion().getFlavour().getPublicId());
setSnomedReleaseDate(RefsetDto.dateFormat.format(r.getOntologyVersion().getTaggedOn()));
}
public RefsetDtoShort(){}
@Override
public String toString() {
return Objects.toStringHelper(this)
.add("id", getId())
.add("concept", getConcept())
.add("publicId", getPublicId())
.add("title", getTitle())
.add("description", getDescription())
.add("created", getCreated())
.add("lastModified", getLastModified())
.add("pendingChanges", isPendingChanges())
.add("memberSize", getMemberSize())
.add("snomedExtension", getSnomedExtension())
.add("snomedReleaseDate", getSnomedReleaseDate())
.toString();
}
@Override
public int hashCode(){
return Longs.hashCode(getId());
}
@Override
public boolean equals(Object o){
if (o instanceof RefsetDtoShort){
RefsetDtoShort r = (RefsetDtoShort) o;
if (r.getId() == this.getId()){
return true;
}
}
return false;
}
public boolean isPendingChanges() {
return pendingChanges;
}
public void setPendingChanges(boolean pendingChanges) {
this.pendingChanges = pendingChanges;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public XmlRefsetConcept getConcept() {
return concept;
}
public void setConcept(XmlRefsetConcept concept) {
this.concept = concept;
}
public String getPublicId() {
return publicId;
}
public void setPublicId(String publicId) {
this.publicId = publicId;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
public Date getLastModified() {
return lastModified;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
public int getMemberSize() {
return memberSize;
}
public void setMemberSize(int memberSize) {
this.memberSize = memberSize;
}
public String getSnomedExtension() {
return snomedExtension;
}
public void setSnomedExtension(String snomedExtension) {
this.snomedExtension = snomedExtension;
}
public String getSnomedReleaseDate() {
return snomedReleaseDate;
}
public void setSnomedReleaseDate(String snomedReleaseDate) {
this.snomedReleaseDate = snomedReleaseDate;
}
public static RefsetDtoShort parse(Refset r){
return getBuilder(new XmlRefsetConcept(r.getRefsetConcept()),
r.getPublicId(),
r.getTitle(),
r.getDescription(),
r.getCreationTime(),
r.getModificationTime(),
r.isPendingChanges(),
r.getMemberSize(),
r.getOntologyVersion().getFlavour().getPublicId(),
r.getOntologyVersion().getTaggedOn()).build();
}
public static Builder getBuilder(XmlRefsetConcept concept, String publicId, String title,
String description, Date created, Date lastModified, boolean pendingChanges, int memberSize,
String snomedExtension, Date snomedReleaseDate) {
return new Builder(concept, publicId, title, description, created, lastModified, pendingChanges,
memberSize, snomedExtension, snomedReleaseDate);
}
public static class Builder {
private RefsetDtoShort built;
Builder(XmlRefsetConcept concept, String publicId, String title, String description,
Date created, Date lastModified, boolean pendingChanges, int memberSize,
String snomedExtension, Date snomedReleaseDate){
built = new RefsetDtoShort();
built.concept = concept;
built.publicId = publicId;
built.title = title;
built.description = description;
built.created = created;
built.lastModified = lastModified;
built.pendingChanges = pendingChanges;
built.memberSize = memberSize;
built.setSnomedExtension(snomedExtension);
built.setSnomedReleaseDate(RefsetDto.dateFormat.format(snomedReleaseDate));
}
public RefsetDtoShort build() {
return built;
}
}
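    /*
     * Illustrative builder usage (editor's note; the concrete argument values are hypothetical and
     * 'concept', 'created', 'lastModified' and 'releaseDate' are assumed to exist in the caller):
     *
     *   RefsetDtoShort dto = RefsetDtoShort.getBuilder(concept, "refset-1", "My refset",
     *       "Example description", created, lastModified, false, 42,
     *       "INTERNATIONAL", releaseDate).build();
     */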
}
| IHTSDO/snomed-publish | model/src/main/java/com/ihtsdo/snomed/model/xml/RefsetDtoShort.java | Java | apache-2.0 | 6,129 |
package com.fuyoul.sanwenseller.bean.pickerview;
import java.util.List;
public class ProvinceModel implements IPickerViewData {
private String name;
private List<CityModel> cityList;
@Override
public String getPickerViewText() {
return name;
}
public ProvinceModel() {
super();
}
public ProvinceModel(String name, List<CityModel> cityList) {
super();
this.name = name;
this.cityList = cityList;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<CityModel> getCityList() {
return cityList;
}
public void setCityList(List<CityModel> cityList) {
this.cityList = cityList;
}
@Override
public String toString() {
return "ProvinceModel [name=" + name + ", cityList=" + cityList + "]";
}
}
| newbieandroid/AppBase | app/src/main/java/com/fuyoul/sanwenseller/bean/pickerview/ProvinceModel.java | Java | apache-2.0 | 915 |
/*
* Copyright 2014-2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.metrics.api.jaxrs.handler;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_XHTML_XML;
import static javax.ws.rs.core.MediaType.TEXT_HTML;
import com.wordnik.swagger.annotations.ApiOperation;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
/**
* @author mwringe
*/
@Path("/")
public class BaseHandler {
public static final String PATH = "/";
@GET
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Returns some basic information about the Hawkular Metrics service.",
response = String.class, responseContainer = "Map")
public Response baseJSON(@Context ServletContext context) {
String version = context.getInitParameter("hawkular.metrics.version");
if (version == null) {
version = "undefined";
}
HawkularMetricsBase hawkularMetrics = new HawkularMetricsBase();
hawkularMetrics.version = version;
return Response.ok(hawkularMetrics).build();
}
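    // Illustrative response body (editor's note): with a typical JSON provider the returned entity
    // serializes roughly as {"name":"Hawkular-Metrics","version":"<hawkular.metrics.version or undefined>"};
    // the exact field order and serializer depend on the deployment.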
@GET
@Produces({APPLICATION_XHTML_XML, TEXT_HTML})
public void baseHTML(@Context ServletContext context) throws Exception {
HttpServletRequest request = ResteasyProviderFactory.getContextData(HttpServletRequest.class);
HttpServletResponse response = ResteasyProviderFactory.getContextData(HttpServletResponse.class);
request.getRequestDispatcher("/static/index.html").forward(request,response);
}
private class HawkularMetricsBase {
String name = "Hawkular-Metrics";
String version;
public String getName() {
return name;
}
public void setVersion(String version) {
this.version = version;
}
public String getVersion() {
return version;
}
}
}
| 140293816/Hawkular-fork | api/metrics-api-jaxrs/src/main/java/org/hawkular/metrics/api/jaxrs/handler/BaseHandler.java | Java | apache-2.0 | 2,802 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Dennis Ushakov
*/
package javax.accessibility;
import com.gaecompat.javax.swing.text.AttributeSet;
import com.google.code.appengine.awt.Point;
import com.google.code.appengine.awt.Rectangle;
public interface AccessibleText {
static final int CHARACTER = 1;
static final int WORD = 2;
static final int SENTENCE = 3;
int getIndexAtPoint(Point p);
Rectangle getCharacterBounds(int i);
int getCharCount();
int getCaretPosition();
String getAtIndex(int part, int index);
String getAfterIndex(int part, int index);
String getBeforeIndex(int part, int index);
AttributeSet getCharacterAttribute(int i);
int getSelectionStart();
int getSelectionEnd();
String getSelectedText();
}
| mike10004/appengine-imaging | gaecompat-awt-imaging/src/common/javax/accessibility/AccessibleText.java | Java | apache-2.0 | 1,610 |
/**
* Copyright 2015-2016 Maven Source Dependencies
* Plugin contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.l2x6.srcdeps.core.shell;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import org.l2x6.srcdeps.core.util.SrcdepsCoreUtils;
/**
* A definition of a shell command that can be executed by {@link Shell#execute(ShellCommand)}.
*
* @author <a href="https://github.com/ppalaga">Peter Palaga</a>
*/
public class ShellCommand {
private final List<String> arguments;
private final Map<String, String> environment;
private final String executable;
private final IoRedirects ioRedirects;
private final long timeoutMs;
private final Path workingDirectory;
public ShellCommand(String executable, List<String> arguments, Path workingDirectory,
Map<String, String> environment, IoRedirects ioRedirects, long timeoutMs) {
super();
SrcdepsCoreUtils.assertArgNotNull(executable, "executable");
SrcdepsCoreUtils.assertArgNotNull(arguments, "arguments");
SrcdepsCoreUtils.assertArgNotNull(workingDirectory, "workingDirectory");
SrcdepsCoreUtils.assertArgNotNull(environment, "environment");
SrcdepsCoreUtils.assertArgNotNull(ioRedirects, "ioRedirects");
this.executable = executable;
this.arguments = arguments;
this.workingDirectory = workingDirectory;
this.environment = environment;
this.ioRedirects = ioRedirects;
this.timeoutMs = timeoutMs;
}
/**
* @return an array containing the executable and its arguments that can be passed e.g. to
* {@link ProcessBuilder#command(String...)}
*/
public String[] asCmdArray() {
String[] result = new String[arguments.size() + 1];
int i = 0;
result[i++] = executable;
for (String arg : arguments) {
result[i++] = arg;
}
return result;
}
/**
     * @return the {@link List} of arguments for the executable. Cannot be {@code null}.
*/
public List<String> getArguments() {
return arguments;
}
/**
* @return a {@link Map} of environment variables that should be used when executing this {@link ShellCommand}.
* Cannot be {@code null}. Note that these are just overlay variables - when a new {@link Process} is
     *         spawned, the environment is copied from the current process and only the variables provided by this
     *         method are overwritten.
*/
public Map<String, String> getEnvironment() {
return environment;
}
/**
* @return the executable file that should be called
*/
public String getExecutable() {
return executable;
}
/**
* @return the {@link IoRedirects} to use when the {@link Shell} spawns a new {@link Process}
*/
public IoRedirects getIoRedirects() {
return ioRedirects;
}
/**
* @return timeout in milliseconds
*/
public long getTimeoutMs() {
return timeoutMs;
}
/**
* @return the directory in which this {@link ShellCommand} should be executed
*/
public Path getWorkingDirectory() {
return workingDirectory;
}
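    /*
     * Illustrative usage (editor's sketch, not part of the original source): assembling a command for
     * Shell#execute(ShellCommand). The executable, arguments, paths, environment entry and timeout are
     * hypothetical, and 'ioRedirects' stands for an IoRedirects instance obtained from that class's API.
     *
     *   ShellCommand command = new ShellCommand(
     *       "mvn",                                               // executable
     *       Arrays.asList("clean", "install"),                   // arguments
     *       Paths.get("/tmp/checkout"),                          // workingDirectory
     *       Collections.singletonMap("JAVA_HOME", "/opt/jdk"),   // environment overlay
     *       ioRedirects,                                         // stdin/stdout/stderr handling
     *       TimeUnit.MINUTES.toMillis(30));                      // timeoutMs
     *   // new Shell().execute(command);  // assuming Shell exposes a no-arg constructor
     */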
}
| jpkrohling/srcdeps-maven-plugin | srcdeps-core/src/main/java/org/l2x6/srcdeps/core/shell/ShellCommand.java | Java | apache-2.0 | 3,821 |
/*
* Powered By agile
* Web Site: http://www.agile.com
* Since 2008 - 2016
*/
package persistent.prestige.modules.edu.service;
import java.util.Map;
/**
 * Organization service class
* @author 雅居乐 2016-9-10 22:28:24
* @version 1.0
*/
public interface OrganizationService{
/**
	 * Saves the organization information.
* @param datas
* @return
*/
Integer saveOrganization(Map datas);
}
| dingwpmz/Mycat-Demo | src/main/java/persistent/prestige/modules/edu/service/OrganizationService.java | Java | apache-2.0 | 381 |
package buchungstool.model.importer;
import org.junit.Test;
import static java.time.LocalDateTime.now;
import static org.assertj.core.api.Assertions.assertThat;
public class KonfigurationEventTest {
@Test
public void test() {
KonfigurationEvent konfigurationEvent = new KonfigurationEvent(now(), now(), "@Konfiguration",
"Max:16\nMin: 4");
assertThat(konfigurationEvent.getMax()).isEqualTo(16);
assertThat(konfigurationEvent.getMin()).isEqualTo(4);
}
}
| AlexBischof/buchungstool | src/test/java/buchungstool/model/importer/KonfigurationEventTest.java | Java | apache-2.0 | 562 |
/*
*
* * Copyright (c) 2011-2015 EPFL DATA Laboratory
* * Copyright (c) 2014-2015 The Squall Collaboration (see NOTICE)
* *
* * All rights reserved.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package ch.epfl.data.squall.components.dbtoaster;
import backtype.storm.Config;
import backtype.storm.topology.TopologyBuilder;
import ch.epfl.data.squall.components.Component;
import ch.epfl.data.squall.components.JoinerComponent;
import ch.epfl.data.squall.components.AbstractJoinerComponent;
import ch.epfl.data.squall.operators.AggregateStream;
import ch.epfl.data.squall.predicates.Predicate;
import ch.epfl.data.squall.storm_components.StormComponent;
import ch.epfl.data.squall.storm_components.dbtoaster.StormDBToasterJoin;
import ch.epfl.data.squall.storm_components.synchronization.TopologyKiller;
import ch.epfl.data.squall.types.Type;
import ch.epfl.data.squall.utilities.MyUtilities;
import org.apache.log4j.Logger;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class DBToasterJoinComponent extends AbstractJoinerComponent<DBToasterJoinComponent> {
protected DBToasterJoinComponent getThis() {
return this;
}
private static final long serialVersionUID = 1L;
private static Logger LOG = Logger.getLogger(DBToasterJoinComponent.class);
private Map<String, Type[]> _parentNameColTypes;
private Set<String> _parentsWithMultiplicity;
private Map<String, AggregateStream> _parentsWithAggregator;
private String _equivalentSQL;
protected DBToasterJoinComponent(List<Component> relations, Map<String, Type[]> relationTypes,
Set<String> relationsWithMultiplicity, Map<String, AggregateStream> relationsWithAggregator,
String sql, String name) {
super(relations, name);
_parentsWithMultiplicity = relationsWithMultiplicity;
_parentsWithAggregator = relationsWithAggregator;
_parentNameColTypes = relationTypes;
_equivalentSQL = sql;
}
@Override
public void makeBolts(TopologyBuilder builder, TopologyKiller killer,
List<String> allCompNames, Config conf, int hierarchyPosition) {
// by default print out for the last component
// for other conditions, can be set via setPrintOut
if (hierarchyPosition == StormComponent.FINAL_COMPONENT
&& !getPrintOutSet())
setPrintOut(true);
MyUtilities.checkBatchOutput(getBatchOutputMillis(),
getChainOperator().getAggregation(), conf);
setStormEmitter(new StormDBToasterJoin(getParents(), this,
allCompNames,
_parentNameColTypes,
_parentsWithMultiplicity,
_parentsWithAggregator,
hierarchyPosition,
builder, killer, conf));
}
@Override
public DBToasterJoinComponent setJoinPredicate(Predicate predicate) {
throw new UnsupportedOperationException();
}
public String getSQLQuery() {
return _equivalentSQL;
}
}
| akathorn/squall | squall-core/src/main/java/ch/epfl/data/squall/components/dbtoaster/DBToasterJoinComponent.java | Java | apache-2.0 | 3,862 |
/*
* Copyright 2009-2013 Aarhus University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.brics.tajs.analysis;
import dk.brics.tajs.flowgraph.BasicBlock;
import dk.brics.tajs.lattice.CallEdge;
import dk.brics.tajs.solver.CallGraph;
import dk.brics.tajs.solver.IWorkListStrategy;
/**
* Work list strategy.
*/
public class WorkListStrategy implements IWorkListStrategy<Context> {
private CallGraph<State,Context,CallEdge<State>> call_graph;
/**
* Constructs a new WorkListStrategy object.
*/
public WorkListStrategy() {}
/**
* Sets the call graph.
*/
public void setCallGraph(CallGraph<State,Context,CallEdge<State>> call_graph) {
this.call_graph = call_graph;
}
@Override
public int compare(IEntry<Context> e1, IEntry<Context> e2) {
BasicBlock n1 = e1.getBlock();
BasicBlock n2 = e2.getBlock();
int serial1 = e1.getSerial();
int serial2 = e2.getSerial();
if (serial1 == serial2)
return 0;
final int E1_FIRST = -1;
final int E2_FIRST = 1;
if (n1.getFunction().equals(n2.getFunction()) && e1.getContext().equals(e2.getContext())) {
// same function and same context: use block order
if (n1.getOrder() < n2.getOrder())
return E1_FIRST;
else if (n2.getOrder() < n1.getOrder())
return E2_FIRST;
}
int function_context_order1 = call_graph.getBlockContextOrder(e1.getContext().getEntryBlockAndContext());
int function_context_order2 = call_graph.getBlockContextOrder(e2.getContext().getEntryBlockAndContext());
// different function/context: order by occurrence number
if (function_context_order1 < function_context_order2)
return E2_FIRST;
else if (function_context_order2 < function_context_order1)
return E1_FIRST;
// strategy: breadth first
return serial1 - serial2;
}
}
| cursem/ScriptCompressor | ScriptCompressor1.0/src/dk/brics/tajs/analysis/WorkListStrategy.java | Java | apache-2.0 | 2,303 |
package at.ac.tuwien.dsg.pm.resources;
import at.ac.tuwien.dsg.pm.PeerManager;
import at.ac.tuwien.dsg.pm.model.Collective;
import at.ac.tuwien.dsg.smartcom.model.CollectiveInfo;
import at.ac.tuwien.dsg.smartcom.model.Identifier;
import javax.inject.Inject;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
/**
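 * REST resource that resolves a collective id to its {@link CollectiveInfo}:
 * a {@code GET} on {@code collectiveInfo/{id}} returns the collective's delivery
 * policy and its peer identifiers as JSON, or fails with 404 if no collective
 * with that id exists.
 *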
* @author Philipp Zeppezauer (philipp.zeppezauer@gmail.com)
* @version 1.0
*/
@Path("collectiveInfo")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class CollectiveInfoResource {
@Inject
private PeerManager manager;
@GET
@Path("/{id}")
public CollectiveInfo getCollectiveInfo(@PathParam("id") String id) {
Collective collective = manager.getCollective(id);
if (collective == null) {
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND).build());
}
CollectiveInfo info = new CollectiveInfo();
info.setId(Identifier.collective(id));
info.setDeliveryPolicy(collective.getDeliveryPolicy());
List<Identifier> peers = new ArrayList<>(collective.getPeers().size());
for (String s : collective.getPeers()) {
peers.add(Identifier.peer(s));
}
info.setPeers(peers);
return info;
}
}
| PhilZeppe/CaaS | pm/src/main/java/at/ac/tuwien/dsg/pm/resources/CollectiveInfoResource.java | Java | apache-2.0 | 1,385 |
package com.earlysleep.model;
import org.litepal.crud.DataSupport;
import java.util.ArrayList;
import java.util.List;
/**
* Created by zml on 2016/6/23.
 * Description:
*/
public class AllData extends DataSupport {
private String music;
private int musictime;
private boolean musicchosse;
List<TimeSeting> list=new ArrayList<>();
}
| 642638112/-1.0 | EarlySleep/app/src/main/java/com/earlysleep/model/AllData.java | Java | apache-2.0 | 353 |
package org.spoofax.jsglr2.integrationtest.features;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;
import org.spoofax.jsglr2.integrationtest.BaseTestWithSdf3ParseTables;
import org.spoofax.jsglr2.integrationtest.OriginDescriptor;
import org.spoofax.terms.ParseError;
public class OriginsTest extends BaseTestWithSdf3ParseTables {
public OriginsTest() {
super("tokenization.sdf3");
}
@TestFactory public Stream<DynamicTest> operator() throws ParseError {
return testOrigins("x+x", Arrays.asList(
//@formatter:off
new OriginDescriptor("AddOperator", 0, 2),
new OriginDescriptor("Id", 0, 0),
new OriginDescriptor("Id", 2, 2)
//@formatter:on
));
}
}
| metaborg/jsglr | org.spoofax.jsglr2.integrationtest/src/test/java/org/spoofax/jsglr2/integrationtest/features/OriginsTest.java | Java | apache-2.0 | 839 |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.server.handler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.AMQException;
import org.wso2.andes.amqp.AMQPUtils;
import org.wso2.andes.exchange.ExchangeDefaults;
import org.wso2.andes.framing.AMQShortString;
import org.wso2.andes.framing.BasicPublishBody;
import org.wso2.andes.framing.abstraction.MessagePublishInfo;
import org.wso2.andes.protocol.AMQConstant;
import org.wso2.andes.server.AMQChannel;
import org.wso2.andes.server.exchange.Exchange;
import org.wso2.andes.server.protocol.AMQProtocolSession;
import org.wso2.andes.server.state.AMQStateManager;
import org.wso2.andes.server.state.StateAwareMethodListener;
import org.wso2.andes.server.virtualhost.VirtualHost;
public class BasicPublishMethodHandler implements StateAwareMethodListener<BasicPublishBody>
{
private static final Log _logger = LogFactory.getLog(BasicPublishMethodHandler.class);
private static final BasicPublishMethodHandler _instance = new BasicPublishMethodHandler();
public static BasicPublishMethodHandler getInstance()
{
return _instance;
}
private BasicPublishMethodHandler()
{
}
public void methodReceived(AMQStateManager stateManager, BasicPublishBody body, int channelId) throws AMQException
{
AMQProtocolSession session = stateManager.getProtocolSession();
if (_logger.isDebugEnabled())
{
_logger.debug("Publish received on channel " + channelId);
}
AMQShortString exchangeName = body.getExchange();
// TODO: check the delivery tag field details - is it unique across the broker or per subscriber?
if (exchangeName == null)
{
exchangeName = ExchangeDefaults.DEFAULT_EXCHANGE_NAME;
}
VirtualHost vHost = session.getVirtualHost();
Exchange exch = vHost.getExchangeRegistry().getExchange(exchangeName);
// if the exchange does not exist we raise a channel exception
if (exch == null)
{
throw body.getChannelException(AMQConstant.NOT_FOUND, "Unknown exchange name");
}
else
{
// The partially populated BasicDeliver frame plus the received route body
// is stored in the channel. Once the final body frame has been received
// it is routed to the exchange.
AMQChannel channel = session.getChannel(channelId);
if (channel == null)
{
throw body.getChannelNotFoundException(channelId);
}
MessagePublishInfo info = session.getMethodRegistry().getProtocolVersionMethodConverter().convertToInfo(body);
if (ExchangeDefaults.TOPIC_EXCHANGE_NAME.equals(exchangeName)
&& AMQPUtils.isWildCardDestination(info.getRoutingKey().toString())) {
throw body.getChannelException(AMQConstant.INVALID_ROUTING_KEY, "Publishing messages to a wildcard "
+ "destination is not allowed");
}
info.setExchange(exchangeName);
channel.setPublishFrame(info, exch);
}
}
}
| wso2/andes | modules/andes-core/broker/src/main/java/org/wso2/andes/server/handler/BasicPublishMethodHandler.java | Java | apache-2.0 | 3,860 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.policy.mgt.core.task;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.ntask.common.TaskException;
import org.wso2.carbon.ntask.core.TaskInfo;
import org.wso2.carbon.ntask.core.TaskManager;
import org.wso2.carbon.ntask.core.service.TaskService;
import org.wso2.carbon.policy.mgt.common.PolicyMonitoringTaskException;
import org.wso2.carbon.policy.mgt.core.internal.PolicyManagementDataHolder;
import org.wso2.carbon.policy.mgt.core.util.PolicyManagementConstants;
import org.wso2.carbon.policy.mgt.core.util.PolicyManagerUtil;
import org.wso2.carbon.ntask.core.TaskInfo.TriggerInfo;
import java.util.HashMap;
import java.util.Map;
public class TaskScheduleServiceImpl implements TaskScheduleService {
private static Log log = LogFactory.getLog(TaskScheduleServiceImpl.class);
@Override
public void startTask(int monitoringFrequency) throws PolicyMonitoringTaskException {
if (monitoringFrequency <= 0) {
throw new PolicyMonitoringTaskException("Time interval cannot be 0 or less than 0.");
}
try {
int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
taskService.registerTaskType(PolicyManagementConstants.TASK_TYPE);
if (log.isDebugEnabled()) {
log.debug("Monitoring task is started for the tenant id " + tenantId);
}
TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
TriggerInfo triggerInfo = new TriggerInfo();
triggerInfo.setIntervalMillis(monitoringFrequency);
triggerInfo.setRepeatCount(-1);
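            // e.g. a monitoringFrequency of 60000 triggers the monitoring task every 60 seconds; a repeat count of -1 repeats it indefinitely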
Map<String, String> properties = new HashMap<>();
properties.put(PolicyManagementConstants.TENANT_ID, String.valueOf(tenantId));
String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
TaskInfo taskInfo = new TaskInfo(taskName, PolicyManagementConstants.TASK_CLAZZ, properties, triggerInfo);
taskManager.registerTask(taskInfo);
taskManager.rescheduleTask(taskInfo.getName());
} catch (TaskException e) {
String msg = "Error occurred while creating the task for tenant " + PrivilegedCarbonContext.
getThreadLocalCarbonContext().getTenantId();
log.error(msg, e);
throw new PolicyMonitoringTaskException(msg, e);
}
}
@Override
public void stopTask() throws PolicyMonitoringTaskException {
try {
int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
taskManager.deleteTask(taskName);
} catch (TaskException e) {
String msg = "Error occurred while deleting the task for tenant " + PrivilegedCarbonContext.
getThreadLocalCarbonContext().getTenantId();
log.error(msg, e);
throw new PolicyMonitoringTaskException(msg, e);
}
}
@Override
public void updateTask(int monitoringFrequency) throws PolicyMonitoringTaskException {
try {
int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
taskManager.deleteTask(taskName);
TriggerInfo triggerInfo = new TriggerInfo();
triggerInfo.setIntervalMillis(monitoringFrequency);
triggerInfo.setRepeatCount(-1);
Map<String, String> properties = new HashMap<>();
properties.put("tenantId", String.valueOf(tenantId));
TaskInfo taskInfo = new TaskInfo(taskName, PolicyManagementConstants.TASK_CLAZZ, properties, triggerInfo);
taskManager.registerTask(taskInfo);
taskManager.rescheduleTask(taskInfo.getName());
} catch (TaskException e) {
String msg = "Error occurred while updating the task for tenant " + PrivilegedCarbonContext.
getThreadLocalCarbonContext().getTenantId();
log.error(msg, e);
throw new PolicyMonitoringTaskException(msg, e);
}
}
}
| charithag/carbon-device-mgt-framework | components/policy-mgt/org.wso2.carbon.policy.mgt.core/src/main/java/org/wso2/carbon/policy/mgt/core/task/TaskScheduleServiceImpl.java | Java | apache-2.0 | 5,611 |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.client.googleapis.testing.json;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.json.Json;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.testing.http.HttpTesting;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.client.util.Beta;
import java.io.IOException;
/**
* {@link Beta} <br>
* Factory class that builds {@link GoogleJsonResponseException} instances for testing.
*
* @since 1.18
*/
@Beta
public final class GoogleJsonResponseExceptionFactoryTesting {
/**
* Convenience factory method that builds a {@link GoogleJsonResponseException} from its
* arguments. The method builds a dummy {@link HttpRequest} and {@link HttpResponse}, sets the
* response's status to a user-specified HTTP error code, suppresses exceptions, and executes the
* request. This forces the underlying framework to create, but not throw, a {@link
* GoogleJsonResponseException}, which the method retrieves and returns to the invoker.
*
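   * <p>A typical use in a unit test might look like the following sketch (the JSON factory and
   * the expected values are placeholders, not part of this class):
   *
   * <pre>{@code
   * GoogleJsonResponseException exception =
   *     GoogleJsonResponseExceptionFactoryTesting.newMock(jsonFactory, 404, "NOT FOUND");
   * assertEquals(404, exception.getStatusCode());
   * }</pre>
   *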
* @param jsonFactory the JSON factory that will create all JSON required by the underlying
* framework
   * @param httpCode the desired HTTP error code. Note: do not specify any codes that indicate
* successful completion, e.g. 2XX.
* @param reasonPhrase the HTTP reason code that explains the error. For example, if {@code
* httpCode} is {@code 404}, the reason phrase should be {@code NOT FOUND}.
* @return the generated {@link GoogleJsonResponseException}, as specified.
* @throws IOException if request transport fails.
*/
public static GoogleJsonResponseException newMock(
JsonFactory jsonFactory, int httpCode, String reasonPhrase) throws IOException {
MockLowLevelHttpResponse otherServiceUnavaiableLowLevelResponse =
new MockLowLevelHttpResponse()
.setStatusCode(httpCode)
.setReasonPhrase(reasonPhrase)
.setContentType(Json.MEDIA_TYPE)
.setContent(
"{ \"error\": { \"errors\": [ { \"reason\": \""
+ reasonPhrase
+ "\" } ], "
+ "\"code\": "
+ httpCode
+ " } }");
MockHttpTransport otherTransport =
new MockHttpTransport.Builder()
.setLowLevelHttpResponse(otherServiceUnavaiableLowLevelResponse)
.build();
HttpRequest otherRequest =
otherTransport.createRequestFactory().buildGetRequest(HttpTesting.SIMPLE_GENERIC_URL);
otherRequest.setThrowExceptionOnExecuteError(false);
HttpResponse otherServiceUnavailableResponse = otherRequest.execute();
return GoogleJsonResponseException.from(jsonFactory, otherServiceUnavailableResponse);
}
}
| googleapis/google-api-java-client | google-api-client/src/main/java/com/google/api/client/googleapis/testing/json/GoogleJsonResponseExceptionFactoryTesting.java | Java | apache-2.0 | 3,555 |
package pl.mobilization.conference2015.sponsor;
import android.content.Context;
import android.content.Intent;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.ArrayList;
import java.util.List;
import de.greenrobot.event.EventBus;
import lombok.extern.slf4j.Slf4j;
import pl.mobilization.conference2015.sponsor.events.OnSponsorClickEvent;
import pl.mobilization.conference2015.sponsor.events.SponsorUpdatedEvent;
import pl.mobilization.conference2015.sponsor.repository.SponsorRepoModel;
import pl.mobilization.conference2015.sponsor.repository.SponsorRepository;
import pl.mobilization.conference2015.sponsor.rest.SponsorRestService;
import pl.mobilization.conference2015.sponsor.rest.SponsorListRestModel;
import pl.mobilization.conference2015.sponsor.view.SponsorsView;
import pl.mobilization.conference2015.sponsor.view.SponsorsListViewModel;
import rx.Observable;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
* Created by msaramak on 19.08.15.
*/
@Slf4j
public class SponsorRestModelPresenterTest {
@Mock
SponsorRestService sponsorRestService;
@Mock
EventBus eventBus;
@Mock
SponsorsView view;
@Mock
SponsorRepository sponsorRepository;
@Mock
Context context;
private SponsorPresenter testedSp;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
//GIVEN a sponsor presenter..
testedSp = new SponsorPresenter(sponsorRepository, eventBus);
List<SponsorRepoModel> l = new ArrayList<>();
when(sponsorRepository.getSponsors()).thenReturn(Observable.<List<SponsorRepoModel>>just(l));
}
@After
public void tearDown() throws Exception {
}
@SuppressWarnings("ResourceType")
@Test
public void testOnBindView() throws Exception {
//GIVEN a sponsor presenter
verify(eventBus).register(testedSp);
//WHEN bind view
testedSp.onBindView(context, view);
        //THEN check if the background service is set up
verify(context).bindService(any(Intent.class), any(), eq(Context.BIND_AUTO_CREATE));
}
@Test
public void shouldDisplayDialogWhenOnSponsorClickEventCalled() throws Exception {
        //GIVEN a tested sponsor presenter with bound view
        testedSp.onBindView(context, view);
        //WHEN an event comes
OnSponsorClickEvent event = new OnSponsorClickEvent(null);
testedSp.onEvent(event);
//THEN
verify(view).showSponsorDialog(event);
}
@Test
public void testOnUpdateSponsorList() throws Exception {
//GIVEN a tested sponsor presenter with binded view
testedSp.onBindView(context, view);
//WHEN sponsors list is updated
SponsorUpdatedEvent event = new SponsorUpdatedEvent();
testedSp.onEvent(event);
//THEN
verify(view).updateSponsors(any(SponsorsListViewModel.class));
}
}
| Mobilization/mobandroid5 | app/src/test/java/pl/mobilization/conference2015/sponsor/SponsorRestModelPresenterTest.java | Java | apache-2.0 | 3,058 |
package org.liveontologies.protege.justification.proof.preferences;
/*-
* #%L
* Protege Proof Justification
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2016 - 2017 Live Ontologies Project
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.eclipse.core.runtime.IExtension;
import org.protege.editor.core.editorkit.EditorKit;
import org.protege.editor.core.plugin.AbstractPluginLoader;
public class ProofPreferencesPanelPluginLoader extends AbstractPluginLoader<ProofPreferencesPanelPlugin> {
private final EditorKit kit;
private static final String ID = "JustificationProofPreferences";
private static final String KEY = "org.liveontologies.protege.justification.proof";
public ProofPreferencesPanelPluginLoader(EditorKit kit) {
super(KEY, ID);
this.kit = kit;
}
@Override
protected ProofPreferencesPanelPlugin createInstance(IExtension extension) {
return new ProofPreferencesPanelPlugin(kit, extension);
}
}
| liveontologies/protege-proof-justification | src/main/java/org/liveontologies/protege/justification/proof/preferences/ProofPreferencesPanelPluginLoader.java | Java | apache-2.0 | 1,491 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workdocs.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.workdocs.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* DeleteFolderRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class DeleteFolderRequestProtocolMarshaller implements Marshaller<Request<DeleteFolderRequest>, DeleteFolderRequest> {
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON).requestUri("/api/v1/folders/{FolderId}")
.httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false).hasPayloadMembers(false).serviceName("AmazonWorkDocs").build();
private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
public DeleteFolderRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}
public Request<DeleteFolderRequest> marshall(DeleteFolderRequest deleteFolderRequest) {
if (deleteFolderRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
final ProtocolRequestMarshaller<DeleteFolderRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
deleteFolderRequest);
protocolMarshaller.startMarshalling();
DeleteFolderRequestMarshaller.getInstance().marshall(deleteFolderRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-workdocs/src/main/java/com/amazonaws/services/workdocs/model/transform/DeleteFolderRequestProtocolMarshaller.java | Java | apache-2.0 | 2,620 |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.materials.git;
import com.googlecode.junit.ext.JunitExtRunner;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.RevisionContext;
import com.thoughtworks.go.domain.materials.TestSubprocessExecutionContext;
import com.thoughtworks.go.domain.materials.git.GitCommand;
import com.thoughtworks.go.domain.materials.git.GitTestRepo;
import com.thoughtworks.go.domain.materials.mercurial.StringRevision;
import com.thoughtworks.go.helper.TestRepo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TestFileUtil;
import org.hamcrest.Matchers;
import org.hamcrest.core.Is;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.thoughtworks.go.domain.materials.git.GitTestRepo.*;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(JunitExtRunner.class)
public class GitMaterialShallowCloneTest {
private GitTestRepo repo;
private File workingDir;
@Before
public void setup() throws Exception {
repo = new GitTestRepo();
workingDir = TestFileUtil.createUniqueTempFolder("working");
}
@After
public void teardown() throws Exception {
TestRepo.internalTearDown();
}
@Test
public void defaultShallowFlagIsOff() throws Exception {
assertThat(new GitMaterial(repo.projectRepositoryUrl()).isShallowClone(), is(false));
assertThat(new GitMaterial(repo.projectRepositoryUrl(), null).isShallowClone(), is(false));
assertThat(new GitMaterial(repo.projectRepositoryUrl(), true).isShallowClone(), is(true));
assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl())).isShallowClone(), is(false));
assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, true)).isShallowClone(), is(true));
assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, false)).isShallowClone(), is(false));
TestRepo.internalTearDown();
}
@Test
public void shouldGetLatestModificationWithShallowClone() throws IOException {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
List<Modification> mods = material.latestModification(workingDir, context());
assertThat(mods.size(), is(1));
assertThat(mods.get(0).getComment(), Matchers.is("Added 'run-till-file-exists' ant target"));
assertThat(localRepoFor(material).isShallow(), is(true));
assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_0), is(false));
assertThat(localRepoFor(material).currentRevision(), is(REVISION_4.getRevision()));
}
@Test
public void shouldGetModificationSinceANotInitiallyClonedRevision() {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
List<Modification> modifications = material.modificationsSince(workingDir, REVISION_0, context());
assertThat(modifications.size(), is(4));
assertThat(modifications.get(0).getRevision(), is(REVISION_4.getRevision()));
assertThat(modifications.get(0).getComment(), is("Added 'run-till-file-exists' ant target"));
assertThat(modifications.get(1).getRevision(), is(REVISION_3.getRevision()));
assertThat(modifications.get(1).getComment(), is("adding build.xml"));
assertThat(modifications.get(2).getRevision(), is(REVISION_2.getRevision()));
assertThat(modifications.get(2).getComment(), is("Created second.txt from first.txt"));
assertThat(modifications.get(3).getRevision(), is(REVISION_1.getRevision()));
assertThat(modifications.get(3).getComment(), is("Added second line"));
}
@Test
public void shouldBeAbleToUpdateToRevisionNotFetched() {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_3, REVISION_2, 2), context());
assertThat(localRepoFor(material).currentRevision(), is(REVISION_3.getRevision()));
assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_2), is(true));
assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_3), is(true));
}
@Test
public void configShouldIncludesShallowFlag() {
GitMaterialConfig shallowConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), true).config();
assertThat(shallowConfig.isShallowClone(), is(true));
GitMaterialConfig normalConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), null).config();
assertThat(normalConfig.isShallowClone(), is(false));
}
@Test
public void xmlAttributesShouldIncludesShallowFlag() {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
assertThat(material.getAttributesForXml().get("shallowClone"), Is.<Object>is(true));
}
@Test
public void attributesShouldIncludeShallowFlag() {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
Map gitConfig = (Map) (material.getAttributes(false).get("git-configuration"));
assertThat(gitConfig.get("shallow-clone"), Is.<Object>is(true));
}
@Test
public void shouldConvertExistingRepoToFullRepoWhenShallowCloneIsOff() {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
material.latestModification(workingDir, context());
assertThat(localRepoFor(material).isShallow(), is(true));
material = new GitMaterial(repo.projectRepositoryUrl(), false);
material.latestModification(workingDir, context());
assertThat(localRepoFor(material).isShallow(), is(false));
}
@Test
public void withShallowCloneShouldGenerateANewMaterialWithOverriddenShallowConfig() {
GitMaterial original = new GitMaterial(repo.projectRepositoryUrl(), false);
assertThat(original.withShallowClone(true).isShallowClone(), is(true));
assertThat(original.withShallowClone(false).isShallowClone(), is(false));
assertThat(original.isShallowClone(), is(false));
}
@Test
public void updateToANewRevisionShouldNotResultInUnshallowing() throws IOException {
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_4, REVISION_4, 1), context());
assertThat(localRepoFor(material).isShallow(), is(true));
List<Modification> modifications = repo.addFileAndPush("newfile", "add new file");
StringRevision newRevision = new StringRevision(modifications.get(0).getRevision());
material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(newRevision, newRevision, 1), context());
assertThat(new File(workingDir, "newfile").exists(), is(true));
assertThat(localRepoFor(material).isShallow(), is(true));
}
@Test
public void shouldUnshallowServerSideRepoCompletelyOnRetrievingModificationsSincePreviousRevision() {
SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(false);
material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, true));
assertThat(localRepoFor(material).isShallow(), is(false));
}
@Test
public void shouldNotUnshallowOnServerSideIfShallowClonePropertyIsOnAndRepoIsAlreadyShallow() {
SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(true);
material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, false));
assertThat(localRepoFor(material).isShallow(), is(true));
}
private TestSubprocessExecutionContext context() {
return new TestSubprocessExecutionContext();
}
private GitCommand localRepoFor(GitMaterial material) {
return new GitCommand(material.getFingerprint(), workingDir, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>());
}
}
| soundcloud/gocd | domain/test/com/thoughtworks/go/config/materials/git/GitMaterialShallowCloneTest.java | Java | apache-2.0 | 9,537 |
package wei_chih.service.handler.wei_chih;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.SignatureException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import message.Operation;
import message.OperationType;
import service.Key;
import service.KeyManager;
import service.handler.ConnectionHandler;
import wei_chih.service.Config;
import wei_chih.service.SocketServer;
import wei_chih.utility.MerkleTree;
import wei_chih.utility.Utils;
import wei_chih.message.wei_chih.Request;
import wei_chih.message.wei_chih.Acknowledgement;
/**
*
* @author Chienweichih
*/
public class WeiChihHandler extends ConnectionHandler {
private static final ReentrantLock LOCK;
private static final MerkleTree[] merkleTree;
private static final String[] digestBeforeUpdate;
private static final Operation[] lastOP;
private static final Integer[] sequenceNumbers;
static {
merkleTree = new MerkleTree[Config.SERVICE_NUM];
digestBeforeUpdate = new String[Config.SERVICE_NUM];
lastOP = new Operation[Config.SERVICE_NUM];
sequenceNumbers = new Integer[Config.SERVICE_NUM];
for (int i = 0; i < Config.SERVICE_NUM; ++i) {
merkleTree[i] = new MerkleTree(new File(SocketServer.dataDirPath));
digestBeforeUpdate[i] = "";
lastOP[i] = new Operation(OperationType.DOWNLOAD, "", merkleTree[i].getRootHash());
sequenceNumbers[i] = 0;
}
LOCK = new ReentrantLock();
}
public WeiChihHandler(Socket socket, KeyPair keyPair) {
super(socket, keyPair);
}
@Override
protected void handle(DataOutputStream out, DataInputStream in) {
PublicKey clientPubKey = KeyManager.getInstance().getPublicKey(Key.CLIENT);
int portIndex = 0;
if (Math.abs(socket.getPort() - Config.SERVICE_PORT[0]) < 10) {
portIndex = socket.getPort() - Config.SERVICE_PORT[0];
} else if (Math.abs(socket.getLocalPort() - Config.SERVICE_PORT[0]) < 10) {
portIndex = socket.getLocalPort() - Config.SERVICE_PORT[0];
}
try {
Request req = Request.parse(Utils.receive(in));
LOCK.lock();
if (!req.validate(clientPubKey)) {
throw new SignatureException("REQ validation failure");
}
Operation op = req.getOperation();
switch (op.getType()) {
case UPLOAD:
digestBeforeUpdate[portIndex] = merkleTree[portIndex].getDigest(op.getPath());
merkleTree[portIndex].update(op.getPath(), op.getMessage());
case DOWNLOAD:
// both upload and download, so no break
if (0 != op.getClientID().compareTo(String.valueOf(sequenceNumbers[portIndex]))) {
throw new java.security.InvalidParameterException();
}
sequenceNumbers[portIndex]++;
default:
}
File file = new File(SocketServer.dataDirPath + op.getPath());
String rootHash = merkleTree[portIndex].getRootHash();
String fileHash = null;
if (file.exists()) {
fileHash = Utils.digest(file, Config.DIGEST_ALGORITHM);
}
Acknowledgement ack = new Acknowledgement(rootHash, fileHash, req);
ack.sign(keyPair);
Utils.send(out, ack.toString());
switch (op.getType()) {
case DOWNLOAD:
lastOP[portIndex] = op;
if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
Utils.send(out, file);
}
break;
case UPLOAD:
lastOP[portIndex] = op;
if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
file = new File(Config.DOWNLOADS_DIR_PATH + op.getPath());
Utils.receive(in, file);
String digest = Utils.digest(file, Config.DIGEST_ALGORITHM);
if (0 != op.getMessage().compareTo(digest)) {
throw new java.io.IOException();
}
}
break;
case AUDIT:
file = new File(Config.ATTESTATION_DIR_PATH + "/service-provider/voting");
switch (lastOP[portIndex].getType()) {
case DOWNLOAD:
Utils.write(file, rootHash);
break;
case UPLOAD:
MerkleTree prevMerkleTree = new MerkleTree(merkleTree[portIndex]);
prevMerkleTree.update(lastOP[portIndex].getPath(), digestBeforeUpdate[portIndex]);
Utils.Serialize(file, prevMerkleTree);
break;
default:
throw new java.lang.Error();
}
Utils.send(out, file);
break;
default:
}
socket.close();
} catch (IOException | SignatureException ex) {
Logger.getLogger(WeiChihHandler.class.getName()).log(Level.SEVERE, null, ex);
} finally {
if (LOCK != null) {
LOCK.unlock();
}
}
}
}
| CloudComLab/Voting-CAP | src/wei_chih/service/handler/wei_chih/WeiChihHandler.java | Java | apache-2.0 | 5,740 |
package ru.stqa.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by mocius on 2017-04-16.
*/
public class ContactPhone extends TestBase {
@Test
public void testContactPhones(){
app.goTo().homePage();
ContactData contact = app.contactHelper().all().iterator().next();
ContactData contactInfoFromEditForm = app.contactHelper().infoFromEditForm(contact);
assertThat(contact.getAllPhones(), equalTo(mergePhones(contactInfoFromEditForm)));
}
    private String mergePhones(ContactData contact) {
        return Arrays.asList(contact.getHomePhone(), contact.getMobilePhone(), contact.getWorkPhone()).stream().
filter((s) -> ! s.equals("")).map(ContactPhone::cleaned)
.collect(Collectors.joining("\n"));
}
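    // cleaned() strips whitespace, dashes and parentheses so that phone numbers in different
    // formats compare equal, e.g. a (hypothetical) "+7 (495) 123-45-67" becomes "+74951234567".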
public static String cleaned(String phone){
return phone.replaceAll("\\s", "").replaceAll("[-()]", "");
}
}
| mociek124/java_pft | addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/ContactPhone.java | Java | apache-2.0 | 1,225 |
package com.oauth.services.security;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.keygen.BytesKeyGenerator;
import org.springframework.security.crypto.keygen.KeyGenerators;
import org.springframework.security.crypto.password.StandardPasswordEncoder;
/**
* Created by yichen.wei on 6/24/17.
*/
public class Test {
public static void main(String args[]) {
BytesKeyGenerator saltGenerator = KeyGenerators.secureRandom();
// StandardPasswordEncoder encode = new StandardPasswordEncoder("SHA-256", "");
// StandardPasswordEncoder encode = new StandardPasswordEncoder("");
StandardPasswordEncoder encode = new StandardPasswordEncoder();
System.out.println("abcfwef...");
//a8ba715d5a076c99b95995d357651df5c296bf308abaa154a54d2418885ec622e9fe8624f2e06524
//be1e54adbd1c5c5d58a714fad7d529c73198c8c51e1f9d43edc79dac4784b5e93460605fe7082b0d
//910a6df88a99d5d81f3376628f3fd6a91a2152a366f2d450ef9220ff32f0c74952f754da62cd5a13
System.out.println(encode.encode("abcdef"));
// System.out.println(encode.encode("mypass"));
String salt = saltGenerator.generateKey().toString();
System.out.println(salt);
System.out.println(saltGenerator.getKeyLength());
BCryptPasswordEncoder bc = new BCryptPasswordEncoder();
System.out.println(bc.encode("admin"));
}
}
| kinddevil/course-service | oauth/src/main/java/com/oauth/services/security/Test.java | Java | apache-2.0 | 1,438 |
package net.distilledcode.httpclient.impl.metatype.reflection;
import org.apache.http.client.config.RequestConfig;
import org.junit.Test;
import java.util.Map;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
public class InvokersTest {
private static class TestBean {
private boolean featureEnabled = true;
// getters
public String getFooBar() { return null; }
public void getFooBarVoid() {}
// setters
public void setBarFoo(String fooBar) {}
public void setBarFooNoArgs() {}
        // boolean switch (only called for enabling, disabled by default)
public void enableFeature() {
featureEnabled = true;
}
        // boolean switch (only called for disabling, enabled by default)
void disableFeature() {
featureEnabled = false;
}
}
@Test
public void invokeMethods() throws Exception {
        // builder.setMaxRedirects(17)
Invokers.Invoker<Void> setMaxRedirects = new Invokers.Invoker<>(RequestConfig.Builder.class.getDeclaredMethod("setMaxRedirects", int.class));
RequestConfig.Builder builder = RequestConfig.custom();
setMaxRedirects.invoke(builder, 17);
// requestConfig.getMaxRedirects()
Invokers.Invoker<Integer> getMaxRedirects = new Invokers.Invoker<>(RequestConfig.class.getDeclaredMethod("getMaxRedirects"));
RequestConfig requestConfig = builder.build();
assertThat(getMaxRedirects.invoke(requestConfig), is(17));
}
@Test
public void beanGetters() throws Exception {
Map<String, Invokers.Invoker<?>> testBeanGetters = Invokers.beanGetters(TestBean.class);
assertThat(testBeanGetters.keySet(), allOf(
hasItem("foo.bar"),
not(hasItem("foo.bar.void"))
));
}
@Test
public void beanSetters() throws Exception {
Map<String, Invokers.Invoker<?>> testBeanGetters = Invokers.beanSetters(TestBean.class);
assertThat(testBeanGetters.keySet(), allOf(
hasItem("bar.foo"),
not(hasItem("bar.foo.no.args"))
));
}
@Test
public void conditionalSetter() throws Exception {
Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), false);
TestBean testBean = new TestBean();
assertThat(testBean.featureEnabled, is(true));
featureDisabler.invoke(testBean, false);
assertThat(testBean.featureEnabled, is(false));
}
@Test
public void conditionalSetterIgnored() throws Exception {
Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), true);
TestBean testBean = new TestBean();
assertThat(testBean.featureEnabled, is(true));
featureDisabler.invoke(testBean, false);
assertThat(testBean.featureEnabled, is(true));
}
}
| code-distillery/httpclient-configuration-support | src/test/java/net/distilledcode/httpclient/impl/metatype/reflection/InvokersTest.java | Java | apache-2.0 | 3,178 |
/*
* Copyright 2013 Square Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flowless;
import android.os.Parcelable;
import android.support.annotation.NonNull;
/**
* Used by History to convert your key objects to and from instances of
* {@link android.os.Parcelable}.
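 * <p>
 * A minimal sketch of an implementation, assuming the key objects themselves already
 * implement {@link android.os.Parcelable} (any other mapping works as well; the class
 * name below is illustrative only):
 *
 * <pre>{@code
 * public class ParcelableKeyParceler implements KeyParceler {
 *   public Parcelable toParcelable(Object key) {
 *     return (Parcelable) key;
 *   }
 *
 *   public Object toKey(Parcelable parcelable) {
 *     return parcelable;
 *   }
 * }
 * }</pre>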
*/
public interface KeyParceler {
@NonNull
Parcelable toParcelable(@NonNull Object key);
@NonNull
Object toKey(@NonNull Parcelable parcelable);
}
| Zhuinden/flowless | flowless-library/src/main/java/flowless/KeyParceler.java | Java | apache-2.0 | 964 |
/*
* Copyright 2012 Michael Bischoff
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.jpaw.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.OutputStream;
import java.nio.charset.Charset;
/**
* Functionality which corresponds to String, but for byte arrays.
* Essential feature is that the class is immutable, so you can use it in messaging without making deep copies.
* Mimicking {@link java.lang.String}, the class contains offset and length fields to allow sharing of the buffer.
* <p>
* This should really exist in Java SE already.
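 * <p>
 * Typical (illustrative) usage, where {@code someBytes} is an existing byte[] of sufficient length:
 * <pre>{@code
 * ByteArray ba = new ByteArray(someBytes);   // defensive copy taken once
 * ByteArray head = ba.subArray(0, 4);        // shares the same buffer, no copy
 * String text = ba.asString();               // decodes using UTF-8
 * }</pre>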
*
* @author Michael Bischoff
*
*/
public final class ByteArray implements Externalizable, Cloneable {
private static final long serialVersionUID = 2782729564297256974L;
public static final Charset CHARSET_UTF8 = Charset.forName("UTF-8"); // default character set is available on all platforms
private static final int MAGIC_LENGTH_INDICATING_32_BIT_SIZE = 247; // if a single byte length of this value is written in the
    // serialized form, it indicates that a full four byte length must be read instead. 0 and 255 are not used as the marker because they occur frequently.
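    // Illustration of the resulting wire format (see writeExternal / readBytes below):
    //   length  12 -> single length byte 12, followed by the 12 payload bytes
    //   length 247 -> marker byte 247, followed by the 32-bit length 247, then the payload
    //   length 300 -> marker byte 247, followed by the 32-bit length 300, then the payload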
private final byte[] buffer;
private final int offset;
private final int length;
private ByteArray extraFieldJustRequiredForDeserialization = null; // transient temporary field
private static final byte[] ZERO_JAVA_BYTE_ARRAY = new byte[0];
public static final ByteArray ZERO_BYTE_ARRAY = new ByteArray(ZERO_JAVA_BYTE_ARRAY);
/** No-arg constructor required for Serializable interface. */
@Deprecated
public ByteArray() {
this(ZERO_JAVA_BYTE_ARRAY);
}
/** Constructs a ByteArray from a source byte[], which is defensively copied. */
public ByteArray(final byte[] source) {
if (source == null || source.length == 0) {
buffer = ZERO_JAVA_BYTE_ARRAY;
offset = 0;
length = 0;
} else {
buffer = source.clone(); // benchmarks have shown that clone() is equally fast as System.arraycopy for all lengths > 0
offset = 0;
length = buffer.length;
}
}
// construct a ByteArray from a trusted source byte[]
// this method is always called with unsafeTrustedReuseOfJavaByteArray = true, the parameter is only required in order to distinguish the constructor
// from the copying one
private ByteArray(final byte[] source, final boolean unsafeTrustedReuseOfJavaByteArray) {
if (source == null || source.length == 0) {
buffer = ZERO_JAVA_BYTE_ARRAY;
offset = 0;
length = 0;
} else {
buffer = unsafeTrustedReuseOfJavaByteArray ? source : source.clone();
offset = 0;
length = buffer.length;
}
}
    /** Constructs a ByteArray from a ByteArrayOutputStream, which has just been constructed by some previous process.
* @throws IOException */
public static ByteArray fromByteArrayOutputStream(final ByteArrayOutputStream baos) throws IOException {
baos.flush();
return new ByteArray(baos.toByteArray(), true);
}
/** Writes the contents of this ByteArray to an OutputStream. */
public void toOutputStream(final OutputStream os) throws IOException {
os.write(buffer, offset, length);
}
/** Constructs a ByteArray from the provided DataInput, with a predefined length. */
public static ByteArray fromDataInput(final DataInput in, final int len) throws IOException {
if (len <= 0)
return ZERO_BYTE_ARRAY;
final byte[] tmp = new byte[len];
in.readFully(tmp);
return new ByteArray(tmp, true);
}
    /** Reads bytes from an input stream, up to maxBytes (or all that are available, if maxBytes = 0). */
public static ByteArray fromInputStream(final InputStream is, final int maxBytes) throws IOException {
final ByteBuilder tmp = maxBytes > 0 ? new ByteBuilder(maxBytes, CHARSET_UTF8) : new ByteBuilder();
tmp.readFromInputStream(is, maxBytes);
if (tmp.length() == 0)
return ZERO_BYTE_ARRAY;
return new ByteArray(tmp.getCurrentBuffer(), 0, tmp.length());
}
/** Constructs a ByteArray from the provided ByteBuilder. */
public static ByteArray fromByteBuilder(final ByteBuilder in) {
if (in == null || in.length() == 0)
return ZERO_BYTE_ARRAY;
return new ByteArray(in.getCurrentBuffer(), 0, in.length());
}
/** Constructs a ByteArray from the provided String, using the UTF8 character set. */
public static ByteArray fromString(final String in) {
return fromString(in, CHARSET_UTF8);
}
/** Constructs a ByteArray from the provided String, using the specified character set. */
public static ByteArray fromString(final String in, final Charset cs) {
if (in == null || in.length() == 0)
return ZERO_BYTE_ARRAY;
return new ByteArray(in.getBytes(cs), true); // we know these bytes are never changed, so no extra copy required
}
/** returns the byte array as a string. Unlike toString(), which uses the JVM default character set, this method always uses UTF-8. */
public String asString() {
return asString(CHARSET_UTF8);
}
/** returns the byte array as a string, using a specified character set. */
public String asString(final Charset cs) {
return new String(buffer, offset, length, cs);
}
/** construct a ByteArray from a source byte[], with offset and length. source may not be null. */
public ByteArray(final byte[] source, final int offset, final int length) {
if (source == null || offset < 0 || length < 0 || offset + length > source.length)
throw new IllegalArgumentException();
buffer = new byte[length];
System.arraycopy(source, offset, buffer, 0, length);
this.offset = 0;
this.length = length;
}
/** Construct a ByteArray from another one. Could also just assign it due to immutability.
* The only benefit of this constructor is that it converts a null parameter into the non-null empty ByteArray. */
public ByteArray(final ByteArray source) {
if (source == null) {
buffer = ZERO_JAVA_BYTE_ARRAY;
offset = 0;
length = 0;
} else {
buffer = source.buffer; // no array copy required due to immutability
offset = source.offset;
length = source.length;
}
}
    /** Construct a ByteArray from a source ByteArray, with offset and length. source may not be null.
* Similar to the subArray member method. */
public ByteArray(final ByteArray source, final int offset, final int length) {
if (source == null || offset < 0 || length < 0 || offset + length > source.length)
throw new IllegalArgumentException();
this.buffer = source.buffer; // no array copy required due to immutability
this.offset = source.offset + offset;
this.length = length;
}
/** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is shared.
* Functionality wise this corresponds to String.substring (before Java 6) or ByteBuffer.slice. */
public ByteArray subArray(final int xoffset, final int xlength) {
// create a new ByteArray sharing the same buffer
return new ByteArray(this, xoffset, xlength);
}
/** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is not shared.
* Use this variant if the original ByteArray holds a much larger byte[] and can be GCed afterwards. */
public ByteArray subArrayUnshared(final int xoffset, final int xlength) {
if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
throw new IllegalArgumentException();
final byte[] newBuffer = new byte[xlength];
System.arraycopy(buffer, xoffset, newBuffer, 0, xlength);
// create a new ByteArray using the new buffer
return new ByteArray(newBuffer, true);
}
@Override
public ByteArray clone() {
return new ByteArray(this);
}
public int length() {
return this.length;
}
// public int getOffset() {
// return this.offset;
// }
//
// /** Returns the internal buffer of this object. It may only be used for read-only access.
// * Java is missing a "const" specifier for arrays as it is available in C and C++.
// *
// * Java-purists will complain against exposing this internal state of an immutable object, but as long as
// * access is possible via reflection anyway, just with performance penalty, it would be outright stupid
// * to force people to use reflection, or even defensive copies. Instead I hope the name of the method
// * documents the intended use.
// */
// public byte /* const */[] unsafe$getConstBufferOfConstBytes() {
// return this.buffer;
// }
public int indexOf(final byte x) {
int i = 0;
while (i < length) {
if (buffer[offset + i] == x)
return i;
++i;
}
return -1;
}
public int indexOf(final byte x, final int fromIndex) {
int i = fromIndex >= 0 ? fromIndex : 0;
while (i < length) {
if (buffer[offset + i] == x)
return i;
++i;
}
return -1;
}
public int lastIndexOf(final byte x) {
int i = length;
while (i > 0) {
if (buffer[offset + --i] == x)
return i;
}
return -1;
}
public int lastIndexOf(final byte x, final int fromIndex) {
int i = fromIndex >= length ? length - 1 : fromIndex;
while (i >= 0) {
if (buffer[offset + i] == x)
return i;
--i;
}
return -1;
}
public byte byteAt(final int pos) {
if (pos < 0 || pos >= length)
throw new IllegalArgumentException();
return buffer[offset + pos];
}
    /** Provides the contents of this ByteArray as a ByteArrayInputStream for read-only streaming access. */
public ByteArrayInputStream asByteArrayInputStream() {
return new ByteArrayInputStream(buffer, offset, length());
}
// return a defensive copy of the contents
public byte[] getBytes() {
final byte[] result = new byte[length];
System.arraycopy(buffer, offset, result, 0, length);
return result;
}
// return a defensive copy of part of the contents. Shorthand for subArray(offset, length).getBytes(),
// which would create a temporary object
public byte[] getBytes(final int xoffset, final int xlength) {
if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
throw new IllegalArgumentException();
final byte[] result = new byte[xlength];
System.arraycopy(buffer, xoffset + this.offset, result, 0, xlength);
return result;
}
private boolean contentEqualsSub(final byte[] dst, final int dstOffset, final int dstLength) {
if (length != dstLength)
return false;
for (int i = 0; i < dstLength; ++i) {
if (buffer[offset + i] != dst[dstOffset + i])
return false;
}
return true;
}
    // following: all arguments must not be null
public boolean contentEquals(final ByteArray that) {
return contentEqualsSub(that.buffer, that.offset, that.length);
}
public boolean contentEquals(final byte[] that) {
return contentEqualsSub(that, 0, that.length);
}
public boolean contentEquals(final byte[] that, final int thatOffset, final int thatLength) {
if (thatOffset < 0 || thatLength < 0 || thatOffset + thatLength > that.length)
throw new IllegalArgumentException();
return contentEqualsSub(that, thatOffset, thatLength);
}
    // returns true if the two instances share the same backing buffer (for debugging)
public boolean shareBuffer(final ByteArray that) {
return buffer == that.buffer;
}
@Override
public int hashCode() {
int hash = 997;
for (int i = 0; i < length; ++i) {
hash = 29 * hash + buffer[offset + i];
}
return hash;
}
// two ByteArrays are considered equal if they have the same visible contents
@Override
public boolean equals(final Object that) {
if (this == that)
return true;
if (that == null || getClass() != that.getClass())
return false;
final ByteArray xthat = (ByteArray)that;
// same as contentEqualsSub(..) now
if (this.length != xthat.length)
return false;
for (int i = 0; i < length; ++i) {
if (buffer[offset + i] != xthat.buffer[xthat.offset + i])
return false;
}
return true;
}
// support function to allow dumping contents to DataOutput without the need to expose our internal buffer
public void writeToDataOutput(final DataOutput out) throws IOException {
out.write(buffer, offset, length);
}
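    /** Returns a hex dump of the contents for logging / debugging, starting at position startAt;
     *  a positive maxlength smaller than the total length limits how much is dumped. */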
public String hexdump(final int startAt, final int maxlength) {
if (length <= startAt)
return ""; // no data to dump
return ByteUtil.dump(buffer, offset + startAt, (maxlength > 0 && maxlength < length) ? maxlength : length);
}
@Override
public void writeExternal(final ObjectOutput out) throws IOException {
//writeBytes(out, buffer, offset, length);
if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
out.writeByte(length);
} else {
out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
out.writeInt(length);
}
out.write(buffer, offset, length);
}
    // support function to allow an ordinary byte[] to be written in the same fashion
public static void writeBytes(final ObjectOutput out, final byte[] buffer, final int offset, final int length) throws IOException {
if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
out.writeByte(length);
} else {
out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
out.writeInt(length);
}
out.write(buffer, offset, length);
}
public static byte[] readBytes(final ObjectInput in) throws IOException {
int newlength = in.readByte();
if (newlength < 0)
newlength += 256; // want full unsigned range
if (newlength == MAGIC_LENGTH_INDICATING_32_BIT_SIZE) // magic to indicate four byte length
newlength = in.readInt();
// System.out.println("ByteArray.readExternal() with length " + newlength);
if (newlength == 0)
return ZERO_JAVA_BYTE_ARRAY;
final byte[] localBuffer = new byte[newlength];
int done = 0;
while (done < newlength) {
            final int nRead = in.read(localBuffer, done, newlength - done); // may return fewer bytes than requested!
if (nRead <= 0)
throw new IOException("deserialization of ByteArray returned " + nRead + " while expecting " + (newlength - done));
done += nRead;
}
return localBuffer;
}
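    // Worked illustration of the length prefix used by writeExternal()/writeBytes() and consumed by
    // readBytes() above (added for clarity; MAGIC_LENGTH_INDICATING_32_BIT_SIZE is defined elsewhere in
    // this class and is referred to only symbolically here):
    //   length < 256 and != MAGIC  ->  one unsigned byte holding the length, followed by the payload
    //   any other length           ->  the MAGIC byte, a 4-byte int length, then the payload
    // e.g. a 300-byte payload is written as [MAGIC][0x00 0x00 0x01 0x2C][300 payload bytes].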
// factory method to read from objectInput via above helper function
public static ByteArray read(final ObjectInput in) throws IOException {
return new ByteArray(readBytes(in), true);
}
    // A direct implementation of this method would conflict with the immutability ("final" attributes) of the fields:
    // although readExternal() acts as a kind of constructor, Java does not allow assignments to final fields here.
    // The usual workarounds are to add artificial fields and use readResolve() / serialization proxies, to discard the
    // "final" attributes, or to set the values via reflection.
    // We use a variant of Bloch's serialization proxy pattern (Effective Java, Item 78), namely a single-sided variant
    // with just one additional member field, which lets us preserve the immutability.
    // See also http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6379948 for discussion around this.
@Override
public void readExternal(final ObjectInput in) throws IOException {
extraFieldJustRequiredForDeserialization = new ByteArray(readBytes(in), true);
}
public Object readResolve() {
// System.out.println("ByteArray.readResolve()");
if (extraFieldJustRequiredForDeserialization == null)
throw new RuntimeException("readResolve() called on instance not obtained via readExternal()");
return extraFieldJustRequiredForDeserialization;
}
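    // Hedged usage sketch (not part of the original class; exception handling omitted): since ByteArray
    // is Externalizable and readResolve() swaps the temporary proxy for the fully constructed instance,
    // a round trip through standard Java serialization is expected to preserve the visible contents.
    // Only factory and comparison methods declared in this class plus plain JDK streams are used below.
    //
    //   ByteArray original = ByteArray.fromBase64("AQID".getBytes(), 0, 4);  // decodes to 0x01 0x02 0x03
    //   ByteArrayOutputStream bos = new ByteArrayOutputStream();
    //   try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
    //       oos.writeObject(original);
    //   }
    //   try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
    //       ByteArray copy = (ByteArray) ois.readObject();
    //       assert copy.contentEquals(original);                             // same visible contents
    //   }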
    // factory method to construct a ByteArray from a prevalidated base64 byte sequence; returns null if decoding fails (e.g. because of a suspicious length)
public static ByteArray fromBase64(final byte[] data, final int offset, final int length) {
if (length == 0)
return ZERO_BYTE_ARRAY;
final byte[] tmp = Base64.decode(data, offset, length);
if (tmp == null)
return null;
return new ByteArray(tmp, true);
}
public void appendBase64(final ByteBuilder b) {
Base64.encodeToByte(b, buffer, offset, length);
}
public void appendToRaw(final ByteBuilder b) {
b.write(buffer, offset, length);
}
/** Returns the contents of this ByteArray as a base64 encoded string.
* @since 1.2.12 */
public String asBase64() {
final ByteBuilder tmp = new ByteBuilder(0, null);
Base64.encodeToByte(tmp, buffer, offset, length);
return tmp.toString();
}
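    // Small illustrative round trip using only the base64 helpers declared above (hedged sketch, not
    // part of the original API):
    //   ByteArray ba = ByteArray.fromBase64("AQID".getBytes(), 0, 4);  // bytes 0x01 0x02 0x03
    //   String again = ba.asBase64();                                  // "AQID"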
// returns the String representation of the visible bytes portion
@Override
public String toString() {
return new String(buffer, offset, length);
}
}
| jpaw/jpaw | jpaw-util/src/main/java/de/jpaw/util/ByteArray.java | Java | apache-2.0 | 18,751 |
/*
* Copyright (C) 2014 Jörg Prante
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xbib.elasticsearch.plugin.jdbc.feeder;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.metrics.MeterMetric;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.JsonSettingsLoader;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.river.RiverName;
import org.xbib.elasticsearch.plugin.jdbc.RiverRunnable;
import org.xbib.elasticsearch.plugin.jdbc.classloader.uri.URIClassLoader;
import org.xbib.elasticsearch.plugin.jdbc.client.Ingest;
import org.xbib.elasticsearch.plugin.jdbc.client.IngestFactory;
import org.xbib.elasticsearch.plugin.jdbc.client.transport.BulkTransportClient;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronExpression;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronThreadPoolExecutor;
import org.xbib.elasticsearch.plugin.jdbc.state.RiverStatesMetaData;
import org.xbib.elasticsearch.plugin.jdbc.util.RiverServiceLoader;
import org.xbib.elasticsearch.river.jdbc.RiverFlow;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.collect.Lists.newLinkedList;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
* Standalone feeder for JDBC
*/
public class JDBCFeeder {
private final static ESLogger logger = ESLoggerFactory.getLogger("JDBCFeeder");
/**
     * Registers the metadata factory with Elasticsearch so that a
     * ClusterStateResponse containing RiverStatesMetaData can be decoded.
*/
static {
MetaData.registerFactory(RiverStatesMetaData.TYPE, RiverStatesMetaData.FACTORY);
}
protected Reader reader;
protected Writer writer;
protected PrintStream printStream;
protected IngestFactory ingestFactory;
/**
* This ingest is the client for the river flow state operations
*/
private Ingest ingest;
private RiverFlow riverFlow;
private List<Map<String, Object>> definitions;
private ThreadPoolExecutor threadPoolExecutor;
private volatile Thread feederThread;
private volatile boolean closed;
/**
     * Constructor for running this from the command line
*/
public JDBCFeeder() {
Runtime.getRuntime().addShutdownHook(shutdownHook());
}
public void exec() throws Exception {
readFrom(new InputStreamReader(System.in, "UTF-8"))
.writeTo(new OutputStreamWriter(System.out, "UTF-8"))
.errorsTo(System.err)
.start();
}
@SuppressWarnings("unchecked")
public JDBCFeeder readFrom(Reader reader) {
this.reader = reader;
try {
Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(reader).mapOrderedAndClose();
Settings settings = settingsBuilder()
.put(new JsonSettingsLoader().load(jsonBuilder().map(map).string()))
.build();
this.definitions = newLinkedList();
Object pipeline = map.get("jdbc");
if (pipeline instanceof Map) {
definitions.add((Map<String, Object>) pipeline);
}
if (pipeline instanceof List) {
definitions.addAll((List<Map<String, Object>>) pipeline);
}
// before running, create the river flow
createRiverFlow(map, settings);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return this;
}
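    // Illustrative shape of the JSON spec consumed by readFrom() (hedged sketch; only keys that this
    // class itself reads are shown, using the flat setting names from createRiverFlow(), schedule() and
    // createIngestFactory(). The actual river definition under "jdbc" -- SQL statements, target index,
    // and so on -- is interpreted by the selected RiverFlow strategy and is not reproduced here):
    //
    //   {
    //     "strategy" : "simple",
    //     "interval" : "60s",
    //     "elasticsearch.cluster" : "elasticsearch",
    //     "elasticsearch.host" : "localhost",
    //     "elasticsearch.port" : 9300,
    //     "max_bulk_actions" : 10000,
    //     "jdbc" : { ... strategy-specific river definition ... }
    //   }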
protected RiverFlow createRiverFlow(Map<String, Object> spec, Settings settings) throws IOException {
String strategy = XContentMapValues.nodeStringValue(spec.get("strategy"), "simple");
this.riverFlow = RiverServiceLoader.newRiverFlow(strategy);
logger.debug("strategy {}: river flow class {}, spec = {} settings = {}",
strategy, riverFlow.getClass().getName(), spec, settings.getAsMap());
this.ingestFactory = createIngestFactory(settings);
        // our private ingest, needed for having a client in the river flow
this.ingest = ingestFactory.create();
riverFlow.setRiverName(new RiverName("jdbc", "feeder"))
.setSettings(settings)
.setClient(ingest.client())
.setIngestFactory(ingestFactory)
.setMetric(new MeterMetric(Executors.newScheduledThreadPool(1), TimeUnit.SECONDS))
.setQueue(new ConcurrentLinkedDeque<Map<String, Object>>());
return riverFlow;
}
public JDBCFeeder writeTo(Writer writer) {
this.writer = writer;
return this;
}
public JDBCFeeder errorsTo(PrintStream printStream) {
this.printStream = printStream;
return this;
}
public JDBCFeeder start() throws Exception {
this.closed = false;
if (ingest.getConnectedNodes().isEmpty()) {
throw new IOException("no nodes connected, can't continue");
}
this.feederThread = new Thread(new RiverRunnable(riverFlow, definitions));
List<Future<?>> futures = schedule(feederThread);
// wait for all threads to finish
for (Future<?> future : futures) {
future.get();
}
ingest.shutdown();
return this;
}
private List<Future<?>> schedule(Thread thread) {
Settings settings = riverFlow.getSettings();
String[] schedule = settings.getAsArray("schedule");
List<Future<?>> futures = newLinkedList();
Long seconds = settings.getAsTime("interval", TimeValue.timeValueSeconds(0)).seconds();
if (schedule != null && schedule.length > 0) {
CronThreadPoolExecutor cronThreadPoolExecutor =
new CronThreadPoolExecutor(settings.getAsInt("threadpoolsize", 1));
for (String cron : schedule) {
futures.add(cronThreadPoolExecutor.schedule(thread, new CronExpression(cron)));
}
this.threadPoolExecutor = cronThreadPoolExecutor;
logger.debug("scheduled feeder instance with cron expressions {}", Arrays.asList(schedule));
} else if (seconds > 0L) {
ScheduledThreadPoolExecutor scheduledThreadPoolExecutor =
new ScheduledThreadPoolExecutor(settings.getAsInt("threadpoolsize", 4));
futures.add(scheduledThreadPoolExecutor.scheduleAtFixedRate(thread, 0L, seconds, TimeUnit.SECONDS));
logger.debug("scheduled feeder instance at fixed rate of {} seconds", seconds);
this.threadPoolExecutor = scheduledThreadPoolExecutor;
} else {
this.threadPoolExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>());
futures.add(threadPoolExecutor.submit(thread));
logger.debug("started feeder instance");
}
return futures;
}
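    // The three scheduling modes implemented above, keyed off the settings read in schedule()
    // (illustrative values only):
    //   "schedule" : ["0 0/15 * * * ?"]  -> cron-driven runs on a CronThreadPoolExecutor
    //   "interval" : "60s"               -> fixed-rate runs every 60 seconds on a ScheduledThreadPoolExecutor
    //   neither setting present          -> a single immediate run on a plain ThreadPoolExecutor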
/**
     * Shuts down the feeder instance when the JVM terminates (e.g. on Ctrl-C).
     *
     * @return the shutdown hook thread
*/
public Thread shutdownHook() {
return new Thread() {
public void run() {
try {
shutdown();
} catch (Exception e) {
e.printStackTrace(printStream);
}
}
};
}
public synchronized void shutdown() throws Exception {
if (closed) {
return;
}
closed = true;
if (threadPoolExecutor != null) {
threadPoolExecutor.shutdownNow();
threadPoolExecutor = null;
}
if (feederThread != null) {
feederThread.interrupt();
}
if (!ingest.isShutdown()) {
ingest.shutdown();
}
reader.close();
writer.close();
printStream.close();
}
private IngestFactory createIngestFactory(final Settings settings) {
return new IngestFactory() {
@Override
public Ingest create() {
Integer maxbulkactions = settings.getAsInt("max_bulk_actions", 10000);
Integer maxconcurrentbulkrequests = settings.getAsInt("max_concurrent_bulk_requests",
Runtime.getRuntime().availableProcessors() * 2);
ByteSizeValue maxvolume = settings.getAsBytesSize("max_bulk_volume", ByteSizeValue.parseBytesSizeValue("10m"));
TimeValue maxrequestwait = settings.getAsTime("max_request_wait", TimeValue.timeValueSeconds(60));
TimeValue flushinterval = settings.getAsTime("flush_interval", TimeValue.timeValueSeconds(5));
File home = new File(settings.get("home", "."));
BulkTransportClient ingest = new BulkTransportClient();
Settings clientSettings = ImmutableSettings.settingsBuilder()
.put("cluster.name", settings.get("elasticsearch.cluster", "elasticsearch"))
.put("host", settings.get("elasticsearch.host", "localhost"))
.put("port", settings.getAsInt("elasticsearch.port", 9300))
.put("sniff", settings.getAsBoolean("elasticsearch.sniff", false))
.put("name", "feeder") // prevents lookup of names.txt, we don't have it, and marks this node as "feeder". See also module load skipping in JDBCRiverPlugin
.put("client.transport.ignore_cluster_name", true) // ignore cluster name setting
.put("client.transport.ping_timeout", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(10))) // ping timeout
.put("client.transport.nodes_sampler_interval", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(5))) // for sniff sampling
.put("path.plugins", ".dontexist") // pointing to a non-exiting folder means, this disables loading site plugins
// adding our custom class loader is tricky, actions may not be registered to ActionService
.classLoader(getClassLoader(getClass().getClassLoader(), home))
.build();
ingest.maxActionsPerBulkRequest(maxbulkactions)
.maxConcurrentBulkRequests(maxconcurrentbulkrequests)
.maxVolumePerBulkRequest(maxvolume)
.maxRequestWait(maxrequestwait)
.flushIngestInterval(flushinterval)
.newClient(clientSettings);
return ingest;
}
};
}
/**
     * We have to add the Elasticsearch jars to our classpath, but exclude all JVM plugins,
     * in order to start our TransportClient.
     *
     * @param parent the parent class loader
     * @param home   ES_HOME
     * @return a custom class loader with our dependencies
*/
private ClassLoader getClassLoader(ClassLoader parent, File home) {
URIClassLoader classLoader = new URIClassLoader(parent);
File[] libs = new File(home + "/lib").listFiles();
if (libs != null) {
for (File file : libs) {
if (file.getName().toLowerCase().endsWith(".jar")) {
classLoader.addURI(file.toURI());
}
}
}
return classLoader;
}
}
| songwie/elasticsearch-river-jdbc | src/main/java/org/xbib/elasticsearch/plugin/jdbc/feeder/JDBCFeeder.java | Java | apache-2.0 | 12,908 |
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.service.neomedia.stats;
import org.jitsi.service.neomedia.*;
import java.util.*;
/**
* An extended interface for accessing the statistics of a {@link MediaStream}.
*
* The reason to extend the {@link MediaStreamStats} interface rather than
* adding methods into it is to allow the implementation to reside in a separate
* class. This is desirable in order to:
* 1. Help to keep the old interface for backward compatibility.
* 2. Provide a "clean" place where future code can be added, thus avoiding
* further cluttering of the already overly complicated
* {@link org.jitsi.impl.neomedia.MediaStreamStatsImpl}.
*
* @author Boris Grozev
*/
public interface MediaStreamStats2
extends MediaStreamStats
{
/**
* @return the instance which keeps aggregate statistics for the associated
* {@link MediaStream} in the receive direction.
*/
ReceiveTrackStats getReceiveStats();
/**
* @return the instance which keeps aggregate statistics for the associated
* {@link MediaStream} in the send direction.
*/
SendTrackStats getSendStats();
/**
* @return the instance which keeps statistics for a particular SSRC in the
* receive direction.
*/
ReceiveTrackStats getReceiveStats(long ssrc);
/**
* @return the instance which keeps statistics for a particular SSRC in the
* send direction.
*/
SendTrackStats getSendStats(long ssrc);
/**
* @return all per-SSRC statistics for the send direction.
*/
Collection<? extends SendTrackStats> getAllSendStats();
/**
* @return all per-SSRC statistics for the receive direction.
*/
Collection<? extends ReceiveTrackStats> getAllReceiveStats();
/**
* Clears send ssrc stats.
* @param ssrc the ssrc to clear.
*/
void clearSendSsrc(long ssrc);
}
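// Hedged usage sketch (not part of this interface): code that has obtained a MediaStreamStats2 --
// typically from its MediaStream, assuming the stream exposes its statistics object as this extended
// type -- can use the aggregate and per-SSRC views declared above:
//
//   MediaStreamStats2 stats = ...; // obtained from the associated MediaStream
//   ReceiveTrackStats aggregateReceive = stats.getReceiveStats();
//   SendTrackStats aggregateSend = stats.getSendStats();
//   for (ReceiveTrackStats perSsrc : stats.getAllReceiveStats()) {
//       // inspect per-SSRC receive statistics here
//   }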
| jitsi/libjitsi | src/main/java/org/jitsi/service/neomedia/stats/MediaStreamStats2.java | Java | apache-2.0 | 2,461 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.data.input.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import io.druid.TestObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
public class DelimitedParseSpecTest
{
private final ObjectMapper jsonMapper = new TestObjectMapper();
@Test
public void testSerde() throws IOException
{
DelimitedParseSpec spec = new DelimitedParseSpec(
new TimestampSpec("abc", "iso", null,null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("abc")), null, null),
"\u0001",
"\u0002",
Arrays.asList("abc")
);
final DelimitedParseSpec serde = jsonMapper.readValue(
jsonMapper.writeValueAsString(spec),
DelimitedParseSpec.class
);
Assert.assertEquals("abc", serde.getTimestampSpec().getTimestampColumn());
Assert.assertEquals("iso", serde.getTimestampSpec().getTimestampFormat());
Assert.assertEquals(Arrays.asList("abc"), serde.getColumns());
Assert.assertEquals("\u0001", serde.getDelimiter());
Assert.assertEquals("\u0002", serde.getListDelimiter());
Assert.assertEquals(Arrays.asList("abc"), serde.getDimensionsSpec().getDimensionNames());
}
@Test(expected = IllegalArgumentException.class)
public void testColumnMissing() throws Exception
{
final ParseSpec spec = new DelimitedParseSpec(
new TimestampSpec(
"timestamp",
"auto",
null,
null
),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
",",
" ",
Arrays.asList("a")
);
}
@Test(expected = IllegalArgumentException.class)
public void testComma() throws Exception
{
final ParseSpec spec = new DelimitedParseSpec(
new TimestampSpec(
"timestamp",
"auto",
null,
null
),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")),
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
",",
null,
Arrays.asList("a")
);
}
@Test(expected = NullPointerException.class)
  public void testDefaultColumnList() {
final DelimitedParseSpec spec = new DelimitedParseSpec(
new TimestampSpec(
"timestamp",
"auto",
null,
null
),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
",",
null,
        // passing a null column list is not allowed
null
);
}
}
| Saligia-eva/mobvista_druid | api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java | Java | apache-2.0 | 3,790 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import java.io.Serializable;
/**
*
*/
public class RegisterInstancesWithLoadBalancerResult implements Serializable, Cloneable {
/**
* The updated list of instances for the load balancer.
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instances;
/**
* The updated list of instances for the load balancer.
*
* @return The updated list of instances for the load balancer.
*/
public java.util.List<Instance> getInstances() {
if (instances == null) {
instances = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>();
instances.setAutoConstruct(true);
}
return instances;
}
/**
* The updated list of instances for the load balancer.
*
* @param instances The updated list of instances for the load balancer.
*/
public void setInstances(java.util.Collection<Instance> instances) {
if (instances == null) {
this.instances = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instancesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>(instances.size());
instancesCopy.addAll(instances);
this.instances = instancesCopy;
}
/**
* The updated list of instances for the load balancer.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setInstances(java.util.Collection)} or {@link
* #withInstances(java.util.Collection)} if you want to override the
* existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param instances The updated list of instances for the load balancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RegisterInstancesWithLoadBalancerResult withInstances(Instance... instances) {
if (getInstances() == null) setInstances(new java.util.ArrayList<Instance>(instances.length));
for (Instance value : instances) {
getInstances().add(value);
}
return this;
}
/**
* The updated list of instances for the load balancer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param instances The updated list of instances for the load balancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RegisterInstancesWithLoadBalancerResult withInstances(java.util.Collection<Instance> instances) {
if (instances == null) {
this.instances = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instancesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>(instances.size());
instancesCopy.addAll(instances);
this.instances = instancesCopy;
}
return this;
}
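    /*
     * Illustrative (non-normative) sketch of the behavior documented above: the varargs variant appends
     * to the current list, while the collection variant replaces it. Instance#withInstanceId(String) is
     * assumed from the companion model class and is shown only for illustration.
     *
     *   result.withInstances(new Instance().withInstanceId("i-111"))                            // appends
     *         .withInstances(java.util.Arrays.asList(new Instance().withInstanceId("i-222")));  // replaces
     */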
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getInstances() != null) sb.append("Instances: " + getInstances() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getInstances() == null) ? 0 : getInstances().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof RegisterInstancesWithLoadBalancerResult == false) return false;
RegisterInstancesWithLoadBalancerResult other = (RegisterInstancesWithLoadBalancerResult)obj;
if (other.getInstances() == null ^ this.getInstances() == null) return false;
if (other.getInstances() != null && other.getInstances().equals(this.getInstances()) == false) return false;
return true;
}
@Override
public RegisterInstancesWithLoadBalancerResult clone() {
try {
return (RegisterInstancesWithLoadBalancerResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| trasa/aws-sdk-java | aws-java-sdk-elasticloadbalancing/src/main/java/com/amazonaws/services/elasticloadbalancing/model/RegisterInstancesWithLoadBalancerResult.java | Java | apache-2.0 | 5,518 |
package com.example.godtemper.db;
import java.util.ArrayList;
import java.util.List;
import com.example.godtemper.model.City;
import com.example.godtemper.model.County;
import com.example.godtemper.model.Province;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
public class GodTemperDB {
/**
	 * Database name
*/
public static final String DB_NAME = "GodTemper";
/**
	 * Database version
*/
public static final int VERSION = 1;
private static GodTemperDB godTemperDB;
private SQLiteDatabase db;
private GodTemperDB(Context context){
GodTemperOpenHelper dbHelper = new GodTemperOpenHelper(context, DB_NAME, null, VERSION);
db = dbHelper.getWritableDatabase();
}
/**
	 * Gets the singleton instance of GodTemperDB
* @param context
* @return
*/
public synchronized static GodTemperDB getInstance(Context context){
if(godTemperDB == null){
godTemperDB = new GodTemperDB(context);
}
return godTemperDB;
}
/**
	 * Stores a Province instance into the database
* @param province
*/
public void saveProvince(Province province){
if(province != null){
ContentValues values = new ContentValues();
values.put("province_name", province.getProvinceName());
values.put("province_code", province.getProvinceCode());
db.insert("Province", null, values);
}
}
/**
	 * Reads the information of all provinces in the country from the database
* @return
*/
public List<Province>loadProvinces(){
List<Province>list = new ArrayList<Province>();
Cursor cursor = db.query("Province", null, null, null, null, null, null);
if(cursor.moveToFirst()){
do{
Province province = new Province();
province.setId(cursor.getInt(cursor.getColumnIndex("id")));
province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name")));
province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code")));
list.add(province);
}while(cursor.moveToNext());
}
return list;
}
/**
	 * Stores a City instance into the database
* @param city
*/
public void saveCity(City city) {
if(city!=null){
ContentValues values = new ContentValues();
values.put("city_name", city.getCityName());
values.put("city_code", city.getCityCode());
values.put("province_id", city.getProvinceId());
db.insert("City", null, values);
}
}
/**
	 * Reads all cities under the given province from the database
* @param provinceId
* @return
*/
public List<City> loadCities(int provinceId) {
List<City>list = new ArrayList<City>();
Cursor cursor = db.query("City", null, "province_id = ?",
new String[]{String.valueOf(provinceId)}, null,null,null);
if(cursor.moveToFirst()){
do{
City city = new City();
city.setId(cursor.getInt(cursor.getColumnIndex("id")));
city.setCityName(cursor.getString(cursor.getColumnIndex("city_name")));
city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code")));
city.setProvinceId(provinceId);
list.add(city);
}while(cursor.moveToNext());
}
return list;
}
/**
	 * Stores a County instance into the database
*/
public void saveCounty(County county){
if(county != null){
ContentValues values = new ContentValues();
values.put("county_name", county.getCountyName());
values.put("county_code", county.getCountyCode());
values.put("city_id", county.getCityId());
db.insert("County", null, values);
}
}
/**
	 * Reads all counties under the given city from the database
*/
public List<County>loadCounties (int cityId){
List<County>list = new ArrayList<County>();
Cursor cursor = db.query("County", null, "city_id = ?",
new String[]{String.valueOf(cityId)}, null, null, null);
if(cursor.moveToFirst()){
do{
County county = new County();
county.setId(cursor.getInt(cursor.getColumnIndex("id")));
county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name")));
county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code")));
county.setCityId(cityId);
list.add(county);
}while(cursor.moveToNext());
}
return list;
}
}
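// Illustrative usage (hedged sketch, not part of the original file): the cascading province -> city ->
// county queries above are typically driven from UI code roughly like this. The getId() accessors on the
// model classes are assumed to exist alongside the setId() setters used above.
//
//   GodTemperDB db = GodTemperDB.getInstance(context);
//   List<Province> provinces = db.loadProvinces();
//   List<City> cities = db.loadCities(provinces.get(0).getId());
//   List<County> counties = db.loadCounties(cities.get(0).getId());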
| GodisGod/godtemper | src/com/example/godtemper/db/GodTemperDB.java | Java | apache-2.0 | 4,037 |
package org.axway.grapes.server.webapp.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.yammer.dropwizard.auth.basic.BasicAuthProvider;
import com.yammer.dropwizard.testing.ResourceTest;
import org.axway.grapes.commons.api.ServerAPI;
import org.axway.grapes.server.GrapesTestUtils;
import org.axway.grapes.server.config.GrapesServerConfig;
import org.axway.grapes.server.core.options.FiltersHolder;
import org.axway.grapes.server.db.RepositoryHandler;
import org.axway.grapes.server.db.datamodel.DbCredential;
import org.axway.grapes.server.db.datamodel.DbSearch;
import org.axway.grapes.server.webapp.auth.GrapesAuthenticator;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.Test;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class WebSearchResourceTest extends ResourceTest {
private RepositoryHandler repositoryHandler;
@Override
protected void setUpResources() throws Exception {
repositoryHandler = GrapesTestUtils.getRepoHandlerMock();
final GrapesServerConfig config = mock(GrapesServerConfig.class);
final WebSearchResource resource = new WebSearchResource(repositoryHandler, config);
addProvider(new BasicAuthProvider<DbCredential>(new GrapesAuthenticator(repositoryHandler), "test auth"));
addResource(resource);
}
@Test
public void getSearchResult() throws Exception {
List<String> moduleIds = new ArrayList<>();
moduleIds.add("testSearch_id_1");
moduleIds.add("testSearch_id_2");
List<String> artifactIds = new ArrayList<>();
artifactIds.add("testSearch_artifact_id_1");
artifactIds.add("testSearch_artifact_id_2");
DbSearch search = new DbSearch();
search.setModules(moduleIds);
search.setArtifacts(artifactIds);
when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.OK_200, response.getStatus());
final String results = response.getEntity(new GenericType<String>() {
});
assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
}
@Test
public void getNullSearchResult() {
DbSearch search = new DbSearch();
when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.OK_200, response.getStatus());
final String results = response.getEntity(new GenericType<String>() {
});
assertEquals("{\"modules\":null,\"artifacts\":null}", results);
}
@Test
public void getModulesSearchResult() {
DbSearch search = new DbSearch();
List<String> moduleIds = new ArrayList<>();
moduleIds.add("testSearch_id_1");
moduleIds.add("testSearch_id_2");
search.setModules(moduleIds);
when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.OK_200, response.getStatus());
final String results = response.getEntity(new GenericType<String>() {
});
assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":null}", results);
}
@Test
public void getArtifactsSearchResult() {
DbSearch search = new DbSearch();
List<String> artifactIds = new ArrayList<>();
artifactIds.add("testSearch_artifact_id_1");
artifactIds.add("testSearch_artifact_id_2");
search.setArtifacts(artifactIds);
when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(response);
assertEquals(HttpStatus.OK_200, response.getStatus());
final String results = response.getEntity(new GenericType<String>() {
});
assertEquals("{\"modules\":null,\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
}
} | Axway/Grapes | server/src/test/java/org/axway/grapes/server/webapp/resources/WebSearchResourceTest.java | Java | apache-2.0 | 5,448 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.documentation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.MachineLearningGetResultsIT;
import org.elasticsearch.client.MachineLearningIT;
import org.elasticsearch.client.MlTestStateCleaner;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataResponse;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.DeleteTrainedModelRequest;
import org.elasticsearch.client.ml.EstimateModelMemoryRequest;
import org.elasticsearch.client.ml.EstimateModelMemoryResponse;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
import org.elasticsearch.client.ml.GetFiltersRequest;
import org.elasticsearch.client.ml.GetFiltersResponse;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetInfluencersResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetModelSnapshotsResponse;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.GetTrainedModelsRequest;
import org.elasticsearch.client.ml.GetTrainedModelsResponse;
import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest;
import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse;
import org.elasticsearch.client.ml.MlInfoRequest;
import org.elasticsearch.client.ml.MlInfoResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostCalendarEventResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.PutTrainedModelRequest;
import org.elasticsearch.client.ml.PutTrainedModelResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotResponse;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig;
import org.elasticsearch.client.ml.dataframe.Classification;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.ActualClass;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.PredictedClass;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric.ConfusionMatrix;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.dataframe.explain.FieldSelection;
import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.inference.InferenceToXContentCompressor;
import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider;
import org.elasticsearch.client.ml.inference.TrainedModelConfig;
import org.elasticsearch.client.ml.inference.TrainedModelDefinition;
import org.elasticsearch.client.ml.inference.TrainedModelDefinitionTests;
import org.elasticsearch.client.ml.inference.TrainedModelInput;
import org.elasticsearch.client.ml.inference.TrainedModelStats;
import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.DetectionRule;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.config.ModelPlotConfig;
import org.elasticsearch.client.ml.job.config.Operator;
import org.elasticsearch.client.ml.job.config.RuleCondition;
import org.elasticsearch.client.ml.job.process.DataCounts;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.client.ml.job.results.CategoryDefinition;
import org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
import org.junit.After;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
@After
public void cleanUp() throws IOException {
new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
}
public void testCreateJob() throws Exception {
RestHighLevelClient client = highLevelClient();
// tag::put-job-detector
Detector.Builder detectorBuilder = new Detector.Builder()
.setFunction("sum") // <1>
.setFieldName("total") // <2>
.setDetectorDescription("Sum of total"); // <3>
// end::put-job-detector
// tag::put-job-analysis-config
List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
.setBucketSpan(TimeValue.timeValueMinutes(10)); // <3>
// end::put-job-analysis-config
// tag::put-job-data-description
DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
.setTimeField("timestamp"); // <1>
// end::put-job-data-description
{
String id = "job_1";
// tag::put-job-config
Job.Builder jobBuilder = new Job.Builder(id) // <1>
.setAnalysisConfig(analysisConfigBuilder) // <2>
.setDataDescription(dataDescriptionBuilder) // <3>
.setDescription("Total sum of requests"); // <4>
// end::put-job-config
// tag::put-job-request
PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
// end::put-job-request
// tag::put-job-execute
PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
// end::put-job-execute
// tag::put-job-response
Date createTime = response.getResponse().getCreateTime(); // <1>
// end::put-job-response
assertThat(createTime.getTime(), greaterThan(0L));
}
{
String id = "job_2";
Job.Builder jobBuilder = new Job.Builder(id)
.setAnalysisConfig(analysisConfigBuilder)
.setDataDescription(dataDescriptionBuilder)
.setDescription("Total sum of requests");
PutJobRequest request = new PutJobRequest(jobBuilder.build());
// tag::put-job-execute-listener
ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
@Override
public void onResponse(PutJobResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-job-execute-async
client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetJob() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("get-machine-learning-job1");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job2");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
{
// tag::get-job-request
GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
request.setAllowNoJobs(true); // <2>
// end::get-job-request
// tag::get-job-execute
GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
// end::get-job-execute
// tag::get-job-response
long numberOfJobs = response.count(); // <1>
List<Job> jobs = response.jobs(); // <2>
// end::get-job-response
assertEquals(2, response.count());
assertThat(response.jobs(), hasSize(2));
assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()),
containsInAnyOrder(job.getId(), secondJob.getId()));
}
{
GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*");
// tag::get-job-execute-listener
ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() {
@Override
public void onResponse(GetJobResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-job-execute-async
client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteJob() throws Exception {
RestHighLevelClient client = highLevelClient();
String jobId = "my-first-machine-learning-job";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
{
//tag::delete-job-request
DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1>
//end::delete-job-request
//tag::delete-job-request-force
deleteJobRequest.setForce(false); // <1>
//end::delete-job-request-force
//tag::delete-job-request-wait-for-completion
deleteJobRequest.setWaitForCompletion(true); // <1>
//end::delete-job-request-wait-for-completion
//tag::delete-job-execute
DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
//end::delete-job-execute
//tag::delete-job-response
Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1>
TaskId task = deleteJobResponse.getTask(); // <2>
//end::delete-job-response
assertTrue(isAcknowledged);
assertNull(task);
}
{
//tag::delete-job-execute-listener
ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
@Override
public void onResponse(DeleteJobResponse deleteJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job");
// tag::delete-job-execute-async
client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testOpenJob() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("opening-my-first-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("opening-my-second-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
{
// tag::open-job-request
OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
// end::open-job-request
// tag::open-job-execute
OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
// end::open-job-execute
// tag::open-job-response
boolean isOpened = openJobResponse.isOpened(); // <1>
String node = openJobResponse.getNode(); // <2>
// end::open-job-response
assertThat(node, notNullValue());
}
{
// tag::open-job-execute-listener
ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
@Override
public void onResponse(OpenJobResponse openJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::open-job-execute-listener
OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-second-machine-learning-job");
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::open-job-execute-async
client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::open-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testCloseJob() throws Exception {
RestHighLevelClient client = highLevelClient();
{
Job job = MachineLearningIT.buildJob("closing-my-first-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
// tag::close-job-request
CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
closeJobRequest.setForce(false); // <2>
closeJobRequest.setAllowNoJobs(true); // <3>
closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
// end::close-job-request
// tag::close-job-execute
CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
// end::close-job-execute
// tag::close-job-response
boolean isClosed = closeJobResponse.isClosed(); // <1>
// end::close-job-response
}
{
Job job = MachineLearningIT.buildJob("closing-my-second-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
// tag::close-job-execute-listener
ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
@Override
public void onResponse(CloseJobResponse closeJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::close-job-execute-listener
CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job");
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::close-job-execute-async
client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::close-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testUpdateJob() throws Exception {
RestHighLevelClient client = highLevelClient();
String jobId = "test-update-job";
Job tempJob = MachineLearningIT.buildJob(jobId);
Job job = new Job.Builder(tempJob)
.setAnalysisConfig(new AnalysisConfig.Builder(tempJob.getAnalysisConfig())
.setCategorizationFieldName("categorization-field")
.setDetector(0,
new Detector.Builder().setFieldName("total")
.setFunction("sum")
.setPartitionFieldName("mlcategory")
.setDetectorDescription(randomAlphaOfLength(10))
.build()))
.build();
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
{
List<DetectionRule> detectionRules = Arrays.asList(
new DetectionRule.Builder(Arrays.asList(RuleCondition.createTime(Operator.GT, 100L))).build());
Map<String, Object> customSettings = new HashMap<>();
customSettings.put("custom-setting-1", "custom-value");
// tag::update-job-detector-options
JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
"detector description", // <2>
detectionRules); // <3>
// end::update-job-detector-options
// tag::update-job-options
JobUpdate update = new JobUpdate.Builder(jobId) // <1>
.setDescription("My description") // <2>
.setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
.setBackgroundPersistInterval(TimeValue.timeValueHours(3)) // <4>
.setCategorizationFilters(Arrays.asList("categorization-filter")) // <5>
.setDetectorUpdates(Arrays.asList(detectorUpdate)) // <6>
.setGroups(Arrays.asList("job-group-1")) // <7>
.setResultsRetentionDays(10L) // <8>
.setModelPlotConfig(new ModelPlotConfig(true, null, true)) // <9>
.setModelSnapshotRetentionDays(7L) // <10>
.setCustomSettings(customSettings) // <11>
.setRenormalizationWindowDays(3L) // <12>
.build();
// end::update-job-options
// tag::update-job-request
UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
// end::update-job-request
// tag::update-job-execute
PutJobResponse updateJobResponse = client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
// end::update-job-execute
// tag::update-job-response
Job updatedJob = updateJobResponse.getResponse(); // <1>
// end::update-job-response
assertEquals(update.getDescription(), updatedJob.getDescription());
}
{
// tag::update-job-execute-listener
ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
@Override
public void onResponse(PutJobResponse updateJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-job-execute-listener
UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build());
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-job-execute-async
client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::update-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
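// Documents the Put Datafeed API: building a DatafeedConfig (indices, aggregations, chunking, frequency, query, query delay, delayed data checks, script fields, scroll size) and creating it.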
public void testPutDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// We need to create a job for the datafeed request to be valid
String jobId = "put-datafeed-job-1";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String id = "datafeed-1";
// tag::put-datafeed-config
DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder(id, jobId) // <1>
.setIndices("index_1", "index_2"); // <2>
// end::put-datafeed-config
AggregatorFactories.Builder aggs = AggregatorFactories.builder();
// tag::put-datafeed-config-set-aggregations
datafeedBuilder.setAggregations(aggs); // <1>
// end::put-datafeed-config-set-aggregations
// Clear the aggregations that were set above so the datafeed does not trip the more complex aggregation validation rules
datafeedBuilder.setAggregations((String) null);
// tag::put-datafeed-config-set-chunking-config
datafeedBuilder.setChunkingConfig(ChunkingConfig.newAuto()); // <1>
// end::put-datafeed-config-set-chunking-config
// tag::put-datafeed-config-set-frequency
datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); // <1>
// end::put-datafeed-config-set-frequency
// tag::put-datafeed-config-set-query
datafeedBuilder.setQuery(QueryBuilders.matchAllQuery()); // <1>
// end::put-datafeed-config-set-query
// tag::put-datafeed-config-set-query-delay
datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
// end::put-datafeed-config-set-query-delay
// tag::put-datafeed-config-set-delayed-data-check-config
datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig
.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1))); // <1>
// end::put-datafeed-config-set-delayed-data-check-config
// Clear the delayed data check config so we do not accidentally trip internal validations tied to the job's bucket span
datafeedBuilder.setDelayedDataCheckConfig(null);
List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
// tag::put-datafeed-config-set-script-fields
datafeedBuilder.setScriptFields(scriptFields); // <1>
// end::put-datafeed-config-set-script-fields
// tag::put-datafeed-config-set-scroll-size
datafeedBuilder.setScrollSize(1000); // <1>
// end::put-datafeed-config-set-scroll-size
// tag::put-datafeed-request
PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build()); // <1>
// end::put-datafeed-request
// tag::put-datafeed-execute
PutDatafeedResponse response = client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
// end::put-datafeed-execute
// tag::put-datafeed-response
DatafeedConfig datafeed = response.getResponse(); // <1>
// end::put-datafeed-response
assertThat(datafeed.getId(), equalTo("datafeed-1"));
}
{
// We need to create a job for the datafeed request to be valid
String jobId = "put-datafeed-job-2";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String id = "datafeed-2";
DatafeedConfig datafeed = new DatafeedConfig.Builder(id, jobId).setIndices("index_1", "index_2").build();
PutDatafeedRequest request = new PutDatafeedRequest(datafeed);
// tag::put-datafeed-execute-listener
ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
@Override
public void onResponse(PutDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-datafeed-execute-async
client.machineLearning().putDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
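// Documents the Update Datafeed API: assembling a DatafeedUpdate and applying it with updateDatafeed and updateDatafeedAsync.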
public void testUpdateDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("update-datafeed-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
AggregatorFactories.Builder aggs = AggregatorFactories.builder();
List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
// tag::update-datafeed-config
DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder(datafeedId) // <1>
.setAggregations(aggs) // <2>
.setIndices("index_1", "index_2") // <3>
.setChunkingConfig(ChunkingConfig.newAuto()) // <4>
.setFrequency(TimeValue.timeValueSeconds(30)) // <5>
.setQuery(QueryBuilders.matchAllQuery()) // <6>
.setQueryDelay(TimeValue.timeValueMinutes(1)) // <7>
.setScriptFields(scriptFields) // <8>
.setScrollSize(1000); // <9>
// end::update-datafeed-config
// Clear the aggregations that were set above so the update does not trip the more complex aggregation validation rules
datafeedUpdateBuilder.setAggregations((String) null);
// tag::update-datafeed-request
UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdateBuilder.build()); // <1>
// end::update-datafeed-request
// tag::update-datafeed-execute
PutDatafeedResponse response = client.machineLearning().updateDatafeed(request, RequestOptions.DEFAULT);
// end::update-datafeed-execute
// tag::update-datafeed-response
DatafeedConfig updatedDatafeed = response.getResponse(); // <1>
// end::update-datafeed-response
assertThat(updatedDatafeed.getId(), equalTo(datafeedId));
}
{
DatafeedUpdate datafeedUpdate = new DatafeedUpdate.Builder(datafeedId).setIndices("index_1", "index_2").build();
UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdate);
// tag::update-datafeed-execute-listener
ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
@Override
public void onResponse(PutDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-datafeed-execute-async
client.machineLearning().updateDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::update-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("get-datafeed-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
// tag::get-datafeed-request
GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
request.setAllowNoDatafeeds(true); // <2>
// end::get-datafeed-request
// tag::get-datafeed-execute
GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
// end::get-datafeed-execute
// tag::get-datafeed-response
long numberOfDatafeeds = response.count(); // <1>
List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
// end::get-datafeed-response
assertEquals(1, numberOfDatafeeds);
assertEquals(1, datafeeds.size());
}
{
GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);
// tag::get-datafeed-execute-listener
ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
@Override
public void onResponse(GetDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-datafeed-execute-async
client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
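// Documents the Delete Datafeed API; the datafeed is recreated between the two blocks so the async example has something to delete.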
public void testDeleteDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
String jobId = "test-delete-datafeed-job";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = "test-delete-datafeed";
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
// tag::delete-datafeed-request
DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
deleteDatafeedRequest.setForce(false); // <1>
// end::delete-datafeed-request
// tag::delete-datafeed-execute
AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
deleteDatafeedRequest, RequestOptions.DEFAULT);
// end::delete-datafeed-execute
// tag::delete-datafeed-response
boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
// end::delete-datafeed-response
}
// Recreate datafeed to allow second deletion
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
// tag::delete-datafeed-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
// tag::delete-datafeed-execute-async
client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testPreviewDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("preview-datafeed-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
String indexName = "preview_data_2";
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
.setIndices(indexName)
.build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
{
// tag::preview-datafeed-request
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1>
// end::preview-datafeed-request
// tag::preview-datafeed-execute
PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
// end::preview-datafeed-execute
// tag::preview-datafeed-response
BytesReference rawPreview = response.getPreview(); // <1>
List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2>
// end::preview-datafeed-response
assertTrue(semiParsedPreview.isEmpty());
}
{
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId);
// tag::preview-datafeed-execute-listener
ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() {
@Override
public void onResponse(PreviewDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::preview-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::preview-datafeed-execute-async
client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::preview-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
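// Documents the Start Datafeed API: the start, end and timeout options plus the isStarted and getNode response fields.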
public void testStartDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("start-datafeed-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
String datafeedId = job.getId() + "-feed";
String indexName = "start_data_2";
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
.setIndices(indexName)
.build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
{
// tag::start-datafeed-request
StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1>
// end::start-datafeed-request
// tag::start-datafeed-request-options
request.setEnd("2018-08-21T00:00:00Z"); // <1>
request.setStart("2018-08-20T00:00:00Z"); // <2>
request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
// end::start-datafeed-request-options
// tag::start-datafeed-execute
StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
// end::start-datafeed-execute
// tag::start-datafeed-response
boolean started = response.isStarted(); // <1>
String node = response.getNode(); // <2>
// end::start-datafeed-response
assertTrue(started);
assertThat(node, notNullValue());
}
{
StartDatafeedRequest request = new StartDatafeedRequest(datafeedId);
// tag::start-datafeed-execute-listener
ActionListener<StartDatafeedResponse> listener = new ActionListener<StartDatafeedResponse>() {
@Override
public void onResponse(StartDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::start-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::start-datafeed-execute-async
client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::start-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
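// Documents the Stop Datafeed API, here switched to the stop-all form with allowNoDatafeeds, force and timeout options.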
public void testStopDatafeed() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// tag::stop-datafeed-request
StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1>
// end::stop-datafeed-request
request = StopDatafeedRequest.stopAllDatafeedsRequest();
// tag::stop-datafeed-request-options
request.setAllowNoDatafeeds(true); // <1>
request.setForce(true); // <2>
request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
// end::stop-datafeed-request-options
// tag::stop-datafeed-execute
StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
// end::stop-datafeed-execute
// tag::stop-datafeed-response
boolean stopped = response.isStopped(); // <1>
// end::stop-datafeed-response
assertTrue(stopped);
}
{
StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest();
// tag::stop-datafeed-execute-listener
ActionListener<StopDatafeedResponse> listener = new ActionListener<StopDatafeedResponse>() {
@Override
public void onResponse(StopDatafeedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::stop-datafeed-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::stop-datafeed-execute-async
client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-datafeed-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetDatafeedStats() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats1");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats2");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
String datafeedId1 = job.getId() + "-feed";
String indexName = "datafeed_stats_data_2";
createIndex(indexName);
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId())
.setIndices(indexName)
.build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
String datafeedId2 = secondJob.getId() + "-feed";
DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId())
.setIndices(indexName)
.build();
client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT);
{
//tag::get-datafeed-stats-request
GetDatafeedStatsRequest request =
new GetDatafeedStatsRequest("get-machine-learning-datafeed-stats1-feed", "get-machine-learning-datafeed*"); // <1>
request.setAllowNoDatafeeds(true); // <2>
//end::get-datafeed-stats-request
//tag::get-datafeed-stats-execute
GetDatafeedStatsResponse response = client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT);
//end::get-datafeed-stats-execute
//tag::get-datafeed-stats-response
long numberOfDatafeedStats = response.count(); // <1>
List<DatafeedStats> datafeedStats = response.datafeedStats(); // <2>
//end::get-datafeed-stats-response
assertEquals(2, response.count());
assertThat(response.datafeedStats(), hasSize(2));
assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
containsInAnyOrder(datafeed.getId(), secondDatafeed.getId()));
}
{
GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("*");
// tag::get-datafeed-stats-execute-listener
ActionListener<GetDatafeedStatsResponse> listener = new ActionListener<GetDatafeedStatsResponse>() {
@Override
public void onResponse(GetDatafeedStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-datafeed-stats-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-datafeed-stats-execute-async
client.machineLearning().getDatafeedStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-datafeed-stats-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
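// Documents the Get Buckets API against a manually indexed bucket result; options that would hide that bucket (timestamp, page params) are reset before executing.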
public void testGetBuckets() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-buckets";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a bucket
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
"\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-buckets-request
GetBucketsRequest request = new GetBucketsRequest(jobId); // <1>
// end::get-buckets-request
// tag::get-buckets-timestamp
request.setTimestamp("2018-08-17T00:00:00Z"); // <1>
// end::get-buckets-timestamp
// Set timestamp to null as it is incompatible with other args
request.setTimestamp(null);
// tag::get-buckets-anomaly-score
request.setAnomalyScore(75.0); // <1>
// end::get-buckets-anomaly-score
// tag::get-buckets-desc
request.setDescending(true); // <1>
// end::get-buckets-desc
// tag::get-buckets-end
request.setEnd("2018-08-21T00:00:00Z"); // <1>
// end::get-buckets-end
// tag::get-buckets-exclude-interim
request.setExcludeInterim(true); // <1>
// end::get-buckets-exclude-interim
// tag::get-buckets-expand
request.setExpand(true); // <1>
// end::get-buckets-expand
// tag::get-buckets-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-buckets-page
// Set page params back to null so the response contains the bucket we indexed
request.setPageParams(null);
// tag::get-buckets-sort
request.setSort("anomaly_score"); // <1>
// end::get-buckets-sort
// tag::get-buckets-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-buckets-start
// tag::get-buckets-execute
GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
// end::get-buckets-execute
// tag::get-buckets-response
long count = response.count(); // <1>
List<Bucket> buckets = response.buckets(); // <2>
// end::get-buckets-response
assertEquals(1, buckets.size());
}
{
GetBucketsRequest request = new GetBucketsRequest(jobId);
// tag::get-buckets-execute-listener
ActionListener<GetBucketsResponse> listener =
new ActionListener<GetBucketsResponse>() {
@Override
public void onResponse(GetBucketsResponse getBucketsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-buckets-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-buckets-execute-async
client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-buckets-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
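// Documents the Flush Job API, which forces an open job to process buffered data; the snippet also shows the calcInterim, advanceTime, skipTime and start/end window options.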
public void testFlushJob() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);
{
// tag::flush-job-request
FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1>
// end::flush-job-request
// tag::flush-job-request-options
flushJobRequest.setCalcInterim(true); // <1>
flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2>
flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3>
flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4>
flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5>
// end::flush-job-request-options
// tag::flush-job-execute
FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
// end::flush-job-execute
// tag::flush-job-response
boolean isFlushed = flushJobResponse.isFlushed(); // <1>
Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
// end::flush-job-response
}
{
// tag::flush-job-execute-listener
ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
@Override
public void onResponse(FlushJobResponse flushJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::flush-job-execute-listener
FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::flush-job-execute-async
client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::flush-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
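// Documents the Delete Forecast API: a forecast is generated and waited on first, then deleted by id and, in the async variant, via deleteAllForecasts.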
public void testDeleteForecast() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("deleting-forecast-for-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
for (int i = 0; i < 30; i++) {
Map<String, Object> hashMap = new HashMap<>();
hashMap.put("total", randomInt(1000));
hashMap.put("timestamp", (i + 1) * 1000);
builder.addDoc(hashMap);
}
PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
ForecastJobResponse forecastJobResponse = client.machineLearning()
.forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT);
String forecastId = forecastJobResponse.getForecastId();
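// The forecast writes a model_forecast_request_stats document; wait until it reports the forecast as finished before deleting it.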
GetRequest request = new GetRequest(".ml-anomalies-" + job.getId());
request.id(job.getId() + "_model_forecast_request_stats_" + forecastId);
assertBusy(() -> {
GetResponse getResponse = highLevelClient().get(request, RequestOptions.DEFAULT);
assertTrue(getResponse.isExists());
assertTrue(getResponse.getSourceAsString().contains("finished"));
}, 30, TimeUnit.SECONDS);
{
// tag::delete-forecast-request
DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1>
// end::delete-forecast-request
// tag::delete-forecast-request-options
deleteForecastRequest.setForecastIds(forecastId); // <1>
deleteForecastRequest.timeout("30s"); // <2>
deleteForecastRequest.setAllowNoForecasts(true); // <3>
// end::delete-forecast-request-options
// tag::delete-forecast-execute
AcknowledgedResponse deleteForecastResponse = client.machineLearning().deleteForecast(deleteForecastRequest,
RequestOptions.DEFAULT);
// end::delete-forecast-execute
// tag::delete-forecast-response
boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1>
// end::delete-forecast-response
}
{
// tag::delete-forecast-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-forecast-execute-listener
DeleteForecastRequest deleteForecastRequest = DeleteForecastRequest.deleteAllForecasts(job.getId());
deleteForecastRequest.setAllowNoForecasts(true);
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-forecast-execute-async
client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-forecast-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetJobStats() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
{
// tag::get-job-stats-request
GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1>
request.setAllowNoJobs(true); // <2>
// end::get-job-stats-request
// tag::get-job-stats-execute
GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
// end::get-job-stats-execute
// tag::get-job-stats-response
long numberOfJobStats = response.count(); // <1>
List<JobStats> jobStats = response.jobStats(); // <2>
// end::get-job-stats-response
assertEquals(2, response.count());
assertThat(response.jobStats(), hasSize(2));
assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
containsInAnyOrder(job.getId(), secondJob.getId()));
}
{
GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");
// tag::get-job-stats-execute-listener
ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
@Override
public void onResponse(GetJobStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-job-stats-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-job-stats-execute-async
client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-job-stats-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
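// Documents the Forecast Job API: post data, flush the job, then request a forecast with expiry, duration and max model memory options.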
public void testForecastJob() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
for (int i = 0; i < 30; i++) {
Map<String, Object> hashMap = new HashMap<>();
hashMap.put("total", randomInt(1000));
hashMap.put("timestamp", (i + 1) * 1000);
builder.addDoc(hashMap);
}
PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
{
// tag::forecast-job-request
ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1>
// end::forecast-job-request
// tag::forecast-job-request-options
forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1>
forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2>
forecastJobRequest.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // <3>
// end::forecast-job-request-options
// tag::forecast-job-execute
ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
// end::forecast-job-execute
// tag::forecast-job-response
boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1>
String forecastId = forecastJobResponse.getForecastId(); // <2>
// end::forecast-job-response
assertTrue(isAcknowledged);
assertNotNull(forecastId);
}
{
// tag::forecast-job-execute-listener
ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
@Override
public void onResponse(ForecastJobResponse forecastJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::forecast-job-execute-listener
ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job");
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::forecast-job-execute-async
client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::forecast-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetOverallBuckets() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId1 = "test-get-overall-buckets-1";
String jobId2 = "test-get-overall-buckets-2";
Job job1 = MachineLearningGetResultsIT.buildJob(jobId1);
Job job2 = MachineLearningGetResultsIT.buildJob(jobId2);
client.machineLearning().putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
client.machineLearning().putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
// Let us index some buckets
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
{
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
"\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON);
bulkRequest.add(indexRequest);
}
{
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
"\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON);
bulkRequest.add(indexRequest);
}
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::get-overall-buckets-request
GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1>
// end::get-overall-buckets-request
// tag::get-overall-buckets-bucket-span
request.setBucketSpan(TimeValue.timeValueHours(24)); // <1>
// end::get-overall-buckets-bucket-span
// tag::get-overall-buckets-end
request.setEnd("2018-08-21T00:00:00Z"); // <1>
// end::get-overall-buckets-end
// tag::get-overall-buckets-exclude-interim
request.setExcludeInterim(true); // <1>
// end::get-overall-buckets-exclude-interim
// tag::get-overall-buckets-overall-score
request.setOverallScore(75.0); // <1>
// end::get-overall-buckets-overall-score
// tag::get-overall-buckets-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-overall-buckets-start
// tag::get-overall-buckets-top-n
request.setTopN(2); // <1>
// end::get-overall-buckets-top-n
// tag::get-overall-buckets-execute
GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
// end::get-overall-buckets-execute
// tag::get-overall-buckets-response
long count = response.count(); // <1>
List<OverallBucket> overallBuckets = response.overallBuckets(); // <2>
// end::get-overall-buckets-response
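// With topN=2 the overall score is the mean of the two jobs' maximum bucket anomaly scores: (60.0 + 100.0) / 2 = 80.0.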
assertEquals(1, overallBuckets.size());
assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001)));
}
{
GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2);
// tag::get-overall-buckets-execute-listener
ActionListener<GetOverallBucketsResponse> listener =
new ActionListener<GetOverallBucketsResponse>() {
@Override
public void onResponse(GetOverallBucketsResponse getOverallBucketsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-overall-buckets-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-overall-buckets-execute-async
client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-overall-buckets-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetRecords() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-records";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a record
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-get-records\", \"result_type\":\"record\", \"timestamp\": 1533081600000," +
"\"bucket_span\": 600,\"is_interim\": false, \"record_score\": 80.0}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-records-request
GetRecordsRequest request = new GetRecordsRequest(jobId); // <1>
// end::get-records-request
// tag::get-records-desc
request.setDescending(true); // <1>
// end::get-records-desc
// tag::get-records-end
request.setEnd("2018-08-21T00:00:00Z"); // <1>
// end::get-records-end
// tag::get-records-exclude-interim
request.setExcludeInterim(true); // <1>
// end::get-records-exclude-interim
// tag::get-records-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-records-page
// Set page params back to null so the response contains the record we indexed
request.setPageParams(null);
// tag::get-records-record-score
request.setRecordScore(75.0); // <1>
// end::get-records-record-score
// tag::get-records-sort
request.setSort("probability"); // <1>
// end::get-records-sort
// tag::get-records-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-records-start
// tag::get-records-execute
GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
// end::get-records-execute
// tag::get-records-response
long count = response.count(); // <1>
List<AnomalyRecord> records = response.records(); // <2>
// end::get-records-response
assertEquals(1, records.size());
}
{
GetRecordsRequest request = new GetRecordsRequest(jobId);
// tag::get-records-execute-listener
ActionListener<GetRecordsResponse> listener =
new ActionListener<GetRecordsResponse>() {
@Override
public void onResponse(GetRecordsResponse getRecordsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-records-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-records-execute-async
client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-records-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
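// Documents the Post Data API: streaming JSON documents to an open job and reading back the DataCounts from the response.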
public void testPostData() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("test-post-data");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
{
// tag::post-data-request
PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1>
Map<String, Object> mapData = new HashMap<>();
mapData.put("total", 109);
jsonBuilder.addDoc(mapData); // <2>
jsonBuilder.addDoc("{\"total\":1000}"); // <3>
PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4>
// end::post-data-request
// tag::post-data-request-options
postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1>
postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2>
// end::post-data-request-options
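// The reset window above is set only to render the documentation snippet; clear it before actually posting the data.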
postDataRequest.setResetEnd(null);
postDataRequest.setResetStart(null);
// tag::post-data-execute
PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
// end::post-data-execute
// tag::post-data-response
DataCounts dataCounts = postDataResponse.getDataCounts(); // <1>
// end::post-data-response
assertEquals(2, dataCounts.getInputRecordCount());
}
{
// tag::post-data-execute-listener
ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
@Override
public void onResponse(PostDataResponse postDataResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::post-data-execute-listener
PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
Map<String, Object> mapData = new HashMap<>();
mapData.put("total", 109);
jsonBuilder.addDoc(mapData);
PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder);
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::post-data-execute-async
client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1>
// end::post-data-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
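// Documents the Find File Structure API: sampling a newline-delimited JSON file and inspecting the detected FileStructure.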
public void testFindFileStructure() throws Exception {
RestHighLevelClient client = highLevelClient();
Path anInterestingFile = createTempFile();
String contents = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\"," +
"\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\"," +
"\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n" +
"{\"logger\":\"controller\",\"timestamp\":1478261151445," +
"\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\"," +
"\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n";
Files.write(anInterestingFile, Collections.singleton(contents), StandardCharsets.UTF_8);
{
// tag::find-file-structure-request
FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest(); // <1>
findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile)); // <2>
// end::find-file-structure-request
// tag::find-file-structure-request-options
findFileStructureRequest.setLinesToSample(500); // <1>
findFileStructureRequest.setExplain(true); // <2>
// end::find-file-structure-request-options
// tag::find-file-structure-execute
FindFileStructureResponse findFileStructureResponse =
client.machineLearning().findFileStructure(findFileStructureRequest, RequestOptions.DEFAULT);
// end::find-file-structure-execute
// tag::find-file-structure-response
FileStructure structure = findFileStructureResponse.getFileStructure(); // <1>
// end::find-file-structure-response
assertEquals(2, structure.getNumLinesAnalyzed());
}
{
// tag::find-file-structure-execute-listener
ActionListener<FindFileStructureResponse> listener = new ActionListener<FindFileStructureResponse>() {
@Override
public void onResponse(FindFileStructureResponse findFileStructureResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::find-file-structure-execute-listener
FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest();
findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile));
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::find-file-structure-execute-async
client.machineLearning().findFileStructureAsync(findFileStructureRequest, RequestOptions.DEFAULT, listener); // <1>
// end::find-file-structure-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetInfluencers() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-influencers";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a record
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-get-influencers\", \"result_type\":\"influencer\", \"timestamp\": 1533081600000," +
"\"bucket_span\": 600,\"is_interim\": false, \"influencer_score\": 80.0, \"influencer_field_name\": \"my_influencer\"," +
"\"influencer_field_value\":\"foo\"}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-influencers-request
GetInfluencersRequest request = new GetInfluencersRequest(jobId); // <1>
// end::get-influencers-request
// tag::get-influencers-desc
request.setDescending(true); // <1>
// end::get-influencers-desc
// tag::get-influencers-end
request.setEnd("2018-08-21T00:00:00Z"); // <1>
// end::get-influencers-end
// tag::get-influencers-exclude-interim
request.setExcludeInterim(true); // <1>
// end::get-influencers-exclude-interim
// tag::get-influencers-influencer-score
request.setInfluencerScore(75.0); // <1>
// end::get-influencers-influencer-score
// tag::get-influencers-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-influencers-page
// Set page params back to null so the response contains the influencer we indexed
request.setPageParams(null);
// tag::get-influencers-sort
request.setSort("probability"); // <1>
// end::get-influencers-sort
// tag::get-influencers-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-influencers-start
// tag::get-influencers-execute
GetInfluencersResponse response = client.machineLearning().getInfluencers(request, RequestOptions.DEFAULT);
// end::get-influencers-execute
// tag::get-influencers-response
long count = response.count(); // <1>
List<Influencer> influencers = response.influencers(); // <2>
// end::get-influencers-response
assertEquals(1, influencers.size());
}
{
GetInfluencersRequest request = new GetInfluencersRequest(jobId);
// tag::get-influencers-execute-listener
ActionListener<GetInfluencersResponse> listener =
new ActionListener<GetInfluencersResponse>() {
@Override
public void onResponse(GetInfluencersResponse getInfluencersResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-influencers-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-influencers-execute-async
client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-influencers-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetCategories() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-categories";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a category
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," +
" \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-categories-request
GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1>
// end::get-categories-request
// tag::get-categories-category-id
request.setCategoryId(1L); // <1>
// end::get-categories-category-id
// tag::get-categories-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-categories-page
// Set page params back to null so the response contains the category we indexed
request.setPageParams(null);
// tag::get-categories-execute
GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT);
// end::get-categories-execute
// tag::get-categories-response
long count = response.count(); // <1>
List<CategoryDefinition> categories = response.categories(); // <2>
// end::get-categories-response
assertEquals(1, categories.size());
}
{
GetCategoriesRequest request = new GetCategoriesRequest(jobId);
// tag::get-categories-execute-listener
ActionListener<GetCategoriesResponse> listener =
new ActionListener<GetCategoriesResponse>() {
@Override
public void onResponse(GetCategoriesResponse getCategoriesResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-categories-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-categories-execute-async
client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-categories-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
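// Documents the Delete Expired Data API, which removes ML results and model snapshots that have exceeded their configured retention.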
public void testDeleteExpiredData() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-delete-expired-data";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
{
// tag::delete-expired-data-request
DeleteExpiredDataRequest request = new DeleteExpiredDataRequest( // <1>
null, // <2>
1000.0f, // <3>
TimeValue.timeValueHours(12) // <4>
);
// end::delete-expired-data-request
// tag::delete-expired-data-execute
DeleteExpiredDataResponse response = client.machineLearning().deleteExpiredData(request, RequestOptions.DEFAULT);
// end::delete-expired-data-execute
// tag::delete-expired-data-response
boolean deleted = response.getDeleted(); // <1>
// end::delete-expired-data-response
assertTrue(deleted);
}
{
// tag::delete-expired-data-execute-listener
ActionListener<DeleteExpiredDataResponse> listener = new ActionListener<DeleteExpiredDataResponse>() {
@Override
public void onResponse(DeleteExpiredDataResponse deleteExpiredDataResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-expired-data-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteExpiredDataRequest deleteExpiredDataRequest = new DeleteExpiredDataRequest();
// tag::delete-expired-data-execute-async
client.machineLearning().deleteExpiredDataAsync(deleteExpiredDataRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-expired-data-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-delete-model-snapshot";
String snapshotId = "1541587919";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
{
client.index(indexRequest, RequestOptions.DEFAULT);
// tag::delete-model-snapshot-request
DeleteModelSnapshotRequest request = new DeleteModelSnapshotRequest(jobId, snapshotId); // <1>
// end::delete-model-snapshot-request
// tag::delete-model-snapshot-execute
AcknowledgedResponse response = client.machineLearning().deleteModelSnapshot(request, RequestOptions.DEFAULT);
// end::delete-model-snapshot-execute
// tag::delete-model-snapshot-response
boolean isAcknowledged = response.isAcknowledged(); // <1>
// end::delete-model-snapshot-response
assertTrue(isAcknowledged);
}
{
client.index(indexRequest, RequestOptions.DEFAULT);
// tag::delete-model-snapshot-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, snapshotId);
// tag::delete-model-snapshot-execute-async
client.machineLearning().deleteModelSnapshotAsync(deleteModelSnapshotRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetModelSnapshots() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-model-snapshots";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-get-model-snapshots\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-get-model-snapshots\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-model-snapshots-request
GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId); // <1>
// end::get-model-snapshots-request
// tag::get-model-snapshots-snapshot-id
request.setSnapshotId("1541587919"); // <1>
// end::get-model-snapshots-snapshot-id
// Set snapshot id to null as it is incompatible with other args
request.setSnapshotId(null);
// tag::get-model-snapshots-desc
request.setDesc(true); // <1>
// end::get-model-snapshots-desc
// tag::get-model-snapshots-end
request.setEnd("2018-11-07T21:00:00Z"); // <1>
// end::get-model-snapshots-end
// tag::get-model-snapshots-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-model-snapshots-page
// Set page params back to null so the response contains the snapshot we indexed
request.setPageParams(null);
// tag::get-model-snapshots-sort
request.setSort("latest_result_time_stamp"); // <1>
// end::get-model-snapshots-sort
// tag::get-model-snapshots-start
request.setStart("2018-11-07T00:00:00Z"); // <1>
// end::get-model-snapshots-start
// tag::get-model-snapshots-execute
GetModelSnapshotsResponse response = client.machineLearning().getModelSnapshots(request, RequestOptions.DEFAULT);
// end::get-model-snapshots-execute
// tag::get-model-snapshots-response
long count = response.count(); // <1>
List<ModelSnapshot> modelSnapshots = response.snapshots(); // <2>
// end::get-model-snapshots-response
assertEquals(1, modelSnapshots.size());
}
{
GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId);
// tag::get-model-snapshots-execute-listener
ActionListener<GetModelSnapshotsResponse> listener =
new ActionListener<GetModelSnapshotsResponse>() {
@Override
public void onResponse(GetModelSnapshotsResponse getModelSnapshotsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-model-snapshots-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-model-snapshots-execute-async
client.machineLearning().getModelSnapshotsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-model-snapshots-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
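    /**
     * Documentation test for the Revert Model Snapshot API: indexes a snapshot document (including quantiles), then
     * reverts the job to that snapshot, optionally deleting intervening results, both synchronously and asynchronously.
     */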
public void testRevertModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-revert-model-snapshot";
String snapshotId = "1541587919";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false, " +
"\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
"\"quantile_state\":\"state\"}}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::revert-model-snapshot-request
RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // <1>
// end::revert-model-snapshot-request
// tag::revert-model-snapshot-delete-intervening-results
request.setDeleteInterveningResults(true); // <1>
// end::revert-model-snapshot-delete-intervening-results
// tag::revert-model-snapshot-execute
RevertModelSnapshotResponse response = client.machineLearning().revertModelSnapshot(request, RequestOptions.DEFAULT);
// end::revert-model-snapshot-execute
// tag::revert-model-snapshot-response
ModelSnapshot modelSnapshot = response.getModel(); // <1>
// end::revert-model-snapshot-response
assertEquals(snapshotId, modelSnapshot.getSnapshotId());
assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000", modelSnapshot.getDescription());
assertEquals(51722, modelSnapshot.getModelSizeStats().getModelBytes());
}
{
RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId);
// tag::revert-model-snapshot-execute-listener
ActionListener<RevertModelSnapshotResponse> listener =
new ActionListener<RevertModelSnapshotResponse>() {
@Override
public void onResponse(RevertModelSnapshotResponse revertModelSnapshotResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::revert-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::revert-model-snapshot-execute-async
client.machineLearning().revertModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::revert-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
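    /**
     * Documentation test for the Update Model Snapshot API: indexes a snapshot document and then updates its
     * description and retention flag, checking the acknowledged flag and the returned {@link ModelSnapshot}.
     */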
public void testUpdateModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-update-model-snapshot";
String snapshotId = "1541587919";
String documentId = jobId + "_model_snapshot_" + snapshotId;
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::update-model-snapshot-request
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId); // <1>
// end::update-model-snapshot-request
// tag::update-model-snapshot-description
request.setDescription("My Snapshot"); // <1>
// end::update-model-snapshot-description
// tag::update-model-snapshot-retain
request.setRetain(true); // <1>
// end::update-model-snapshot-retain
// tag::update-model-snapshot-execute
UpdateModelSnapshotResponse response = client.machineLearning().updateModelSnapshot(request, RequestOptions.DEFAULT);
// end::update-model-snapshot-execute
// tag::update-model-snapshot-response
boolean acknowledged = response.getAcknowledged(); // <1>
ModelSnapshot modelSnapshot = response.getModel(); // <2>
// end::update-model-snapshot-response
assertTrue(acknowledged);
assertEquals("My Snapshot", modelSnapshot.getDescription()); }
{
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);
// tag::update-model-snapshot-execute-listener
ActionListener<UpdateModelSnapshotResponse> listener =
new ActionListener<UpdateModelSnapshotResponse>() {
@Override
public void onResponse(UpdateModelSnapshotResponse updateModelSnapshotResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-model-snapshot-execute-async
client.machineLearning().updateModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::update-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
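    /**
     * Documentation test for the Put Calendar API: creates a calendar with the "public_holidays" id and shows the
     * synchronous call, the returned {@link Calendar}, and the asynchronous listener-based variant.
     */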
public void testPutCalendar() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
// tag::put-calendar-request
Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest request = new PutCalendarRequest(calendar); // <1>
// end::put-calendar-request
// tag::put-calendar-execute
PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
// end::put-calendar-execute
// tag::put-calendar-response
Calendar newCalendar = response.getCalendar(); // <1>
// end::put-calendar-response
assertThat(newCalendar.getId(), equalTo("public_holidays"));
// tag::put-calendar-execute-listener
ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
@Override
public void onResponse(PutCalendarResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-calendar-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-calendar-execute-async
client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-calendar-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testPutCalendarJob() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// tag::put-calendar-job-request
PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", // <1>
"job_2", "job_group_1"); // <2>
// end::put-calendar-job-request
// tag::put-calendar-job-execute
PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT);
// end::put-calendar-job-execute
// tag::put-calendar-job-response
Calendar updatedCalendar = response.getCalendar(); // <1>
// end::put-calendar-job-response
assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_1", "job_2", "job_group_1"));
}
{
PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", "job_4");
// tag::put-calendar-job-execute-listener
ActionListener<PutCalendarResponse> listener =
new ActionListener<PutCalendarResponse>() {
@Override
public void onResponse(PutCalendarResponse putCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-calendar-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-calendar-job-execute-async
client.machineLearning().putCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-calendar-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteCalendarJob() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays",
Arrays.asList("job_1", "job_group_1", "job_2"),
"A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// tag::delete-calendar-job-request
DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", // <1>
"job_1", "job_group_1"); // <2>
// end::delete-calendar-job-request
// tag::delete-calendar-job-execute
PutCalendarResponse response = client.machineLearning().deleteCalendarJob(request, RequestOptions.DEFAULT);
// end::delete-calendar-job-execute
// tag::delete-calendar-job-response
Calendar updatedCalendar = response.getCalendar(); // <1>
// end::delete-calendar-job-response
assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_2"));
}
{
DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", "job_2");
// tag::delete-calendar-job-execute-listener
ActionListener<PutCalendarResponse> listener =
new ActionListener<PutCalendarResponse>() {
@Override
public void onResponse(PutCalendarResponse deleteCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-calendar-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-calendar-job-execute-async
client.machineLearning().deleteCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-calendar-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
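    /**
     * Documentation test for the Get Calendars API: stores a "holidays" calendar and retrieves it back, showing the
     * calendar id and paging options on {@link GetCalendarsRequest} as well as the async variant.
     */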
public void testGetCalendar() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// tag::get-calendars-request
GetCalendarsRequest request = new GetCalendarsRequest(); // <1>
// end::get-calendars-request
// tag::get-calendars-id
request.setCalendarId("holidays"); // <1>
// end::get-calendars-id
// tag::get-calendars-page
request.setPageParams(new PageParams(10, 20)); // <1>
// end::get-calendars-page
// reset page params
request.setPageParams(null);
// tag::get-calendars-execute
GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
// end::get-calendars-execute
// tag::get-calendars-response
long count = response.count(); // <1>
List<Calendar> calendars = response.calendars(); // <2>
// end::get-calendars-response
assertEquals(1, calendars.size());
}
{
GetCalendarsRequest request = new GetCalendarsRequest("holidays");
// tag::get-calendars-execute-listener
ActionListener<GetCalendarsResponse> listener =
new ActionListener<GetCalendarsResponse>() {
@Override
public void onResponse(GetCalendarsResponse getCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-calendars-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-calendars-execute-async
client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-calendars-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteCalendar() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putCalendarRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putCalendarRequest, RequestOptions.DEFAULT);
// tag::delete-calendar-request
DeleteCalendarRequest request = new DeleteCalendarRequest("holidays"); // <1>
// end::delete-calendar-request
// tag::delete-calendar-execute
AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT);
// end::delete-calendar-execute
// tag::delete-calendar-response
boolean isAcknowledged = response.isAcknowledged(); // <1>
// end::delete-calendar-response
assertTrue(isAcknowledged);
// tag::delete-calendar-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-calendar-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-calendar-execute-async
client.machineLearning().deleteCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-calendar-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
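    /**
     * Documentation test for the Get Calendar Events API: posts a scheduled event to the "holidays" calendar and then
     * queries it back, demonstrating the paging, time-range and job id filters before resetting them.
     */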
public void testGetCalendarEvent() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
{
// tag::get-calendar-events-request
GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // <1>
// end::get-calendar-events-request
// tag::get-calendar-events-page
request.setPageParams(new PageParams(10, 20)); // <1>
// end::get-calendar-events-page
// tag::get-calendar-events-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-calendar-events-start
// tag::get-calendar-events-end
request.setEnd("2018-08-02T00:00:00Z"); // <1>
// end::get-calendar-events-end
// tag::get-calendar-events-jobid
request.setJobId("job_1"); // <1>
// end::get-calendar-events-jobid
// reset params
request.setPageParams(null);
request.setJobId(null);
request.setStart(null);
request.setEnd(null);
// tag::get-calendar-events-execute
GetCalendarEventsResponse response = client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT);
// end::get-calendar-events-execute
// tag::get-calendar-events-response
long count = response.count(); // <1>
List<ScheduledEvent> scheduledEvents = response.events(); // <2>
// end::get-calendar-events-response
assertEquals(1, scheduledEvents.size());
}
{
GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays");
// tag::get-calendar-events-execute-listener
ActionListener<GetCalendarEventsResponse> listener =
new ActionListener<GetCalendarEventsResponse>() {
@Override
public void onResponse(GetCalendarEventsResponse getCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-calendar-events-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-calendar-events-execute-async
client.machineLearning().getCalendarEventsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-calendar-events-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
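    /**
     * Documentation test for the Post Calendar Events API: adds scheduled events to the "holidays" calendar and reads
     * them back from the response, synchronously and asynchronously.
     */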
public void testPostCalendarEvent() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
// tag::post-calendar-event-request
PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", // <1>
events); // <2>
// end::post-calendar-event-request
// tag::post-calendar-event-execute
PostCalendarEventResponse response = client.machineLearning().postCalendarEvent(request, RequestOptions.DEFAULT);
// end::post-calendar-event-execute
// tag::post-calendar-event-response
List<ScheduledEvent> scheduledEvents = response.getScheduledEvents(); // <1>
// end::post-calendar-event-response
assertEquals(1, scheduledEvents.size());
}
{
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance());
            PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events);
// tag::post-calendar-event-execute-listener
ActionListener<PostCalendarEventResponse> listener =
new ActionListener<PostCalendarEventResponse>() {
@Override
public void onResponse(PostCalendarEventResponse postCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::post-calendar-event-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::post-calendar-event-execute-async
client.machineLearning().postCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::post-calendar-event-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteCalendarEvent() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
Calendar calendar = new Calendar("holidays",
Arrays.asList("job_1", "job_group_1", "job_2"),
"A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
List<ScheduledEvent> events = Arrays.asList(ScheduledEventTests.testInstance(calendar.getId(), null),
ScheduledEventTests.testInstance(calendar.getId(), null));
client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
GetCalendarEventsResponse getCalendarEventsResponse =
client.machineLearning().getCalendarEvents(new GetCalendarEventsRequest("holidays"), RequestOptions.DEFAULT);
{
// tag::delete-calendar-event-request
DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays", // <1>
"EventId"); // <2>
// end::delete-calendar-event-request
request = new DeleteCalendarEventRequest("holidays", getCalendarEventsResponse.events().get(0).getEventId());
// tag::delete-calendar-event-execute
AcknowledgedResponse response = client.machineLearning().deleteCalendarEvent(request, RequestOptions.DEFAULT);
// end::delete-calendar-event-execute
// tag::delete-calendar-event-response
boolean acknowledged = response.isAcknowledged(); // <1>
// end::delete-calendar-event-response
assertThat(acknowledged, is(true));
}
{
DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays",
getCalendarEventsResponse.events().get(1).getEventId());
// tag::delete-calendar-event-execute-listener
ActionListener<AcknowledgedResponse> listener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse deleteCalendarEventResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-calendar-event-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-calendar-event-execute-async
client.machineLearning().deleteCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-calendar-event-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
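    /**
     * Documentation test for the Get Data Frame Analytics API: stores the shared {@code DF_ANALYTICS_CONFIG} and
     * retrieves it by id, synchronously and asynchronously.
     */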
public void testGetDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::get-data-frame-analytics-request
GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config"); // <1>
// end::get-data-frame-analytics-request
// tag::get-data-frame-analytics-execute
GetDataFrameAnalyticsResponse response = client.machineLearning().getDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::get-data-frame-analytics-execute
// tag::get-data-frame-analytics-response
List<DataFrameAnalyticsConfig> configs = response.getAnalytics();
// end::get-data-frame-analytics-response
assertThat(configs, hasSize(1));
}
{
GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config");
// tag::get-data-frame-analytics-execute-listener
ActionListener<GetDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(GetDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-data-frame-analytics-execute-async
client.machineLearning().getDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetDataFrameAnalyticsStats() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::get-data-frame-analytics-stats-request
GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // <1>
// end::get-data-frame-analytics-stats-request
// tag::get-data-frame-analytics-stats-execute
GetDataFrameAnalyticsStatsResponse response =
client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT);
// end::get-data-frame-analytics-stats-execute
// tag::get-data-frame-analytics-stats-response
List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats();
// end::get-data-frame-analytics-stats-response
assertThat(stats, hasSize(1));
}
{
GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config");
// tag::get-data-frame-analytics-stats-execute-listener
ActionListener<GetDataFrameAnalyticsStatsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(GetDataFrameAnalyticsStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-data-frame-analytics-stats-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-data-frame-analytics-stats-execute-async
client.machineLearning().getDataFrameAnalyticsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-data-frame-analytics-stats-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
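    /**
     * Documentation test for the Put Data Frame Analytics API: builds source and destination configs, the available
     * analyses (outlier detection, classification, regression), analyzed fields and model memory limit, then stores
     * the resulting {@link DataFrameAnalyticsConfig}.
     */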
public void testPutDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
RestHighLevelClient client = highLevelClient();
{
// tag::put-data-frame-analytics-query-config
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
// end::put-data-frame-analytics-query-config
// tag::put-data-frame-analytics-source-config
DataFrameAnalyticsSource sourceConfig = DataFrameAnalyticsSource.builder() // <1>
.setIndex("put-test-source-index") // <2>
.setQueryConfig(queryConfig) // <3>
.setSourceFiltering(new FetchSourceContext(true,
new String[] { "included_field_1", "included_field_2" },
new String[] { "excluded_field" })) // <4>
.build();
// end::put-data-frame-analytics-source-config
// tag::put-data-frame-analytics-dest-config
DataFrameAnalyticsDest destConfig = DataFrameAnalyticsDest.builder() // <1>
.setIndex("put-test-dest-index") // <2>
.build();
// end::put-data-frame-analytics-dest-config
// tag::put-data-frame-analytics-outlier-detection-default
DataFrameAnalysis outlierDetection = OutlierDetection.createDefault(); // <1>
// end::put-data-frame-analytics-outlier-detection-default
// tag::put-data-frame-analytics-outlier-detection-customized
DataFrameAnalysis outlierDetectionCustomized = OutlierDetection.builder() // <1>
.setMethod(OutlierDetection.Method.DISTANCE_KNN) // <2>
.setNNeighbors(5) // <3>
.setFeatureInfluenceThreshold(0.1) // <4>
.setComputeFeatureInfluence(true) // <5>
.setOutlierFraction(0.05) // <6>
.setStandardizationEnabled(true) // <7>
.build();
// end::put-data-frame-analytics-outlier-detection-customized
// tag::put-data-frame-analytics-classification
DataFrameAnalysis classification = Classification.builder("my_dependent_variable") // <1>
.setLambda(1.0) // <2>
.setGamma(5.5) // <3>
.setEta(5.5) // <4>
.setMaxTrees(50) // <5>
.setFeatureBagFraction(0.4) // <6>
.setNumTopFeatureImportanceValues(3) // <7>
.setPredictionFieldName("my_prediction_field_name") // <8>
.setTrainingPercent(50.0) // <9>
.setRandomizeSeed(1234L) // <10>
.setClassAssignmentObjective(Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) // <11>
.setNumTopClasses(1) // <12>
.build();
// end::put-data-frame-analytics-classification
// tag::put-data-frame-analytics-regression
DataFrameAnalysis regression = org.elasticsearch.client.ml.dataframe.Regression.builder("my_dependent_variable") // <1>
.setLambda(1.0) // <2>
.setGamma(5.5) // <3>
.setEta(5.5) // <4>
.setMaxTrees(50) // <5>
.setFeatureBagFraction(0.4) // <6>
.setNumTopFeatureImportanceValues(3) // <7>
.setPredictionFieldName("my_prediction_field_name") // <8>
.setTrainingPercent(50.0) // <9>
.setRandomizeSeed(1234L) // <10>
.setLossFunction(Regression.LossFunction.MSE) // <11>
.setLossFunctionParameter(1.0) // <12>
.build();
// end::put-data-frame-analytics-regression
// tag::put-data-frame-analytics-analyzed-fields
FetchSourceContext analyzedFields =
new FetchSourceContext(
true,
new String[] { "included_field_1", "included_field_2" },
new String[] { "excluded_field" });
// end::put-data-frame-analytics-analyzed-fields
// tag::put-data-frame-analytics-config
DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
.setId("my-analytics-config") // <1>
.setSource(sourceConfig) // <2>
.setDest(destConfig) // <3>
.setAnalysis(outlierDetection) // <4>
.setAnalyzedFields(analyzedFields) // <5>
.setModelMemoryLimit(new ByteSizeValue(5, ByteSizeUnit.MB)) // <6>
.setDescription("this is an example description") // <7>
.build();
// end::put-data-frame-analytics-config
// tag::put-data-frame-analytics-request
PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config); // <1>
// end::put-data-frame-analytics-request
// tag::put-data-frame-analytics-execute
PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::put-data-frame-analytics-execute
// tag::put-data-frame-analytics-response
DataFrameAnalyticsConfig createdConfig = response.getConfig();
// end::put-data-frame-analytics-response
assertThat(createdConfig.getId(), equalTo("my-analytics-config"));
}
{
PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG);
// tag::put-data-frame-analytics-execute-listener
ActionListener<PutDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(PutDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-data-frame-analytics-execute-async
client.machineLearning().putDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
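    /**
     * Documentation test for the Delete Data Frame Analytics API: deletes the stored config, optionally forcing the
     * deletion and setting a timeout, and checks the acknowledged response.
     */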
public void testDeleteDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::delete-data-frame-analytics-request
DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config"); // <1>
// end::delete-data-frame-analytics-request
//tag::delete-data-frame-analytics-request-options
request.setForce(false); // <1>
request.setTimeout(TimeValue.timeValueMinutes(1)); // <2>
//end::delete-data-frame-analytics-request-options
// tag::delete-data-frame-analytics-execute
AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::delete-data-frame-analytics-execute
// tag::delete-data-frame-analytics-response
boolean acknowledged = response.isAcknowledged();
// end::delete-data-frame-analytics-response
assertThat(acknowledged, is(true));
}
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config");
// tag::delete-data-frame-analytics-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-data-frame-analytics-execute-async
client.machineLearning().deleteDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
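    /**
     * Documentation test for the Start Data Frame Analytics API: indexes a source document, stores the config, starts
     * the analytics job and waits for it to reach the STOPPED state before repeating the call asynchronously.
     */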
public void testStartDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
highLevelClient().index(
new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::start-data-frame-analytics-request
StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // <1>
// end::start-data-frame-analytics-request
// tag::start-data-frame-analytics-execute
StartDataFrameAnalyticsResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::start-data-frame-analytics-execute
// tag::start-data-frame-analytics-response
boolean acknowledged = response.isAcknowledged();
String node = response.getNode(); // <1>
// end::start-data-frame-analytics-response
assertThat(acknowledged, is(true));
assertThat(node, notNullValue());
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
{
StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config");
// tag::start-data-frame-analytics-execute-listener
ActionListener<StartDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(StartDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::start-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::start-data-frame-analytics-execute-async
client.machineLearning().startDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::start-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
}
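    /**
     * Documentation test for the Stop Data Frame Analytics API: stores a config and demonstrates the stop call,
     * including the {@code force} flag, synchronously and asynchronously.
     */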
public void testStopDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
highLevelClient().index(
new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::stop-data-frame-analytics-request
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config"); // <1>
request.setForce(false); // <2>
// end::stop-data-frame-analytics-request
// tag::stop-data-frame-analytics-execute
StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::stop-data-frame-analytics-execute
// tag::stop-data-frame-analytics-response
            boolean stopped = response.isStopped();
            // end::stop-data-frame-analytics-response
            assertThat(stopped, is(true));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
{
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config");
// tag::stop-data-frame-analytics-execute-listener
ActionListener<StopDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(StopDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::stop-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::stop-data-frame-analytics-execute-async
client.machineLearning().stopDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
}
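    /**
     * Documentation test for the Evaluate Data Frame API with binary soft classification: indexes labelled documents
     * with prediction probabilities, evaluates precision, recall, the confusion matrix and AUC-ROC, and checks the
     * confusion matrix counts against the indexed documents.
     */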
public void testEvaluateDataFrame() throws Exception {
String indexName = "evaluate-test-index";
CreateIndexRequest createIndexRequest =
new CreateIndexRequest(indexName)
.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("label")
.field("type", "keyword")
.endObject()
.startObject("p")
.field("type", "double")
.endObject()
.endObject()
.endObject());
BulkRequest bulkRequest =
new BulkRequest(indexName)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.1)) // #0
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.2)) // #1
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.3)) // #2
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.4)) // #3
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.7)) // #4
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.2)) // #5
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.3)) // #6
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.4)) // #7
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.8)) // #8
.add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.9)); // #9
RestHighLevelClient client = highLevelClient();
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::evaluate-data-frame-evaluation-softclassification
Evaluation evaluation =
new BinarySoftClassification( // <1>
"label", // <2>
"p", // <3>
// Evaluation metrics // <4>
PrecisionMetric.at(0.4, 0.5, 0.6), // <5>
RecallMetric.at(0.5, 0.7), // <6>
ConfusionMatrixMetric.at(0.5), // <7>
AucRocMetric.withCurve()); // <8>
// end::evaluate-data-frame-evaluation-softclassification
// tag::evaluate-data-frame-request
EvaluateDataFrameRequest request =
new EvaluateDataFrameRequest( // <1>
indexName, // <2>
new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), // <3>
evaluation); // <4>
// end::evaluate-data-frame-request
// tag::evaluate-data-frame-execute
EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
// end::evaluate-data-frame-execute
// tag::evaluate-data-frame-response
List<EvaluationMetric.Result> metrics = response.getMetrics(); // <1>
// end::evaluate-data-frame-response
// tag::evaluate-data-frame-results-softclassification
PrecisionMetric.Result precisionResult = response.getMetricByName(PrecisionMetric.NAME); // <1>
double precision = precisionResult.getScoreByThreshold("0.4"); // <2>
ConfusionMatrixMetric.Result confusionMatrixResult = response.getMetricByName(ConfusionMatrixMetric.NAME); // <3>
ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5"); // <4>
// end::evaluate-data-frame-results-softclassification
assertThat(
metrics.stream().map(EvaluationMetric.Result::getMetricName).collect(Collectors.toList()),
containsInAnyOrder(PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME, AucRocMetric.NAME));
assertThat(precision, closeTo(0.6, 1e-9));
assertThat(confusionMatrix.getTruePositives(), equalTo(2L)); // docs #8 and #9
assertThat(confusionMatrix.getFalsePositives(), equalTo(1L)); // doc #4
assertThat(confusionMatrix.getTrueNegatives(), equalTo(4L)); // docs #0, #1, #2 and #3
assertThat(confusionMatrix.getFalseNegatives(), equalTo(3L)); // docs #5, #6 and #7
}
{
EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(
indexName,
new QueryConfig(QueryBuilders.termQuery("dataset", "blue")),
new BinarySoftClassification(
"label",
"p",
PrecisionMetric.at(0.4, 0.5, 0.6),
RecallMetric.at(0.5, 0.7),
ConfusionMatrixMetric.at(0.5),
AucRocMetric.withCurve()));
// tag::evaluate-data-frame-execute-listener
ActionListener<EvaluateDataFrameResponse> listener = new ActionListener<>() {
@Override
public void onResponse(EvaluateDataFrameResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::evaluate-data-frame-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::evaluate-data-frame-execute-async
client.machineLearning().evaluateDataFrameAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::evaluate-data-frame-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
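    /**
     * Documentation test for the Evaluate Data Frame API with multiclass classification: indexes actual/predicted
     * class pairs and verifies the accuracy, precision, recall and multiclass confusion matrix returned by the
     * evaluation.
     */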
public void testEvaluateDataFrame_Classification() throws Exception {
String indexName = "evaluate-classification-test-index";
CreateIndexRequest createIndexRequest =
new CreateIndexRequest(indexName)
.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("actual_class")
.field("type", "keyword")
.endObject()
.startObject("predicted_class")
.field("type", "keyword")
.endObject()
.endObject()
.endObject());
BulkRequest bulkRequest =
new BulkRequest(indexName)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #0
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #1
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #2
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "dog")) // #3
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "fox")) // #4
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "cat")) // #5
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #6
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #7
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #8
.add(new IndexRequest().source(XContentType.JSON, "actual_class", "ant", "predicted_class", "cat")); // #9
RestHighLevelClient client = highLevelClient();
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::evaluate-data-frame-evaluation-classification
Evaluation evaluation =
new org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification( // <1>
"actual_class", // <2>
"predicted_class", // <3>
// Evaluation metrics // <4>
new AccuracyMetric(), // <5>
new org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric(), // <6>
new org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric(), // <7>
new MulticlassConfusionMatrixMetric(3)); // <8>
// end::evaluate-data-frame-evaluation-classification
EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation);
EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
// tag::evaluate-data-frame-results-classification
AccuracyMetric.Result accuracyResult = response.getMetricByName(AccuracyMetric.NAME); // <1>
double accuracy = accuracyResult.getOverallAccuracy(); // <2>
org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.Result precisionResult =
response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME); // <3>
double precision = precisionResult.getAvgPrecision(); // <4>
org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.Result recallResult =
response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME); // <5>
double recall = recallResult.getAvgRecall(); // <6>
MulticlassConfusionMatrixMetric.Result multiclassConfusionMatrix =
response.getMetricByName(MulticlassConfusionMatrixMetric.NAME); // <7>
List<ActualClass> confusionMatrix = multiclassConfusionMatrix.getConfusionMatrix(); // <8>
long otherClassesCount = multiclassConfusionMatrix.getOtherActualClassCount(); // <9>
// end::evaluate-data-frame-results-classification
assertThat(accuracyResult.getMetricName(), equalTo(AccuracyMetric.NAME));
assertThat(accuracy, equalTo(0.6));
assertThat(
precisionResult.getMetricName(),
equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME));
assertThat(precision, equalTo(0.675));
assertThat(
recallResult.getMetricName(),
equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME));
assertThat(recall, equalTo(0.45));
assertThat(multiclassConfusionMatrix.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME));
assertThat(
confusionMatrix,
equalTo(
List.of(
new ActualClass(
"ant",
1L,
List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 0L)),
0L),
new ActualClass(
"cat",
5L,
List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 3L), new PredictedClass("dog", 1L)),
1L),
new ActualClass(
"dog",
4L,
List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 3L)),
0L))));
assertThat(otherClassesCount, equalTo(0L));
}
}
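    /**
     * Documentation test for the Evaluate Data Frame API with regression: indexes actual/predicted value pairs and
     * verifies the mean squared error and R-squared metrics.
     */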
public void testEvaluateDataFrame_Regression() throws Exception {
String indexName = "evaluate-classification-test-index";
CreateIndexRequest createIndexRequest =
new CreateIndexRequest(indexName)
.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("actual_value")
.field("type", "double")
.endObject()
.startObject("predicted_value")
.field("type", "double")
.endObject()
.endObject()
.endObject());
BulkRequest bulkRequest =
new BulkRequest(indexName)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 1.0)) // #0
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 0.9)) // #1
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.0, "predicted_value", 2.0)) // #2
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.5, "predicted_value", 1.4)) // #3
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.2, "predicted_value", 1.3)) // #4
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.7, "predicted_value", 2.0)) // #5
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.1, "predicted_value", 2.1)) // #6
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.7)) // #7
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 0.8, "predicted_value", 1.0)) // #8
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.4)); // #9
RestHighLevelClient client = highLevelClient();
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::evaluate-data-frame-evaluation-regression
Evaluation evaluation =
new org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression( // <1>
"actual_value", // <2>
"predicted_value", // <3>
// Evaluation metrics // <4>
new MeanSquaredErrorMetric(), // <5>
new RSquaredMetric()); // <6>
// end::evaluate-data-frame-evaluation-regression
EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation);
EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
// tag::evaluate-data-frame-results-regression
MeanSquaredErrorMetric.Result meanSquaredErrorResult = response.getMetricByName(MeanSquaredErrorMetric.NAME); // <1>
double meanSquaredError = meanSquaredErrorResult.getError(); // <2>
RSquaredMetric.Result rSquaredResult = response.getMetricByName(RSquaredMetric.NAME); // <3>
double rSquared = rSquaredResult.getValue(); // <4>
// end::evaluate-data-frame-results-regression
assertThat(meanSquaredError, closeTo(0.021, 1e-3));
assertThat(rSquared, closeTo(0.941, 1e-3));
}
}
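    /**
     * Documentation test for the Explain Data Frame Analytics API: explains either an existing job id or an inline
     * config, returning the field selection and memory estimation for the analysis.
     */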
public void testExplainDataFrameAnalytics() throws Exception {
createIndex("explain-df-test-source-index");
BulkRequest bulkRequest =
new BulkRequest("explain-df-test-source-index")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < 10; ++i) {
bulkRequest.add(new IndexRequest().source(XContentType.JSON, "timestamp", 123456789L, "total", 10L));
}
RestHighLevelClient client = highLevelClient();
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::explain-data-frame-analytics-id-request
ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest("existing_job_id"); // <1>
// end::explain-data-frame-analytics-id-request
// tag::explain-data-frame-analytics-config-request
DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
.setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build())
.setAnalysis(OutlierDetection.createDefault())
.build();
request = new ExplainDataFrameAnalyticsRequest(config); // <1>
// end::explain-data-frame-analytics-config-request
// tag::explain-data-frame-analytics-execute
ExplainDataFrameAnalyticsResponse response = client.machineLearning().explainDataFrameAnalytics(request,
RequestOptions.DEFAULT);
// end::explain-data-frame-analytics-execute
// tag::explain-data-frame-analytics-response
List<FieldSelection> fieldSelection = response.getFieldSelection(); // <1>
MemoryEstimation memoryEstimation = response.getMemoryEstimation(); // <2>
// end::explain-data-frame-analytics-response
assertThat(fieldSelection.size(), equalTo(2));
assertThat(fieldSelection.stream().map(FieldSelection::getName).collect(Collectors.toList()), contains("timestamp", "total"));
ByteSizeValue expectedMemoryWithoutDisk = memoryEstimation.getExpectedMemoryWithoutDisk(); // <1>
ByteSizeValue expectedMemoryWithDisk = memoryEstimation.getExpectedMemoryWithDisk(); // <2>
            // We are pretty liberal here as this test does not aim at verifying concrete numbers but rather the end-to-end user workflow.
ByteSizeValue lowerBound = new ByteSizeValue(1, ByteSizeUnit.KB);
ByteSizeValue upperBound = new ByteSizeValue(1, ByteSizeUnit.GB);
assertThat(expectedMemoryWithoutDisk, allOf(greaterThan(lowerBound), lessThan(upperBound)));
assertThat(expectedMemoryWithDisk, allOf(greaterThan(lowerBound), lessThan(upperBound)));
}
{
DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
.setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build())
.setAnalysis(OutlierDetection.createDefault())
.build();
ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest(config);
// tag::explain-data-frame-analytics-execute-listener
ActionListener<ExplainDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(ExplainDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::explain-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::explain-data-frame-analytics-execute-async
client.machineLearning().explainDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::explain-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
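    /**
     * Documentation test for the Get Trained Models API: stores a model and retrieves it by id, showing the paging,
     * definition, tag and allow-no-match options on {@link GetTrainedModelsRequest}.
     */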
public void testGetTrainedModels() throws Exception {
putTrainedModel("my-trained-model");
RestHighLevelClient client = highLevelClient();
{
// tag::get-trained-models-request
GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model") // <1>
.setPageParams(new PageParams(0, 1)) // <2>
.setIncludeDefinition(false) // <3>
.setDecompressDefinition(false) // <4>
.setAllowNoMatch(true) // <5>
.setTags("regression") // <6>
.setForExport(false); // <7>
// end::get-trained-models-request
request.setTags((List<String>)null);
// tag::get-trained-models-execute
GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT);
// end::get-trained-models-execute
// tag::get-trained-models-response
List<TrainedModelConfig> models = response.getTrainedModels();
// end::get-trained-models-response
assertThat(models, hasSize(1));
}
{
GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model");
// tag::get-trained-models-execute-listener
ActionListener<GetTrainedModelsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(GetTrainedModelsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-trained-models-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-trained-models-execute-async
client.machineLearning().getTrainedModelsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-trained-models-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
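    /**
     * Documentation test for the Put Trained Model API: builds a {@link TrainedModelConfig} with a definition, input
     * fields, metadata, tags and inference config, stores it, and deletes it again after each execution.
     */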
public void testPutTrainedModel() throws Exception {
TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build();
// tag::put-trained-model-config
TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder()
.setDefinition(definition) // <1>
.setCompressedDefinition(InferenceToXContentCompressor.deflate(definition)) // <2>
.setModelId("my-new-trained-model") // <3>
.setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) // <4>
.setDescription("test model") // <5>
.setMetadata(new HashMap<>()) // <6>
.setTags("my_regression_models") // <7>
.setInferenceConfig(new RegressionConfig("value", 0)) // <8>
.build();
// end::put-trained-model-config
trainedModelConfig = TrainedModelConfig.builder()
.setDefinition(definition)
.setInferenceConfig(new RegressionConfig(null, null))
.setModelId("my-new-trained-model")
.setInput(new TrainedModelInput("col1", "col2", "col3", "col4"))
.setDescription("test model")
.setMetadata(new HashMap<>())
.setTags("my_regression_models")
.build();
RestHighLevelClient client = highLevelClient();
{
// tag::put-trained-model-request
PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // <1>
// end::put-trained-model-request
// tag::put-trained-model-execute
PutTrainedModelResponse response = client.machineLearning().putTrainedModel(request, RequestOptions.DEFAULT);
// end::put-trained-model-execute
// tag::put-trained-model-response
TrainedModelConfig model = response.getResponse();
// end::put-trained-model-response
assertThat(model.getModelId(), equalTo(trainedModelConfig.getModelId()));
highLevelClient().machineLearning()
.deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT);
}
{
PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig);
// tag::put-trained-model-execute-listener
ActionListener<PutTrainedModelResponse> listener = new ActionListener<>() {
@Override
public void onResponse(PutTrainedModelResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-trained-model-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-trained-model-execute-async
client.machineLearning().putTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-trained-model-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
highLevelClient().machineLearning()
.deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT);
}
}
public void testGetTrainedModelsStats() throws Exception {
putTrainedModel("my-trained-model");
RestHighLevelClient client = highLevelClient();
{
// tag::get-trained-models-stats-request
GetTrainedModelsStatsRequest request =
new GetTrainedModelsStatsRequest("my-trained-model") // <1>
.setPageParams(new PageParams(0, 1)) // <2>
.setAllowNoMatch(true); // <3>
// end::get-trained-models-stats-request
// tag::get-trained-models-stats-execute
GetTrainedModelsStatsResponse response =
client.machineLearning().getTrainedModelsStats(request, RequestOptions.DEFAULT);
// end::get-trained-models-stats-execute
// tag::get-trained-models-stats-response
List<TrainedModelStats> models = response.getTrainedModelStats();
// end::get-trained-models-stats-response
assertThat(models, hasSize(1));
}
{
GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model");
// tag::get-trained-models-stats-execute-listener
ActionListener<GetTrainedModelsStatsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(GetTrainedModelsStatsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-trained-models-stats-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-trained-models-stats-execute-async
client.machineLearning()
.getTrainedModelsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-trained-models-stats-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteTrainedModel() throws Exception {
RestHighLevelClient client = highLevelClient();
{
putTrainedModel("my-trained-model");
// tag::delete-trained-model-request
DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // <1>
// end::delete-trained-model-request
// tag::delete-trained-model-execute
AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT);
// end::delete-trained-model-execute
// tag::delete-trained-model-response
boolean deleted = response.isAcknowledged();
// end::delete-trained-model-response
assertThat(deleted, is(true));
}
{
putTrainedModel("my-trained-model");
DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model");
// tag::delete-trained-model-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-trained-model-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-trained-model-execute-async
client.machineLearning().deleteTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-trained-model-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testCreateFilter() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// tag::put-filter-config
MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") // <1>
.setDescription("A list of safe domains") // <2>
.setItems("*.google.com", "wikipedia.org"); // <3>
// end::put-filter-config
// tag::put-filter-request
PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); // <1>
// end::put-filter-request
// tag::put-filter-execute
PutFilterResponse response = client.machineLearning().putFilter(request, RequestOptions.DEFAULT);
// end::put-filter-execute
// tag::put-filter-response
MlFilter createdFilter = response.getResponse(); // <1>
// end::put-filter-response
assertThat(createdFilter.getId(), equalTo("my_safe_domains"));
}
{
MlFilter.Builder filterBuilder = MlFilter.builder("safe_domains_async")
.setDescription("A list of safe domains")
.setItems("*.google.com", "wikipedia.org");
PutFilterRequest request = new PutFilterRequest(filterBuilder.build());
// tag::put-filter-execute-listener
ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() {
@Override
public void onResponse(PutFilterResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-filter-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-filter-execute-async
client.machineLearning().putFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-filter-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetFilters() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String filterId = "get-filter-doc-test";
MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org");
client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT);
{
// tag::get-filters-request
GetFiltersRequest request = new GetFiltersRequest(); // <1>
// end::get-filters-request
// tag::get-filters-filter-id
request.setFilterId("get-filter-doc-test"); // <1>
// end::get-filters-filter-id
// tag::get-filters-page-params
request.setFrom(100); // <1>
request.setSize(200); // <2>
// end::get-filters-page-params
request.setFrom(null);
request.setSize(null);
// tag::get-filters-execute
GetFiltersResponse response = client.machineLearning().getFilter(request, RequestOptions.DEFAULT);
// end::get-filters-execute
// tag::get-filters-response
long count = response.count(); // <1>
List<MlFilter> filters = response.filters(); // <2>
// end::get-filters-response
assertEquals(1, filters.size());
}
{
GetFiltersRequest request = new GetFiltersRequest();
request.setFilterId(filterId);
// tag::get-filters-execute-listener
ActionListener<GetFiltersResponse> listener = new ActionListener<GetFiltersResponse>() {
@Override
public void onResponse(GetFiltersResponse getfiltersResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-filters-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-filters-execute-async
client.machineLearning().getFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-filters-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testUpdateFilter() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String filterId = "update-filter-doc-test";
MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org");
client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT);
{
// tag::update-filter-request
UpdateFilterRequest request = new UpdateFilterRequest(filterId); // <1>
// end::update-filter-request
// tag::update-filter-description
request.setDescription("my new description"); // <1>
// end::update-filter-description
// tag::update-filter-add-items
request.setAddItems(Arrays.asList("*.bing.com", "*.elastic.co")); // <1>
// end::update-filter-add-items
// tag::update-filter-remove-items
request.setRemoveItems(Arrays.asList("*.google.com")); // <1>
// end::update-filter-remove-items
// tag::update-filter-execute
PutFilterResponse response = client.machineLearning().updateFilter(request, RequestOptions.DEFAULT);
// end::update-filter-execute
// tag::update-filter-response
MlFilter updatedFilter = response.getResponse(); // <1>
// end::update-filter-response
assertEquals(request.getDescription(), updatedFilter.getDescription());
}
{
UpdateFilterRequest request = new UpdateFilterRequest(filterId);
// tag::update-filter-execute-listener
ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() {
@Override
public void onResponse(PutFilterResponse putFilterResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-filter-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-filter-execute-async
client.machineLearning().updateFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::update-filter-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteFilter() throws Exception {
RestHighLevelClient client = highLevelClient();
String filterId = createFilter(client);
{
// tag::delete-filter-request
DeleteFilterRequest request = new DeleteFilterRequest(filterId); // <1>
// end::delete-filter-request
// tag::delete-filter-execute
AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT);
// end::delete-filter-execute
// tag::delete-filter-response
boolean isAcknowledged = response.isAcknowledged(); // <1>
// end::delete-filter-response
assertTrue(isAcknowledged);
}
filterId = createFilter(client);
{
DeleteFilterRequest request = new DeleteFilterRequest(filterId);
// tag::delete-filter-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-filter-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-filter-execute-async
client.machineLearning().deleteFilterAsync(request, RequestOptions.DEFAULT, listener); //<1>
// end::delete-filter-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetMlInfo() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// tag::get-ml-info-request
MlInfoRequest request = new MlInfoRequest(); // <1>
// end::get-ml-info-request
// tag::get-ml-info-execute
MlInfoResponse response = client.machineLearning()
.getMlInfo(request, RequestOptions.DEFAULT);
// end::get-ml-info-execute
// tag::get-ml-info-response
final Map<String, Object> info = response.getInfo();// <1>
// end::get-ml-info-response
assertTrue(info.containsKey("defaults"));
assertTrue(info.containsKey("limits"));
}
{
MlInfoRequest request = new MlInfoRequest();
// tag::get-ml-info-execute-listener
ActionListener<MlInfoResponse> listener = new ActionListener<MlInfoResponse>() {
@Override
public void onResponse(MlInfoResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-ml-info-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-ml-info-execute-async
client.machineLearning()
.getMlInfoAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-ml-info-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testSetUpgradeMode() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// tag::set-upgrade-mode-request
SetUpgradeModeRequest request = new SetUpgradeModeRequest(true); // <1>
request.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
// end::set-upgrade-mode-request
// Set to false so that the cluster setting does not have to be unset at the end of the test.
request.setEnabled(false);
// tag::set-upgrade-mode-execute
AcknowledgedResponse acknowledgedResponse = client.machineLearning().setUpgradeMode(request, RequestOptions.DEFAULT);
// end::set-upgrade-mode-execute
// tag::set-upgrade-mode-response
boolean acknowledged = acknowledgedResponse.isAcknowledged(); // <1>
// end::set-upgrade-mode-response
assertThat(acknowledged, is(true));
}
{
SetUpgradeModeRequest request = new SetUpgradeModeRequest(false);
// tag::set-upgrade-mode-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::set-upgrade-mode-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::set-upgrade-mode-execute-async
client.machineLearning()
.setUpgradeModeAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::set-upgrade-mode-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testEstimateModelMemory() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// tag::estimate-model-memory-request
Detector.Builder detectorBuilder = new Detector.Builder()
.setFunction("count")
.setPartitionFieldName("status");
AnalysisConfig.Builder analysisConfigBuilder =
new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build()))
.setBucketSpan(TimeValue.timeValueMinutes(10))
.setInfluencers(Collections.singletonList("src_ip"));
EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfigBuilder.build()); // <1>
request.setOverallCardinality(Collections.singletonMap("status", 50L)); // <2>
request.setMaxBucketCardinality(Collections.singletonMap("src_ip", 30L)); // <3>
// end::estimate-model-memory-request
// tag::estimate-model-memory-execute
EstimateModelMemoryResponse estimateModelMemoryResponse =
client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT);
// end::estimate-model-memory-execute
// tag::estimate-model-memory-response
ByteSizeValue modelMemoryEstimate = estimateModelMemoryResponse.getModelMemoryEstimate(); // <1>
long estimateInBytes = modelMemoryEstimate.getBytes();
// end::estimate-model-memory-response
assertThat(estimateInBytes, greaterThan(10000000L));
}
{
AnalysisConfig analysisConfig =
AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())).build();
EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
// tag::estimate-model-memory-execute-listener
ActionListener<EstimateModelMemoryResponse> listener = new ActionListener<EstimateModelMemoryResponse>() {
@Override
public void onResponse(EstimateModelMemoryResponse estimateModelMemoryResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::estimate-model-memory-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::estimate-model-memory-execute-async
client.machineLearning()
.estimateModelMemoryAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::estimate-model-memory-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
private String createFilter(RestHighLevelClient client) throws IOException {
MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains")
.setDescription("A list of safe domains")
.setItems("*.google.com", "wikipedia.org");
PutFilterRequest putFilterRequest = new PutFilterRequest(filterBuilder.build());
PutFilterResponse putFilterResponse = client.machineLearning().putFilter(putFilterRequest, RequestOptions.DEFAULT);
MlFilter createdFilter = putFilterResponse.getResponse();
assertThat(createdFilter.getId(), equalTo("my_safe_domains"));
return createdFilter.getId();
}
private void createIndex(String indexName) throws IOException {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
}
private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException {
GetDataFrameAnalyticsStatsResponse statsResponse =
highLevelClient().machineLearning().getDataFrameAnalyticsStats(
new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT);
assertThat(statsResponse.getAnalyticsStats(), hasSize(1));
DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0);
return stats.getState();
}
private void putTrainedModel(String modelId) throws IOException {
TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build();
TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder()
.setDefinition(definition)
.setModelId(modelId)
.setInferenceConfig(new RegressionConfig("value", 0))
.setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3", "col4")))
.setDescription("test model")
.build();
highLevelClient().machineLearning().putTrainedModel(new PutTrainedModelRequest(trainedModelConfig), RequestOptions.DEFAULT);
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers());
}
private static final DataFrameAnalyticsConfig DF_ANALYTICS_CONFIG =
DataFrameAnalyticsConfig.builder()
.setId("my-analytics-config")
.setSource(DataFrameAnalyticsSource.builder()
.setIndex("put-test-source-index")
.build())
.setDest(DataFrameAnalyticsDest.builder()
.setIndex("put-test-dest-index")
.build())
.setAnalysis(OutlierDetection.createDefault())
.build();
}
| uschindler/elasticsearch | client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java | Java | apache-2.0 | 199,205 |
package org.tmarciniak.mtp.model;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
 * @author tomasz.marciniak
 *
 * JPA entity representing a trade message. Note that, despite being declared final, the class is
 * mutable: every field is exposed through a setter.
 */
@Entity
@Table
public final class TradeMessage implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "trade_message_id_seq")
@SequenceGenerator(name = "trade_message_id_seq", sequenceName = "trade_message_id_seq", allocationSize = 1)
private long id;
private String userId;
private String currencyFrom;
private String currencyTo;
private BigDecimal amountBuy;
private BigDecimal amountSell;
private BigDecimal rate;
private Date timePlaced;
private String originatingCountry;
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getCurrencyFrom() {
return currencyFrom;
}
public void setCurrencyFrom(String currencyFrom) {
this.currencyFrom = currencyFrom;
}
public String getCurrencyTo() {
return currencyTo;
}
public void setCurrencyTo(String currencyTo) {
this.currencyTo = currencyTo;
}
public BigDecimal getAmountBuy() {
return amountBuy;
}
public void setAmountBuy(BigDecimal amountBuy) {
this.amountBuy = amountBuy;
}
public BigDecimal getAmountSell() {
return amountSell;
}
public void setAmountSell(BigDecimal amountSell) {
this.amountSell = amountSell;
}
public BigDecimal getRate() {
return rate;
}
public void setRate(BigDecimal rate) {
this.rate = rate;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((amountBuy == null) ? 0 : amountBuy.hashCode());
result = prime * result
+ ((amountSell == null) ? 0 : amountSell.hashCode());
result = prime * result
+ ((currencyFrom == null) ? 0 : currencyFrom.hashCode());
result = prime * result
+ ((currencyTo == null) ? 0 : currencyTo.hashCode());
result = prime * result + (int) (id ^ (id >>> 32));
result = prime
* result
+ ((originatingCountry == null) ? 0 : originatingCountry
.hashCode());
result = prime * result + ((rate == null) ? 0 : rate.hashCode());
result = prime * result
+ ((timePlaced == null) ? 0 : timePlaced.hashCode());
result = prime * result + ((userId == null) ? 0 : userId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
TradeMessage other = (TradeMessage) obj;
if (amountBuy == null) {
if (other.amountBuy != null)
return false;
} else if (!amountBuy.equals(other.amountBuy))
return false;
if (amountSell == null) {
if (other.amountSell != null)
return false;
} else if (!amountSell.equals(other.amountSell))
return false;
if (currencyFrom == null) {
if (other.currencyFrom != null)
return false;
} else if (!currencyFrom.equals(other.currencyFrom))
return false;
if (currencyTo == null) {
if (other.currencyTo != null)
return false;
} else if (!currencyTo.equals(other.currencyTo))
return false;
if (id != other.id)
return false;
if (originatingCountry == null) {
if (other.originatingCountry != null)
return false;
} else if (!originatingCountry.equals(other.originatingCountry))
return false;
if (rate == null) {
if (other.rate != null)
return false;
} else if (!rate.equals(other.rate))
return false;
if (timePlaced == null) {
if (other.timePlaced != null)
return false;
} else if (!timePlaced.equals(other.timePlaced))
return false;
if (userId == null) {
if (other.userId != null)
return false;
} else if (!userId.equals(other.userId))
return false;
return true;
}
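	// The getter below hands out a defensive copy of the stored java.util.Date so callers cannot
	// mutate the entity's timestamp through the returned reference.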
public Date getTimePlaced() {
return new Date(timePlaced.getTime());
}
public void setTimePlaced(Date timePlaced) {
this.timePlaced = timePlaced;
}
public String getOriginatingCountry() {
return originatingCountry;
}
public void setOriginatingCountry(String originatingCountry) {
this.originatingCountry = originatingCountry;
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this);
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
| tmarciniak/cfmtp | mtp-core/src/main/java/org/tmarciniak/mtp/model/TradeMessage.java | Java | apache-2.0 | 4,895 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.polly;
import javax.annotation.Generated;
import com.amazonaws.services.polly.model.*;
/**
* Abstract implementation of {@code AmazonPollyAsync}. Convenient method forms pass through to the corresponding
* overload that takes a request object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonPollyAsync extends AbstractAmazonPolly implements AmazonPollyAsync {
protected AbstractAmazonPollyAsync() {
}
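    // Minimal usage sketch (illustrative, not part of the generated SDK): a test double or adapter can
    // subclass this type and override only the operations it actually needs, e.g.
    //
    //   AmazonPollyAsync stub = new AbstractAmazonPollyAsync() {
    //       @Override
    //       public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(
    //               DescribeVoicesRequest request,
    //               com.amazonaws.handlers.AsyncHandler<DescribeVoicesRequest, DescribeVoicesResult> asyncHandler) {
    //           return java.util.concurrent.CompletableFuture.completedFuture(new DescribeVoicesResult());
    //       }
    //   };
    //
    // Every operation that is not overridden keeps the UnsupportedOperationException behaviour below.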
@Override
public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request) {
return deleteLexiconAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteLexiconRequest, DeleteLexiconResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request) {
return describeVoicesAsync(request, null);
}
@Override
public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request,
com.amazonaws.handlers.AsyncHandler<DescribeVoicesRequest, DescribeVoicesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request) {
return getLexiconAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request,
com.amazonaws.handlers.AsyncHandler<GetLexiconRequest, GetLexiconResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request) {
return listLexiconsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request,
com.amazonaws.handlers.AsyncHandler<ListLexiconsRequest, ListLexiconsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request) {
return putLexiconAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request,
com.amazonaws.handlers.AsyncHandler<PutLexiconRequest, PutLexiconResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request) {
return synthesizeSpeechAsync(request, null);
}
@Override
public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request,
com.amazonaws.handlers.AsyncHandler<SynthesizeSpeechRequest, SynthesizeSpeechResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
}
| dagnir/aws-sdk-java | aws-java-sdk-polly/src/main/java/com/amazonaws/services/polly/AbstractAmazonPollyAsync.java | Java | apache-2.0 | 4,021 |
package dk.itu.pervasive.mobile.data;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import dk.itu.pervasive.mobile.R;
import java.io.FileOutputStream;
/**
* @author Tony Beltramelli www.tonybeltramelli.com
*/
public class DataManager
{
public static final String PREF_KEY_SAVE = "save";
public static final String PREF_KEY_USERNAME = "username";
public static final String PREF_KEY_SURFACE_ADDRESS = "surfaceAddress";
public static final String PREF_KEY_STICKER_ID = "stickerID";
private static DataManager _instance = null;
private Activity _context;
private String _username = "";
private String _surfaceAddress = "";
private String _stickerID = "";
private DataManager()
{
}
public static DataManager getInstance()
{
if (_instance == null)
{
_instance = new DataManager();
}
return _instance;
}
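    // Typical usage (sketch): an Activity registers itself once, after which the cached preference
    // values are available anywhere in the app:
    //
    //   DataManager.getInstance().setContext(this);              // loads the shared preferences
    //   String user = DataManager.getInstance().getUsername();
    //
    // getInstance() is not synchronized, so the first call is assumed to happen on the main thread.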
public void saveData()
{
_username = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_USERNAME, _context.getResources().getString(R.string.preference_user_name_default));
_surfaceAddress = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_SURFACE_ADDRESS, _context.getResources().getString(R.string.preference_surface_address_default));
_stickerID = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_STICKER_ID, _context.getResources().getString(R.string.preference_sticker_id_default));
Log.wtf("save data", _username + ", " + _surfaceAddress + ", " + _stickerID);
}
    public String getPathFromUri(Uri uri)
    {
        String[] projection = { MediaStore.Images.Media.DATA };
        Cursor cursor = _context.getContentResolver().query(uri, projection, null, null, null);
        int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
        cursor.moveToFirst();
        String path = cursor.getString(column_index);
        cursor.close(); // release the cursor instead of leaking it
        return path;
    }
public void saveImage(String imageName, byte[] bytes)
{
FileOutputStream fos;
try
{
fos = _context.openFileOutput(imageName, Context.MODE_PRIVATE);
fos.write(bytes);
fos.close();
} catch (Exception e)
{
e.printStackTrace();
}
}
public void displayMessage(final String message)
{
_context.runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(_context, message, Toast.LENGTH_SHORT).show();
}
});
}
public String getUsername()
{
return _username;
}
public String getSurfaceAddress()
{
return _surfaceAddress;
}
public String getStickerID()
{
return _stickerID;
}
public void setContext(Activity context)
{
_context = context;
saveData();
}
public Context getContext(){
return _context;
}
}
| tonybeltramelli/Ubiquitous-Media-Sharing-Surface | dk.itu.pervasive.mobile.android/src/dk/itu/pervasive/mobile/data/DataManager.java | Java | apache-2.0 | 2,863 |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.struts.actionforms;
import static org.mifos.framework.util.helpers.DateUtils.dateFallsBeforeDate;
import static org.mifos.framework.util.helpers.DateUtils.getDateAsSentFromBrowser;
import java.sql.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.joda.time.LocalDate;
import org.mifos.accounts.servicefacade.AccountTypeDto;
import org.mifos.accounts.util.helpers.AccountConstants;
import org.mifos.application.admin.servicefacade.InvalidDateException;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.config.AccountingRules;
import org.mifos.framework.business.util.helpers.MethodNameConstants;
import org.mifos.framework.struts.actionforms.BaseActionForm;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.DoubleConversionResult;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.security.login.util.helpers.LoginConstants;
import org.mifos.security.util.ActivityMapper;
import org.mifos.security.util.UserContext;
public class AccountApplyPaymentActionForm extends BaseActionForm {
private String input;
private String transactionDateDD;
private String transactionDateMM;
private String transactionDateYY;
private String amount;
private Short currencyId;
private String receiptId;
private String receiptDateDD;
private String receiptDateMM;
private String receiptDateYY;
/*
* Among other things, this field holds the PaymentTypes value for disbursements.
*/
private String paymentTypeId;
private String waiverInterest;
private String globalAccountNum;
private String accountId;
private String prdOfferingName;
private boolean amountCannotBeZero = true;
private java.util.Date lastPaymentDate;
private String accountForTransfer;
private Short transferPaymentTypeId;
public boolean amountCannotBeZero() {
return this.amountCannotBeZero;
}
public void setAmountCannotBeZero(boolean amountCannotBeZero) {
this.amountCannotBeZero = amountCannotBeZero;
}
public String getPrdOfferingName() {
return prdOfferingName;
}
public void setPrdOfferingName(String prdOfferingName) {
this.prdOfferingName = prdOfferingName;
}
public String getAmount() {
return amount;
}
public void setAmount(String amount) {
this.amount = amount;
}
public String getInput() {
return input;
}
@Override
public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) {
String methodCalled = request.getParameter(MethodNameConstants.METHOD);
ActionErrors errors = new ActionErrors();
if (methodCalled != null && methodCalled.equals("preview")) {
validateTransfer(errors);
validateTransactionDate(errors);
validatePaymentType(errors);
validateReceiptDate(errors);
String accountType = (String) request.getSession().getAttribute(Constants.ACCOUNT_TYPE);
validateAccountType(errors, accountType);
validateAmount(errors);
validateModeOfPaymentSecurity(request, errors);
}
if (!errors.isEmpty()) {
request.setAttribute(Globals.ERROR_KEY, errors);
request.setAttribute("methodCalled", methodCalled);
}
return errors;
}
private void validateModeOfPaymentSecurity(HttpServletRequest request, ActionErrors errors){
UserContext userContext = (UserContext) SessionUtils.getAttribute(Constants.USER_CONTEXT_KEY, request.getSession());
if(getPaymentTypeId().equals("4") && !ActivityMapper.getInstance().isModeOfPaymentSecurity(userContext)){
errors.add(AccountConstants.LOAN_TRANSFER_PERMISSION, new ActionMessage(AccountConstants.LOAN_TRANSFER_PERMISSION,
getLocalizedMessage("accounts.mode_of_payment_permission")));
}
}
private void validateTransfer(ActionErrors errors) {
if (paymentTypeId.equals(String.valueOf(transferPaymentTypeId))
&& StringUtils.isBlank(accountForTransfer)) {
errors.add(AccountConstants.NO_ACCOUNT_FOR_TRANSFER, new ActionMessage(AccountConstants.NO_ACCOUNT_FOR_TRANSFER));
}
}
private void validateAccountType(ActionErrors errors, String accountType) {
if (accountType != null && accountType.equals(AccountTypeDto.LOAN_ACCOUNT.name())) {
if (getAmount() == null || getAmount().equals("")) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.amt")));
}
}
}
private void validatePaymentType(ActionErrors errors) {
if (StringUtils.isEmpty(getPaymentTypeId())) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.mode_of_payment")));
}
}
private void validateReceiptDate(ActionErrors errors) {
if (getReceiptDate() != null && !getReceiptDate().equals("")) {
ActionErrors validationErrors = validateDate(getReceiptDate(), getLocalizedMessage("accounts.receiptdate"));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
private void validateTransactionDate(ActionErrors errors) {
String fieldName = "accounts.date_of_trxn";
ActionErrors validationErrors = validateDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
if (null != getTransactionDate()){
validationErrors = validatePaymentDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (validationErrors != null && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
//exposed for testing
public ActionErrors validatePaymentDate(String transactionDate, String fieldName) {
ActionErrors errors = null;
try {
if (lastPaymentDate != null && dateFallsBeforeDate(getDateAsSentFromBrowser(transactionDate), lastPaymentDate)) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
new ActionMessage(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
fieldName));
}
} catch (InvalidDateException ide) {
            errors = new ActionErrors(); // don't add a message, since it was already added in validateDate()
}
return errors;
}
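    // Example of the rule above (illustrative dates): if the last recorded payment was on 2011-06-21,
    // a new transaction dated 2011-06-20 is rejected with ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
    // while 2011-06-21 or any later date passes, assuming dateFallsBeforeDate() is a strict
    // "falls before" comparison.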
protected ActionErrors validateDate(String date, String fieldName) {
ActionErrors errors = null;
java.sql.Date sqlDate = null;
if (date != null && !date.equals("")) {
try {
sqlDate = getDateAsSentFromBrowser(date);
if (DateUtils.whichDirection(sqlDate) > 0) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_FUTUREDATE, new ActionMessage(AccountConstants.ERROR_FUTUREDATE,
fieldName));
}
} catch (InvalidDateException ide) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_INVALIDDATE, new ActionMessage(AccountConstants.ERROR_INVALIDDATE,
fieldName));
}
} else {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
fieldName));
}
return errors;
}
protected Locale getUserLocale(HttpServletRequest request) {
Locale locale = null;
HttpSession session = request.getSession();
if (session != null) {
UserContext userContext = (UserContext) session.getAttribute(LoginConstants.USERCONTEXT);
if (null != userContext) {
locale = userContext.getCurrentLocale();
}
}
return locale;
}
protected void validateAmount(ActionErrors errors) {
MifosCurrency currency = null;
if (getCurrencyId() != null && AccountingRules.isMultiCurrencyEnabled()) {
currency = AccountingRules.getCurrencyByCurrencyId(getCurrencyId());
}
DoubleConversionResult conversionResult = validateAmount(getAmount(), currency , AccountConstants.ACCOUNT_AMOUNT, errors, "");
if (amountCannotBeZero() && conversionResult.getErrors().size() == 0 && !(conversionResult.getDoubleValue() > 0.0)) {
addError(errors, AccountConstants.ACCOUNT_AMOUNT, AccountConstants.ERRORS_MUST_BE_GREATER_THAN_ZERO,
getLocalizedMessage(AccountConstants.ACCOUNT_AMOUNT));
}
}
public void setInput(String input) {
this.input = input;
}
public String getPaymentTypeId() {
return paymentTypeId;
}
public void setPaymentTypeId(String paymentTypeId) {
this.paymentTypeId = paymentTypeId;
}
public String getReceiptDate() {
return compileDateString(receiptDateDD, receiptDateMM, receiptDateYY);
}
public void setReceiptDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
receiptDateDD = null;
receiptDateMM = null;
receiptDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
receiptDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
receiptDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
receiptDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getReceiptId() {
return receiptId;
}
public void setReceiptId(String receiptId) {
this.receiptId = receiptId;
}
public String getTransactionDate() {
return compileDateString(transactionDateDD, transactionDateMM, transactionDateYY);
}
public void setTransactionDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
transactionDateDD = null;
transactionDateMM = null;
transactionDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
transactionDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
transactionDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
transactionDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getAccountId() {
return accountId;
}
public void setAccountId(String accountId) {
this.accountId = accountId;
}
public String getGlobalAccountNum() {
return globalAccountNum;
}
public void setGlobalAccountNum(String globalAccountNum) {
this.globalAccountNum = globalAccountNum;
}
protected void clear() throws InvalidDateException {
this.amount = null;
this.paymentTypeId = null;
setReceiptDate(null);
this.receiptId = null;
}
public String getReceiptDateDD() {
return receiptDateDD;
}
public void setReceiptDateDD(String receiptDateDD) {
this.receiptDateDD = receiptDateDD;
}
public String getReceiptDateMM() {
return receiptDateMM;
}
public void setReceiptDateMM(String receiptDateMM) {
this.receiptDateMM = receiptDateMM;
}
public String getReceiptDateYY() {
return receiptDateYY;
}
public void setReceiptDateYY(String receiptDateYY) {
this.receiptDateYY = receiptDateYY;
}
public String getTransactionDateDD() {
return transactionDateDD;
}
public void setTransactionDateDD(String transactionDateDD) {
this.transactionDateDD = transactionDateDD;
}
public String getTransactionDateMM() {
return transactionDateMM;
}
public void setTransactionDateMM(String transactionDateMM) {
this.transactionDateMM = transactionDateMM;
}
public String getTransactionDateYY() {
return transactionDateYY;
}
public void setTransactionDateYY(String transactionDateYY) {
this.transactionDateYY = transactionDateYY;
}
public Short getCurrencyId() {
return this.currencyId;
}
public void setCurrencyId(Short currencyId) {
this.currencyId = currencyId;
}
public String getWaiverInterest() {
return waiverInterest;
}
public void setWaiverInterest(String waiverInterest) {
this.waiverInterest = waiverInterest;
}
public LocalDate getReceiptDateAsLocalDate() throws InvalidDateException {
Date receiptDateStr = getDateAsSentFromBrowser(getReceiptDate());
return (receiptDateStr != null) ? new LocalDate(receiptDateStr.getTime()) : null;
}
public LocalDate getTrxnDateAsLocalDate() throws InvalidDateException {
return new LocalDate(getTrxnDate().getTime());
}
public Date getTrxnDate() throws InvalidDateException {
return getDateAsSentFromBrowser(getTransactionDate());
}
public void setLastPaymentDate(java.util.Date lastPaymentDate) {
this.lastPaymentDate = lastPaymentDate;
}
public String getAccountForTransfer() {
return accountForTransfer;
}
public void setAccountForTransfer(String accountForTransfer) {
this.accountForTransfer = accountForTransfer;
}
public Short getTransferPaymentTypeId() {
return transferPaymentTypeId;
}
public void setTransferPaymentTypeId(Short transferPaymentTypeId) {
this.transferPaymentTypeId = transferPaymentTypeId;
}
}
| jpodeszwik/mifos | application/src/main/java/org/mifos/accounts/struts/actionforms/AccountApplyPaymentActionForm.java | Java | apache-2.0 | 15,538 |
package org.apache.activemq.nob.filestore.uuiddir;
import org.apache.activemq.nob.filestore.BrokerFilenameDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.UUID;
/**
* Decoder of filenames in a UUID-based filesystem store of broker configuration files. This store only supports
* broker IDs in the form of UUIDs.
*
* Created by art on 2/19/15.
*/
public class UUIDDirectoryStoreFilenameDecoder implements BrokerFilenameDecoder {
public static final String XBEAN_FILE_PATH_SUFFIX = "-xbean.xml";
private static final Logger DEFAULT_LOGGER = LoggerFactory.getLogger(UUIDDirectoryStoreFilenameDecoder.class);
private Logger LOG = DEFAULT_LOGGER;
    /**
     * Decode the pathname as a UUID if it is a regular file (i.e. not a directory) and return the UUID.
     *
     * @param brokerPath path to the candidate broker.
     * @return the broker ID in canonical UUID string form, or null if the path is a directory or its name
     *         is not a valid UUID
     */
@Override
public String extractIdFromFilename(File brokerPath) {
String result = null;
if ( ! brokerPath.isDirectory() ) {
try {
UUID uuid = UUID.fromString(brokerPath.getName());
if (uuid != null) {
result = uuid.toString();
}
} catch ( IllegalArgumentException illegalArgExc ) {
LOG.debug("invalid UUID {}", brokerPath.getName());
}
}
return result;
}
    /**
     * Locate the path to the xbean configuration file for the broker at the given path. The broker path is
     * validated first, since the broker ID must be decoded from it.
     *
     * @param brokerPath path to the broker.
     * @return path to the xbean configuration file (even if that file does not exist), or null if the broker
     *         path does not encode a valid UUID
     */
@Override
public File getBrokerXbeanFile(File brokerPath) {
File result = null;
String brokerId = this.extractIdFromFilename(brokerPath);
if ( brokerId != null ) {
result = new File(brokerPath.getPath() + XBEAN_FILE_PATH_SUFFIX);
}
return result;
}
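    // Usage sketch (hypothetical path and decoder instance):
    //
    //   File broker = new File("/var/nob/brokers/2f4d0f3e-9c5d-4a8a-9a9a-3c2f0b6f1a10");
    //   decoder.extractIdFromFilename(broker);   // -> "2f4d0f3e-9c5d-4a8a-9a9a-3c2f0b6f1a10"
    //   decoder.getBrokerXbeanFile(broker);      // -> new File(".../2f4d0f3e-...-xbean.xml")
    //
    // A directory, or a file whose name is not a valid UUID, yields null from both methods.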
}
| hzbarcea/activemq-nob | activemq-nob-filestore/src/main/java/org/apache/activemq/nob/filestore/uuiddir/UUIDDirectoryStoreFilenameDecoder.java | Java | apache-2.0 | 2,083 |
package antw.logger;
import java.util.Date;
import org.apache.tools.ant.BuildEvent;
import antw.common.util.Constants;
import antw.common.util.StringUtil;
import antw.common.util.TimeUtil;
import antw.logger.model.Project;
import antw.logger.model.Target;
public class TreeLogger extends LoggerAdapter {
protected Project _lastProject = new Project("");
protected int _spaceCount = 2;
protected Date _start;
protected final LoggerContext _context;
private boolean _junitTaskWasRunning;
public TreeLogger(LoggerContext context) {
_context = context;
}
@Override
public void targetStarted(BuildEvent event) {
Target target = _context.getTarget(event);
if (!target.getProject().equals(_lastProject)) {
switchToSubProject(_lastProject, target.getProject());
switchFromSubProject(_lastProject, target.getProject());
printProject(target.getProject());
}
_lastProject = target.getProject();
space(_spaceCount + 2);
out("%-40s %-40s%n",
new Object[] {
"|--- " + target.getName(),
"[" + target.getCounter() + " times; "
+ TimeUtil.formatTimeDuration(System.currentTimeMillis() - _start.getTime()) + "]" });
}
private void printProject(Project project) {
space(_spaceCount + 2);
out("|");
space(_spaceCount + 1);
out(project.getName());
}
private void switchFromSubProject(Project lastProject, Project project) {
if (lastProject.isSubProject()) {
if (!project.isSubProject()) {
space(_spaceCount + 1);
_spaceCount -= 2;
out("/");
}
}
}
private void switchToSubProject(Project lastProject, Project currentProject) {
if (!lastProject.isSubProject()) {
if (currentProject.isSubProject()) {
_spaceCount += 2;
space(_spaceCount + 1);
out("\\");
}
}
}
@Override
public void buildStarted(BuildEvent event) {
newLine();
_start = new Date();
}
@Override
public void buildFinished(BuildEvent event) {
if (event.getException() != null) {
newLine(3);
err(event.getException(), false);
newLine(3);
out("BUILD FAILED :(");
out("Total Time: " + _context.getProjects().getDurationAsString());
} else {
newLine(3);
out("BUILD SUCCESSFUL :)");
out("Total Time: " + _context.getProjects().getDurationAsString());
}
newLine(2);
}
@Override
public void messageLogged(BuildEvent event) {
if (event.getTask() != null) {
if ("junit".equals(event.getTask().getTaskType())) {
if (!_junitTaskWasRunning) {
switchToTestSuite();
}
if (event.getPriority() <= org.apache.tools.ant.Project.MSG_INFO) {
String message = event.getMessage();
if (message.contains(Constants.TEST_SUITE_LABEL)) {
printTestSuite(message);
} else if (message.contains(Constants.TEST_CASE_LABEL)) {
printTestCase(message);
}
}
_junitTaskWasRunning = true;
} else {
if (_junitTaskWasRunning) {
switchFromTestSuite();
}
_junitTaskWasRunning = false;
}
}
}
private void switchFromTestSuite() {
space(_spaceCount + 1);
_spaceCount -= 2;
out("/");
}
private void printTestCase(String message) {
space(_spaceCount + 2);
out("|--- " + StringUtil.remove(Constants.TEST_CASE_LABEL, message));
}
private void switchToTestSuite() {
_spaceCount += 2;
space(_spaceCount + 1);
out("\\");
}
    private void printTestSuite(String testSuite) {
        space(_spaceCount + 2);
        out("|");
        space(_spaceCount + 1);
        out(StringUtil.remove(Constants.TEST_SUITE_LABEL, testSuite));
    }
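    // Rough shape of the resulting console output (illustrative target names and durations):
    //
    //      |
    //     my-project
    //       |--- compile                          [1 times; 2 sec]
    //       |--- test                             [3 times; 5 sec]
    //        \
    //          |
    //         com.example.FooTest
    //          |--- testBar
    //        /
    //
    // Targets are listed under their project; while the junit task is running, test suites and test
    // cases are indented one extra level between the "\" and "/" markers.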
}
| mbauhardt/antw | modules/logger/src/main/java/antw/logger/TreeLogger.java | Java | apache-2.0 | 4,319 |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.testdriver.rule;
import java.math.BigDecimal;
import java.text.MessageFormat;
/**
* Accepts iff actual decimal is in [ expected + lower-bound, expected + upper-bound ].
* @since 0.2.0
*/
public class DecimalRange implements ValuePredicate<BigDecimal> {
private final BigDecimal lowerBound;
private final BigDecimal upperBound;
/**
* Creates a new instance.
* @param lowerBound lower bound offset from expected value
* @param upperBound upper bound offset from expected value
*/
public DecimalRange(BigDecimal lowerBound, BigDecimal upperBound) {
this.lowerBound = lowerBound;
this.upperBound = upperBound;
}
@Override
public boolean accepts(BigDecimal expected, BigDecimal actual) {
if (expected == null || actual == null) {
throw new IllegalArgumentException();
}
return expected.add(lowerBound).compareTo(actual) <= 0
&& actual.compareTo(expected.add(upperBound)) <= 0;
}
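    // Worked example: with lowerBound = -0.05 and upperBound = 0.05, an expected value of 10.00
    // accepts any actual value in [9.95, 10.05], so accepts(10.00, 10.03) is true and
    // accepts(10.00, 10.06) is false.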
@Override
public String describeExpected(BigDecimal expected, BigDecimal actual) {
if (expected == null) {
return "(error)"; //$NON-NLS-1$
}
return MessageFormat.format(
"{0} ~ {1}", //$NON-NLS-1$
Util.format(expected.add(lowerBound)),
Util.format(expected.add(upperBound)));
}
}
| cocoatomo/asakusafw | testing-project/asakusa-test-moderator/src/main/java/com/asakusafw/testdriver/rule/DecimalRange.java | Java | apache-2.0 | 2,011 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2015 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.sasl.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslServerFactory;
import org.wildfly.common.Assert;
import org.wildfly.security.auth.callback.ServerCredentialCallback;
import org.wildfly.security.credential.Credential;
/**
* A {@link SaslServerFactory} which sets the server's credential.
*
* @author <a href="mailto:fjuma@redhat.com">Farah Juma</a>
*/
public final class CredentialSaslServerFactory extends AbstractDelegatingSaslServerFactory {
private final Credential credential;
/**
* Construct a new instance.
*
* @param delegate the delegate SASL server factory
* @param credential the server credential to use
*/
public CredentialSaslServerFactory(final SaslServerFactory delegate, final Credential credential) {
super(delegate);
Assert.checkNotNullParam("credential", credential);
this.credential = credential;
}
public SaslServer createSaslServer(final String mechanism, final String protocol, final String serverName, final Map<String, ?> props, final CallbackHandler cbh) throws SaslException {
return delegate.createSaslServer(mechanism, protocol, serverName, props, callbacks -> {
ArrayList<Callback> list = new ArrayList<>(Arrays.asList(callbacks));
final Iterator<Callback> iterator = list.iterator();
while (iterator.hasNext()) {
Callback callback = iterator.next();
if (callback instanceof ServerCredentialCallback) {
final ServerCredentialCallback credentialCallback = (ServerCredentialCallback) callback;
if (credentialCallback.isCredentialSupported(credential)) {
credentialCallback.setCredential(credential);
iterator.remove();
}
}
}
if (!list.isEmpty()) {
cbh.handle(list.toArray(new Callback[list.size()]));
}
});
}
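    // Usage sketch (delegateFactory, serverCredential, props and handler are assumed to exist):
    //
    //   SaslServerFactory wrapped = new CredentialSaslServerFactory(delegateFactory, serverCredential);
    //   SaslServer server = wrapped.createSaslServer("SCRAM-SHA-256", "imap", "example.org", props, handler);
    //
    // Any ServerCredentialCallback raised by the mechanism is satisfied with the configured credential;
    // all other callbacks are still forwarded to the original CallbackHandler.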
}
| sguilhen/wildfly-elytron | src/main/java/org/wildfly/security/sasl/util/CredentialSaslServerFactory.java | Java | apache-2.0 | 2,996 |
package com.board;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
public class BoardDAO {
private Connection conn;
public BoardDAO(Connection conn){
this.conn = conn;
}
	// 1. maximum value of num
public int getMaxNum(){
int maxNum = 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(max(num),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
maxNum = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return maxNum;
}
	// insert a new post (created.jsp -> created_ok.jsp)
public int insertData(BoardForm dto){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "insert into board" +
"(num, name, pwd, email, subject, content," +
"ipAddr, hitCount, created) " +
"values(?, ?, ?, ?, ?, ?, ?, 0, sysdate)";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, dto.getNum());
pstmt.setString(2, dto.getName());
pstmt.setString(3, dto.getPwd());
pstmt.setString(4, dto.getEmail());
pstmt.setString(5, dto.getSubject());
pstmt.setString(6, dto.getContent());
pstmt.setString(7, dto.getIpAddr());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println("# insertData");
System.out.println(e.toString());
}
return result;
}
	// fetch a page of all posts
public List<BoardForm> getList(int start, int end){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, start);
pstmt.setInt(2, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return lists;
}
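	// Paging example for the ROWNUM query above: for page 3 with 10 rows per page the caller passes
	// start = 21 and end = 30 (start = (page - 1) * pageSize + 1, end = page * pageSize); the inner
	// "order by num desc" subquery keeps the newest posts first.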
	// total number of posts
public int getDataCount(){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(count(*),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// increment the hit count
public int updateHitCount(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set hitCount=hitCount+1 where num=?" ;
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// read a single post
public BoardForm getReadData(int num){
BoardForm dto = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
			// subject, writer, created date, hit count, content, IP address
sql = "select num, name, pwd, email, subject, content, ipaddr, created, hitCount " +
"from board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
rs = pstmt.executeQuery();
if(rs.next()){
dto = new BoardForm();
dto.setNum(rs.getInt("num"));
dto.setName(rs.getString("name"));
dto.setPwd(rs.getString("pwd"));
dto.setEmail(rs.getString("email"));
dto.setSubject(rs.getString("subject"));
dto.setContent(rs.getString("content"));
dto.setIpAddr(rs.getString("ipAddr"));
dto.setHitCount(rs.getInt("hitCount"));
dto.setCreated(rs.getString("created"));
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return dto;
}
	// Delete a post
public int deleteData(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "delete board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// Update a post
public int updateData(BoardForm dto){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set name=?, pwd=?, subject=?, content=?, email=? where num=? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, dto.getName());
pstmt.setString(2, dto.getPwd());
pstmt.setString(3, dto.getSubject());
pstmt.setString(4, dto.getContent());
pstmt.setString(5, dto.getEmail());
pstmt.setInt(6, dto.getNum());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// Get the number of posts matching a search
public int getDataCount(String searchKey, String searchValue){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select nvl(count(*),0) from board where "+searchKey + " like ?";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
	// Fetch one page of search results
public List<BoardForm> getList(int start, int end, String searchKey, String searchValue){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board where "+searchKey + " like ? order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
pstmt.setInt(2, start);
pstmt.setInt(3, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return lists;
}
}
/////////////////
| xerato/vk-study | src/com/board/BoardDAO.java | Java | apache-2.0 | 7,784 |
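// Illustrative usage sketch (not part of the repository above): paging through
// posts with BoardDAO. The JDBC URL and credentials are assumptions for the
// example; only methods defined above (getDataCount, getList) are used, and
// BoardForm is the bean populated by the DAO.
package com.board;

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;

public class BoardDAOExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical Oracle connection settings.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:oracle:thin:@localhost:1521:xe", "scott", "tiger")) {
            BoardDAO dao = new BoardDAO(conn);

            int total = dao.getDataCount();         // total number of posts
            int pageSize = 10;
            int page = 1;                           // first page
            int start = (page - 1) * pageSize + 1;  // rownum is 1-based
            int end = page * pageSize;

            List<BoardForm> posts = dao.getList(start, end);
            System.out.println("total=" + total + ", rows on page=" + posts.size());
        }
    }
}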
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.antennaesdk.common.messages;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* <code>ServerRestMessage</code> carries a REST api call to the mobile-broker.
* Broker executes this call, and returns the result via <code>ClientMessage</code>.
*
* @see ClientMessage
*/
public class ServerRestMessage {
// from where the message originates.
// it can be from a user or from a server (bot)
private ClientAddress from;
// rest resource path such as "/api/books"
// another example would be "/api/books?id=383763"
// another example would be "/api/books/383763"
private String path;
// represents the "protocol//host:port" such as "https://toys.company.com:8443"
// or port can be optional such as "https://toys.company.com"
private String host;
// represents REST method such as "GET", "POST", "PUT", "DELETE" etc
// TODO: use an enum instead of string
private String method;
// actual message ( this the payload if its POST/PUT call )
// this is optional
private String payLoad;
// the headers for a REST message
private Map<String, String> headers = new HashMap<>();
// The name/value pairs of multipart entities. Implies a multipart request.
private Map<String, String> multipartEntities;
// unique identified to track the request on the client side.
private String requestId;
// TODO: use TypeAdapterFactory instead of passing the type.
private String classType = ServerRestMessage.class.getName();
// getters and setters
public ServerRestMessage(){
requestId = UUID.randomUUID().toString();
}
public ServerRestMessage( String requestId ){
this.requestId = requestId;
}
public ClientAddress getFrom() {
return from;
}
public void setFrom(ClientAddress from) {
this.from = from;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public String getMethod() {
return method;
}
public void setMethod(String method) {
this.method = method;
}
public String getPayLoad() {
return payLoad;
}
public void setPayLoad(String payLoad) {
this.payLoad = payLoad;
}
public Map<String, String> getHeaders() { return headers; }
public void setHeaders(Map<String, String> headers) { this.headers = headers; }
public void setMultipartEntities(Map<String, String> multipartEntities) { this.multipartEntities = multipartEntities; }
public Map<String, String> getMultipartEntities() { return multipartEntities; }
public String getRequestId() {
return requestId;
}
public void setRequestId(String requestId) {
this.requestId = requestId;
}
// utility methods
public String toJson(){
Gson gson = new Gson();
String json = gson.toJson(this);
return json;
}
public String toJsonPretty(){
Gson gson = new GsonBuilder().setPrettyPrinting().create();
String json = gson.toJson(this);
return json;
}
public static ServerRestMessage fromJson(String json ){
Gson gson = new Gson();
ServerRestMessage result = gson.fromJson( json, ServerRestMessage.class);
return result;
}
}
| AntennaeSDK/MMS | client-api/src/main/java/com/github/antennaesdk/common/messages/ServerRestMessage.java | Java | apache-2.0 | 4,171 |
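// Illustrative usage sketch (not part of the repository above): building a
// ServerRestMessage for a GET call and round-tripping it through JSON with the
// toJsonPretty()/fromJson() helpers defined in the class. Host and path values
// are taken from the examples in the field comments, not from a real service.
package com.github.antennaesdk.common.messages;

public class ServerRestMessageExample {
    public static void main(String[] args) {
        ServerRestMessage message = new ServerRestMessage();
        message.setHost("https://toys.company.com:8443");
        message.setPath("/api/books?id=383763");
        message.setMethod("GET");
        message.getHeaders().put("Accept", "application/json");

        String json = message.toJsonPretty();
        ServerRestMessage copy = ServerRestMessage.fromJson(json);

        System.out.println(copy.getMethod() + " " + copy.getHost() + copy.getPath()
                + " requestId=" + copy.getRequestId());
    }
}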
package io.quarkus.it.spring.data.jpa;
import java.io.Serializable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
@NoRepositoryBean
public interface IntermediateRepository<T, ID extends Serializable> extends JpaRepository<T, ID> {
default public void doNothing() {
}
default public T findMandatoryById(ID id) {
return findById(id).orElseThrow(() -> new IllegalStateException("not found: " + id));
}
}
| quarkusio/quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/IntermediateRepository.java | Java | apache-2.0 | 508 |
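// Illustrative sketch (not part of the Quarkus integration test above): a concrete
// repository reusing the @NoRepositoryBean base. The Customer entity is an
// assumption made only for this example; findMandatoryById(...) is the default
// method defined in IntermediateRepository and throws IllegalStateException when
// the id does not exist.
package io.quarkus.it.spring.data.jpa;

public interface CustomerRepository extends IntermediateRepository<Customer, Long> {
    // Standard CRUD methods are inherited from JpaRepository via IntermediateRepository.
}

// Example call site (e.g. inside a transactional service method):
//   Customer customer = customerRepository.findMandatoryById(42L);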
/*
* Copyright 2016 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns.threadsafety;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.Immutable;
import com.google.errorprone.annotations.ImmutableTypeParameter;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Purpose;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Violation;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.TypeVariableSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.Type.ClassType;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.type.TypeKind;
/** Analyzes types for deep immutability. */
public class ImmutableAnalysis {
private final BugChecker bugChecker;
private final VisitorState state;
private final WellKnownMutability wellKnownMutability;
private final ThreadSafety threadSafety;
ImmutableAnalysis(
BugChecker bugChecker,
VisitorState state,
WellKnownMutability wellKnownMutability,
ImmutableSet<String> immutableAnnotations) {
this.bugChecker = bugChecker;
this.state = state;
this.wellKnownMutability = wellKnownMutability;
this.threadSafety =
ThreadSafety.builder()
.setPurpose(Purpose.FOR_IMMUTABLE_CHECKER)
.knownTypes(wellKnownMutability)
.markerAnnotations(immutableAnnotations)
.typeParameterAnnotation(ImmutableTypeParameter.class)
.build(state);
}
public ImmutableAnalysis(
BugChecker bugChecker, VisitorState state, WellKnownMutability wellKnownMutability) {
this(bugChecker, state, wellKnownMutability, ImmutableSet.of(Immutable.class.getName()));
}
Violation isThreadSafeType(
boolean allowContainerTypeParameters, Set<String> containerTypeParameters, Type type) {
return threadSafety.isThreadSafeType(
allowContainerTypeParameters, containerTypeParameters, type);
}
boolean hasThreadSafeTypeParameterAnnotation(TypeVariableSymbol sym) {
return threadSafety.hasThreadSafeTypeParameterAnnotation(sym);
}
Violation checkInstantiation(
Collection<TypeVariableSymbol> classTypeParameters, Collection<Type> typeArguments) {
return threadSafety.checkInstantiation(classTypeParameters, typeArguments);
}
public Violation checkInvocation(Type methodType, Symbol symbol) {
return threadSafety.checkInvocation(methodType, symbol);
}
/** Accepts {@link Violation violations} that are found during the analysis. */
@FunctionalInterface
public interface ViolationReporter {
Description.Builder describe(Tree tree, Violation info);
@CheckReturnValue
default Description report(Tree tree, Violation info, Optional<SuggestedFix> suggestedFix) {
Description.Builder description = describe(tree, info);
suggestedFix.ifPresent(description::addFix);
return description.build();
}
}
/**
* Check that an {@code @Immutable}-annotated class:
*
* <ul>
* <li>does not declare or inherit any mutable fields,
* <li>any immutable supertypes are instantiated with immutable type arguments as required by
* their containerOf spec, and
* <li>any enclosing instances are immutable.
* </ul>
*
* requiring supertypes to be annotated immutable would be too restrictive.
*/
public Violation checkForImmutability(
Optional<ClassTree> tree,
ImmutableSet<String> immutableTyParams,
ClassType type,
ViolationReporter reporter) {
Violation info = areFieldsImmutable(tree, immutableTyParams, type, reporter);
if (info.isPresent()) {
return info;
}
for (Type interfaceType : state.getTypes().interfaces(type)) {
AnnotationInfo interfaceAnnotation = getImmutableAnnotation(interfaceType.tsym, state);
if (interfaceAnnotation == null) {
continue;
}
info =
threadSafety.checkSuperInstantiation(
immutableTyParams, interfaceAnnotation, interfaceType);
if (info.isPresent()) {
return info.plus(
String.format(
"'%s' extends '%s'",
threadSafety.getPrettyName(type.tsym),
threadSafety.getPrettyName(interfaceType.tsym)));
}
}
if (!type.asElement().isEnum()) {
// don't check enum super types here to avoid double-reporting errors
info = checkSuper(immutableTyParams, type);
if (info.isPresent()) {
return info;
}
}
Type mutableEnclosing = threadSafety.mutableEnclosingInstance(tree, type);
if (mutableEnclosing != null) {
return info.plus(
String.format(
"'%s' has mutable enclosing instance '%s'",
threadSafety.getPrettyName(type.tsym), mutableEnclosing));
}
return Violation.absent();
}
private Violation checkSuper(ImmutableSet<String> immutableTyParams, ClassType type) {
ClassType superType = (ClassType) state.getTypes().supertype(type);
if (superType.getKind() == TypeKind.NONE
|| state.getTypes().isSameType(state.getSymtab().objectType, superType)) {
return Violation.absent();
}
if (WellKnownMutability.isAnnotation(state, type)) {
// TODO(b/25630189): add enforcement
return Violation.absent();
}
AnnotationInfo superannotation = getImmutableAnnotation(superType.tsym, state);
String message =
String.format(
"'%s' extends '%s'",
threadSafety.getPrettyName(type.tsym), threadSafety.getPrettyName(superType.tsym));
if (superannotation != null) {
// If the superclass does happen to be immutable, we don't need to recursively
// inspect it. We just have to check that it's instantiated correctly:
Violation info =
threadSafety.checkSuperInstantiation(immutableTyParams, superannotation, superType);
if (!info.isPresent()) {
return Violation.absent();
}
return info.plus(message);
}
// Recursive case: check if the supertype is 'effectively' immutable.
Violation info =
checkForImmutability(
Optional.<ClassTree>empty(),
immutableTyParams,
superType,
new ViolationReporter() {
@Override
public Description.Builder describe(Tree tree, Violation info) {
return bugChecker
.buildDescription(tree)
.setMessage(info.plus(info.message()).message());
}
});
if (!info.isPresent()) {
return Violation.absent();
}
return info.plus(message);
}
/**
* Check a single class' fields for immutability.
*
* @param immutableTyParams the in-scope immutable type parameters
* @param classType the type to check the fields of
*/
Violation areFieldsImmutable(
Optional<ClassTree> tree,
ImmutableSet<String> immutableTyParams,
ClassType classType,
ViolationReporter reporter) {
ClassSymbol classSym = (ClassSymbol) classType.tsym;
if (classSym.members() == null) {
return Violation.absent();
}
Predicate<Symbol> instanceFieldFilter =
symbol -> symbol.getKind() == ElementKind.FIELD && !symbol.isStatic();
Map<Symbol, Tree> declarations = new HashMap<>();
if (tree.isPresent()) {
for (Tree member : tree.get().getMembers()) {
Symbol sym = ASTHelpers.getSymbol(member);
if (sym != null) {
declarations.put(sym, member);
}
}
}
// javac gives us members in reverse declaration order
// handling them in declaration order leads to marginally better diagnostics
List<Symbol> members =
ImmutableList.copyOf(ASTHelpers.scope(classSym.members()).getSymbols(instanceFieldFilter))
.reverse();
for (Symbol member : members) {
Optional<Tree> memberTree = Optional.ofNullable(declarations.get(member));
Violation info =
isFieldImmutable(
memberTree, immutableTyParams, classSym, classType, (VarSymbol) member, reporter);
if (info.isPresent()) {
return info;
}
}
return Violation.absent();
}
/** Check a single field for immutability. */
private Violation isFieldImmutable(
Optional<Tree> tree,
ImmutableSet<String> immutableTyParams,
ClassSymbol classSym,
ClassType classType,
VarSymbol var,
ViolationReporter reporter) {
if (bugChecker.isSuppressed(var)) {
return Violation.absent();
}
if (!var.getModifiers().contains(Modifier.FINAL)
&& !ASTHelpers.hasAnnotation(var, LazyInit.class, state)) {
Violation info =
Violation.of(
String.format(
"'%s' has non-final field '%s'",
threadSafety.getPrettyName(classSym), var.getSimpleName()));
if (tree.isPresent()) {
// If we have a tree to attach diagnostics to, report the error immediately instead of
// accumulating the path to the error from the top-level class being checked
state.reportMatch(
reporter.report(
tree.get(), info, SuggestedFixes.addModifiers(tree.get(), state, Modifier.FINAL)));
return Violation.absent();
}
return info;
}
Type varType = state.getTypes().memberType(classType, var);
Violation info =
threadSafety.isThreadSafeType(
/* allowContainerTypeParameters= */ true, immutableTyParams, varType);
if (info.isPresent()) {
info =
info.plus(
String.format(
"'%s' has field '%s' of type '%s'",
threadSafety.getPrettyName(classSym), var.getSimpleName(), varType));
if (tree.isPresent()) {
// If we have a tree to attach diagnostics to, report the error immediately instead of
// accumulating the path to the error from the top-level class being checked
state.reportMatch(reporter.report(tree.get(), info, Optional.empty()));
return Violation.absent();
}
return info;
}
return Violation.absent();
}
/**
* Gets the {@link Symbol}'s {@code @Immutable} annotation info, either from an annotation on the
* symbol or from the list of well-known immutable types.
*/
AnnotationInfo getImmutableAnnotation(Symbol sym, VisitorState state) {
String nameStr = sym.flatName().toString();
AnnotationInfo known = wellKnownMutability.getKnownImmutableClasses().get(nameStr);
if (known != null) {
return known;
}
return threadSafety.getInheritedAnnotation(sym, state);
}
/**
* Gets the {@link Tree}'s {@code @Immutable} annotation info, either from an annotation on the
* symbol or from the list of well-known immutable types.
*/
AnnotationInfo getImmutableAnnotation(Tree tree, VisitorState state) {
Symbol sym = ASTHelpers.getSymbol(tree);
return sym == null ? null : threadSafety.getMarkerOrAcceptedAnnotation(sym, state);
}
}
| cushon/error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableAnalysis.java | Java | apache-2.0 | 12,491 |
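// Illustrative sketch (not part of Error Prone itself): the kind of code the
// analysis above reasons about. Per isFieldImmutable(), every instance field of
// an @Immutable class must be final (or @LazyInit) and of a deeply immutable
// type. The class and field names below are made up for the example.
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.Immutable;
import java.util.List;

@Immutable
class GoodConfig {
    private final String name;                 // final field of an immutable type: accepted
    private final ImmutableList<String> hosts; // well-known immutable type: accepted

    GoodConfig(String name, ImmutableList<String> hosts) {
        this.name = name;
        this.hosts = hosts;
    }
}

@Immutable
class BadConfig {
    private int counter;              // non-final field -> "'BadConfig' has non-final field 'counter'"
    private final List<String> hosts; // mutable field type -> "'BadConfig' has field 'hosts' of type ..."

    BadConfig(List<String> hosts) {
        this.hosts = hosts;
    }
}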
package com.planet_ink.coffee_mud.Abilities.Songs;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Play_Organs extends Play_Instrument
{
@Override public String ID() { return "Play_Organs"; }
private final static String localizedName = CMLib.lang()._("Organs");
@Override public String name() { return localizedName; }
@Override protected int requiredInstrumentType(){return MusicalInstrument.TYPE_ORGANS;}
@Override public String mimicSpell(){return "Prayer_ProtectHealth";}
@Override protected int canAffectCode(){return 0;}
private static Ability theSpell=null;
@Override
protected Ability getSpell()
{
if(theSpell!=null) return theSpell;
if(mimicSpell().length()==0) return null;
theSpell=CMClass.getAbility(mimicSpell());
return theSpell;
}
}
| vjanmey/EpicMudfia | com/planet_ink/coffee_mud/Abilities/Songs/Play_Organs.java | Java | apache-2.0 | 2,094 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.mledger;
public class ManagedLedgerFactoryConfig {
private static final long MB = 1024 * 1024;
private long maxCacheSize = 128 * MB;
private double cacheEvictionWatermark = 0.90;
public long getMaxCacheSize() {
return maxCacheSize;
}
    /**
     * Set the maximum size (in bytes) of the cache used by the managed ledger factory.
     *
     * @param maxCacheSize
     *            maximum cache size in bytes
     * @return this config instance
     */
public ManagedLedgerFactoryConfig setMaxCacheSize(long maxCacheSize) {
this.maxCacheSize = maxCacheSize;
return this;
}
public double getCacheEvictionWatermark() {
return cacheEvictionWatermark;
}
    /**
     * The cache eviction watermark is the percentage of the cache size to reach when removing entries from the cache.
     *
     * @param cacheEvictionWatermark
     *            fraction of the maximum cache size to reach when entries are evicted
     * @return this config instance
     */
public ManagedLedgerFactoryConfig setCacheEvictionWatermark(double cacheEvictionWatermark) {
this.cacheEvictionWatermark = cacheEvictionWatermark;
return this;
}
}
| yush1ga/pulsar | managed-ledger/src/main/java/org/apache/bookkeeper/mledger/ManagedLedgerFactoryConfig.java | Java | apache-2.0 | 1,792 |
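// Illustrative usage sketch (not part of the sources above): configuring the
// cache limits with the fluent setters. The 256 MB size is an arbitrary example
// value; the watermark follows the javadoc, i.e. the fraction of the cache size
// to reach when entries are evicted.
import org.apache.bookkeeper.mledger.ManagedLedgerFactoryConfig;

public class ManagedLedgerFactoryConfigExample {
    public static void main(String[] args) {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig()
                .setMaxCacheSize(256 * 1024 * 1024L)  // cache limit in bytes
                .setCacheEvictionWatermark(0.90);     // evict down to 90% of the limit

        System.out.println("maxCacheSize=" + config.getMaxCacheSize()
                + ", watermark=" + config.getCacheEvictionWatermark());
    }
}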
package com.mygame;
import java.util.Vector;
import loon.geom.RectBox;
public class StepSwitch extends Switch
{
StepSwitch(int x, int y, Vector<Thing> blocks)
{
this.x = x;
this.y = y;
this.blocks = blocks;
orgblocks = new Vector<Thing>();
for(int i = 0; i < blocks.size(); i++)
orgblocks.add((Thing)blocks.get(i));
height = 50;
width = 50;
active = false;
col = new RectBox(x, y, width, height);
}
public void update(Player player, Vector<Thing> things)
{
boolean b = false;
for(int i = 0; i < things.size(); i++)
if(col.intersects((int)((Thing)things.get(i)).x, (int)((Thing)things.get(i)).y, ((Thing)things.get(i)).width, ((Thing)things.get(i)).height) && !b)
{
b = true;
active = true;
blocks.clear();
}
if(col.intersects((int)player.x, (int)player.y, player.width, player.height))
{
b = true;
active = true;
blocks.clear();
}
if(!b)
{
active = false;
if(blocks.isEmpty())
{
for(int i = 0; i < orgblocks.size(); i++)
blocks.add((Thing)orgblocks.get(i));
}
}
}
}
| cping/LGame | Java/Examples/arpggame(0.5)/src/com/mygame/StepSwitch.java | Java | apache-2.0 | 1,359 |
/*
 * NumberAxisBuilder.java
*
* Created on March 17, 2007, 10:17 PM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package com.thecoderscorner.groovychart.axis;
import java.beans.IntrospectionException;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.thecoderscorner.groovychart.chart.Buildable;
import com.thecoderscorner.groovychart.chart.ChartBuilder;
import com.thecoderscorner.groovychart.chart.BeanBuilder;
import org.jfree.chart.axis.NumberAxis;
/**
*
* @author jclarke
*/
public class NumberAxisBuilder extends BeanBuilder implements Buildable{
private static final Logger logger = Logger.getLogger(NumberAxisBuilder.class.getPackage().getName());
private NumberAxis axis = new NumberAxis();
private boolean domain;
/**
     * Creates a new instance of NumberAxisBuilder
*/
public NumberAxisBuilder() {
try {
setBeanClass(NumberAxis.class);
} catch (IntrospectionException ex) {
logger.log(Level.WARNING, ex.getMessage(), ex);
}
}
public void setChartBuilder(ChartBuilder chartBuilder) {
}
public void processNode(Object name, Map map, Object value) throws Exception {
String method = name.toString();
if(value != null) {
this.axis = (NumberAxis)value;
}else {
if(logger.isLoggable(Level.FINEST))
logger.finest("processNode: method = " + method);
if(method.equalsIgnoreCase("NumberAxis")) {
this.setProperties(this.axis, map);
}
}
}
private Object parent;
public Object getParent() {
return parent;
}
public void setParent(Object parent) {
this.parent = parent;
}
public void nodeCompleted(Object parent) {
if(parent != null && parent instanceof AxisSettable) {
logger.finest("Setting axis on parent");
((AxisSettable)parent).setAxis(this.axis);
}
}
private String name;
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public NumberAxis getAxis() {
return axis;
}
}
| davetcc/groovychart | src/main/java/com/thecoderscorner/groovychart/axis/NumberAxisBuilder.java | Java | apache-2.0 | 2,310 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model.transform;
import java.util.Map;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.servicecatalog.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* UpdateServiceActionRequestMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateServiceActionRequestMarshaller {
private static final MarshallingInfo<String> ID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("Id").build();
private static final MarshallingInfo<String> NAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("Name").build();
private static final MarshallingInfo<Map> DEFINITION_BINDING = MarshallingInfo.builder(MarshallingType.MAP).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("Definition").build();
private static final MarshallingInfo<String> DESCRIPTION_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Description").build();
private static final MarshallingInfo<String> ACCEPTLANGUAGE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("AcceptLanguage").build();
private static final UpdateServiceActionRequestMarshaller instance = new UpdateServiceActionRequestMarshaller();
public static UpdateServiceActionRequestMarshaller getInstance() {
return instance;
}
/**
* Marshall the given parameter object.
*/
public void marshall(UpdateServiceActionRequest updateServiceActionRequest, ProtocolMarshaller protocolMarshaller) {
if (updateServiceActionRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateServiceActionRequest.getId(), ID_BINDING);
protocolMarshaller.marshall(updateServiceActionRequest.getName(), NAME_BINDING);
protocolMarshaller.marshall(updateServiceActionRequest.getDefinition(), DEFINITION_BINDING);
protocolMarshaller.marshall(updateServiceActionRequest.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(updateServiceActionRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| aws/aws-sdk-java | aws-java-sdk-servicecatalog/src/main/java/com/amazonaws/services/servicecatalog/model/transform/UpdateServiceActionRequestMarshaller.java | Java | apache-2.0 | 3,346 |
/*
* Copyright (C) 2016 the original author or authors.
*
* This file is part of jGrades Application Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.jgrades.lic.api.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
@AllArgsConstructor
public class LicenceValidationResult {
private boolean valid;
private String errorMessage;
public LicenceValidationResult() {
valid = true;
errorMessage = null;
}
}
| jgrades/jgrades | jg-backend/implementation/base/jg-lic/interface/src/main/java/org/jgrades/lic/api/model/LicenceValidationResult.java | Java | apache-2.0 | 636 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediaconvert.model;
import javax.annotation.Generated;
/**
* Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing. Default is Deinterlace. -
* Deinterlace converts interlaced to progressive. - Inverse telecine converts Hard Telecine 29.97i to progressive
* 23.976p. - Adaptive auto-detects and converts to progressive.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum DeinterlacerMode {
DEINTERLACE("DEINTERLACE"),
INVERSE_TELECINE("INVERSE_TELECINE"),
ADAPTIVE("ADAPTIVE");
private String value;
private DeinterlacerMode(String value) {
this.value = value;
}
@Override
public String toString() {
return this.value;
}
/**
* Use this in place of valueOf.
*
* @param value
* real value
* @return DeinterlacerMode corresponding to the value
*
* @throws IllegalArgumentException
* If the specified value does not map to one of the known values in this enum.
*/
public static DeinterlacerMode fromValue(String value) {
if (value == null || "".equals(value)) {
throw new IllegalArgumentException("Value cannot be null or empty!");
}
for (DeinterlacerMode enumEntry : DeinterlacerMode.values()) {
if (enumEntry.toString().equals(value)) {
return enumEntry;
}
}
throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-mediaconvert/src/main/java/com/amazonaws/services/mediaconvert/model/DeinterlacerMode.java | Java | apache-2.0 | 2,135 |
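// Illustrative usage sketch (not part of the generated SDK above): fromValue(...)
// maps the wire string to the enum constant and rejects unknown values, which is
// why the javadoc recommends it in place of Enum.valueOf for service responses.
import com.amazonaws.services.mediaconvert.model.DeinterlacerMode;

public class DeinterlacerModeExample {
    public static void main(String[] args) {
        DeinterlacerMode mode = DeinterlacerMode.fromValue("INVERSE_TELECINE");
        System.out.println(mode); // toString() returns the wire value

        try {
            DeinterlacerMode.fromValue("NOT_A_MODE");
        } catch (IllegalArgumentException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }
    }
}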
/*
* Copyright (c) 2015 Ngewi Fet <ngewif@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gnucash.android.ui.common;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.LayoutRes;
import android.support.annotation.StringRes;
import android.support.design.widget.NavigationView;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.PopupMenu;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.uservoice.uservoicesdk.UserVoice;
import org.gnucash.android.R;
import org.gnucash.android.app.GnuCashApplication;
import org.gnucash.android.db.DatabaseSchema;
import org.gnucash.android.db.adapter.BooksDbAdapter;
import org.gnucash.android.ui.account.AccountsActivity;
import org.gnucash.android.ui.passcode.PasscodeLockActivity;
import org.gnucash.android.ui.report.ReportsActivity;
import org.gnucash.android.ui.settings.PreferenceActivity;
import org.gnucash.android.ui.transaction.ScheduledActionsActivity;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Base activity implementing the navigation drawer, to be extended by all activities requiring one.
* <p>
* Each activity inheriting from this class has an indeterminate progress bar at the top,
* (above the action bar) which can be used to display busy operations. See {@link #getProgressBar()}
* </p>
*
* <p>Sub-classes should simply provide their layout using {@link #getContentView()} and then annotate
* any variables they wish to use with {@link ButterKnife#bind(Activity)} annotations. The view
* binding will be done in this base abstract class.<br>
* The activity layout of the subclass is expected to contain {@code DrawerLayout} and
* a {@code NavigationView}.<br>
* Sub-class should also consider using the {@code toolbar.xml} or {@code toolbar_with_spinner.xml}
* for the action bar in their XML layout. Otherwise provide another which contains widgets for the
* toolbar and progress indicator with the IDs {@code R.id.toolbar} and {@code R.id.progress_indicator} respectively.
* </p>
* @author Ngewi Fet <ngewif@gmail.com>
*/
public abstract class BaseDrawerActivity extends PasscodeLockActivity implements
PopupMenu.OnMenuItemClickListener {
public static final int ID_MANAGE_BOOKS = 0xB00C;
@BindView(R.id.drawer_layout) DrawerLayout mDrawerLayout;
@BindView(R.id.nav_view) NavigationView mNavigationView;
@BindView(R.id.toolbar) Toolbar mToolbar;
@BindView(R.id.toolbar_progress) ProgressBar mToolbarProgress;
protected TextView mBookNameTextView;
protected ActionBarDrawerToggle mDrawerToggle;
public static final int REQUEST_OPEN_DOCUMENT = 0x20;
private class DrawerItemClickListener implements NavigationView.OnNavigationItemSelectedListener {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
onDrawerMenuItemClicked(menuItem.getItemId());
return true;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(getContentView());
//if a parameter was passed to open an account within a specific book, then switch
String bookUID = getIntent().getStringExtra(UxArgument.BOOK_UID);
if (bookUID != null && !bookUID.equals(BooksDbAdapter.getInstance().getActiveBookUID())){
GnuCashApplication.activateBook(bookUID);
}
ButterKnife.bind(this);
setSupportActionBar(mToolbar);
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null){
actionBar.setHomeButtonEnabled(true);
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setTitle(getTitleRes());
}
mToolbarProgress.getIndeterminateDrawable().setColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN);
View headerView = mNavigationView.getHeaderView(0);
headerView.findViewById(R.id.drawer_title).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onClickAppTitle(v);
}
});
mBookNameTextView = (TextView) headerView.findViewById(R.id.book_name);
mBookNameTextView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onClickBook(v);
}
});
updateActiveBookName();
setUpNavigationDrawer();
}
@Override
protected void onResume() {
super.onResume();
updateActiveBookName();
}
/**
* Return the layout to inflate for this activity
* @return Layout resource identifier
*/
public abstract @LayoutRes int getContentView();
/**
* Return the title for this activity.
* This will be displayed in the action bar
* @return String resource identifier
*/
public abstract @StringRes int getTitleRes();
/**
* Returns the progress bar for the activity.
* <p>This progress bar is displayed above the toolbar and should be used to show busy status
* for long operations.<br/>
* The progress bar visibility is set to {@link View#GONE} by default. Make visible to use </p>
* @return Indeterminate progress bar.
*/
public ProgressBar getProgressBar(){
return mToolbarProgress;
}
/**
* Sets up the navigation drawer for this activity.
*/
private void setUpNavigationDrawer() {
mNavigationView.setNavigationItemSelectedListener(new DrawerItemClickListener());
mDrawerToggle = new ActionBarDrawerToggle(
this, /* host Activity */
mDrawerLayout, /* DrawerLayout object */
R.string.drawer_open, /* "open drawer" description */
R.string.drawer_close /* "close drawer" description */
) {
/** Called when a drawer has settled in a completely closed state. */
public void onDrawerClosed(View view) {
super.onDrawerClosed(view);
}
/** Called when a drawer has settled in a completely open state. */
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
}
};
mDrawerLayout.setDrawerListener(mDrawerToggle);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
mDrawerToggle.syncState();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mDrawerToggle.onConfigurationChanged(newConfig);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home){
if (!mDrawerLayout.isDrawerOpen(mNavigationView))
mDrawerLayout.openDrawer(mNavigationView);
else
mDrawerLayout.closeDrawer(mNavigationView);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Update the display name of the currently active book
*/
protected void updateActiveBookName(){
mBookNameTextView.setText(BooksDbAdapter.getInstance().getActiveBookDisplayName());
}
/**
* Handler for the navigation drawer items
* */
protected void onDrawerMenuItemClicked(int itemId) {
switch (itemId){
case R.id.nav_item_open: { //Open... files
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT){
//use the storage access framework
Intent openDocument = new Intent(Intent.ACTION_OPEN_DOCUMENT);
openDocument.addCategory(Intent.CATEGORY_OPENABLE);
openDocument.setType("*/*");
startActivityForResult(openDocument, REQUEST_OPEN_DOCUMENT);
} else {
AccountsActivity.startXmlFileChooser(this);
}
}
break;
case R.id.nav_item_favorites: { //favorite accounts
Intent intent = new Intent(this, AccountsActivity.class);
intent.putExtra(AccountsActivity.EXTRA_TAB_INDEX,
AccountsActivity.INDEX_FAVORITE_ACCOUNTS_FRAGMENT);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP|Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
case R.id.nav_item_reports: {
Intent intent = new Intent(this, ReportsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
/*
//todo: Re-enable this when Budget UI is complete
case R.id.nav_item_budgets:
startActivity(new Intent(this, BudgetsActivity.class));
break;
*/
case R.id.nav_item_scheduled_actions: { //show scheduled transactions
Intent intent = new Intent(this, ScheduledActionsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
case R.id.nav_item_export:
AccountsActivity.openExportFragment(this);
break;
case R.id.nav_item_settings: //Settings activity
startActivity(new Intent(this, PreferenceActivity.class));
break;
case R.id.nav_item_help:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
prefs.edit().putBoolean(UxArgument.SKIP_PASSCODE_SCREEN, true).apply();
UserVoice.launchUserVoice(this);
break;
}
mDrawerLayout.closeDrawer(mNavigationView);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == Activity.RESULT_CANCELED) {
super.onActivityResult(requestCode, resultCode, data);
return;
}
switch (requestCode) {
case AccountsActivity.REQUEST_PICK_ACCOUNTS_FILE:
AccountsActivity.importXmlFileFromIntent(this, data, null);
break;
case BaseDrawerActivity.REQUEST_OPEN_DOCUMENT: //this uses the Storage Access Framework
final int takeFlags = data.getFlags()
& (Intent.FLAG_GRANT_READ_URI_PERMISSION | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
AccountsActivity.importXmlFileFromIntent(this, data, null);
getContentResolver().takePersistableUriPermission(data.getData(), takeFlags);
break;
default:
super.onActivityResult(requestCode, resultCode, data);
break;
}
}
@Override
public boolean onMenuItemClick(MenuItem item) {
long id = item.getItemId();
if (id == ID_MANAGE_BOOKS){
Intent intent = new Intent(this, PreferenceActivity.class);
intent.setAction(PreferenceActivity.ACTION_MANAGE_BOOKS);
startActivity(intent);
mDrawerLayout.closeDrawer(mNavigationView);
return true;
}
BooksDbAdapter booksDbAdapter = BooksDbAdapter.getInstance();
String bookUID = booksDbAdapter.getUID(id);
if (!bookUID.equals(booksDbAdapter.getActiveBookUID())){
GnuCashApplication.loadBook(bookUID);
finish();
}
AccountsActivity.start(GnuCashApplication.getAppContext());
return true;
}
public void onClickAppTitle(View view){
mDrawerLayout.closeDrawer(mNavigationView);
AccountsActivity.start(this);
}
public void onClickBook(View view){
PopupMenu popup = new PopupMenu(this, view);
popup.setOnMenuItemClickListener(this);
Menu menu = popup.getMenu();
int maxRecent = 0;
Cursor cursor = BooksDbAdapter.getInstance().fetchAllRecords(null, null,
DatabaseSchema.BookEntry.COLUMN_MODIFIED_AT + " DESC");
while (cursor.moveToNext() && maxRecent++ < 5) {
long id = cursor.getLong(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry._ID));
String name = cursor.getString(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry.COLUMN_DISPLAY_NAME));
menu.add(0, (int)id, maxRecent, name);
}
menu.add(0, ID_MANAGE_BOOKS, maxRecent, R.string.menu_manage_books);
popup.show();
}
}
| lxbzmy/gnucash-android | app/src/main/java/org/gnucash/android/ui/common/BaseDrawerActivity.java | Java | apache-2.0 | 13,912 |
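// Illustrative sketch (not part of the GnuCash sources above): the minimal
// contract a subclass fulfils, as described in the class javadoc. The layout and
// string resource ids are hypothetical; the layout is expected to contain the
// DrawerLayout, NavigationView, toolbar and progress widgets that
// BaseDrawerActivity binds via ButterKnife.
package org.gnucash.android.ui.common;

import org.gnucash.android.R;

public class ExampleDrawerActivity extends BaseDrawerActivity {

    @Override
    public int getContentView() {
        return R.layout.activity_example;  // hypothetical layout resource
    }

    @Override
    public int getTitleRes() {
        return R.string.title_example;     // hypothetical title string
    }
}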
package de.nl.moo.data.loader.systems;
import de.nl.moo.data.beans.systems.SystemsApplyerBean;
import de.nl.moo.data.beans.systems.SystemsBean;
import de.nl.moo.data.beans.systems.SystemsSystemBean;
import de.nl.moo.data.dao.GameBeanDAO;
import de.nl.moo.data.loader.AbstractBeanLoader;
import org.springframework.beans.factory.annotation.Autowired;
import javax.inject.Provider;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
public class SystemsBeanLoader extends AbstractBeanLoader<SystemsBean> {
@Autowired
private SystemsBean systemsBean = null;
@Autowired
private Provider<SystemsSystemBeanLoader> systemLoaderProvider = null;
@Autowired
private Provider<SystemsApplyerBeanLoader> applyerLoaderProvider = null;
public SystemsBeanLoader() {
super();
}
@Override
protected SystemsBean load(GameBeanDAO dao) {
List<SystemsApplyerBean> applyers = this.loadApplyers(dao);
this.systemsBean.setApplyers(applyers);
List<SystemsSystemBean> systems = this.loadSystems(dao);
this.systemsBean.setSystems(systems);
return this.systemsBean;
}
// ##############################################
private List<SystemsApplyerBean> loadApplyers(GameBeanDAO dao) {
Path file = dao.getFile();
Path parent = file.getParent();
List<String> paths = dao.getList("applyers");
List<SystemsApplyerBean> applyers = new ArrayList<>();
paths.stream()
.map(parent::resolve)
.map(this::loadApplyer)
.forEach(applyers::add);
return applyers;
}
private SystemsApplyerBean loadApplyer(Path path) {
SystemsApplyerBeanLoader beanLoader = this.applyerLoaderProvider.get();
SystemsApplyerBean applyerBean = beanLoader.load(path);
return applyerBean;
}
// ##############################################
private List<SystemsSystemBean> loadSystems(GameBeanDAO dao) {
Path file = dao.getFile();
Path parent = file.getParent();
List<String> paths = dao.getList("systems");
List<SystemsSystemBean> systems = new ArrayList<>();
paths.stream()
.map(parent::resolve)
.map(this::loadSystem)
.forEach(systems::add);
return systems;
}
private SystemsSystemBean loadSystem(Path path) {
SystemsSystemBeanLoader beanLoader = this.systemLoaderProvider.get();
SystemsSystemBean systemBean = beanLoader.load(path);
return systemBean;
}
}
| dayaftereh/master-of-orion | src/main/java/de/nl/moo/data/loader/systems/SystemsBeanLoader.java | Java | apache-2.0 | 2,608 |
package com.github.setial.intellijjavadocs.configuration.impl;
import com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration;
import com.github.setial.intellijjavadocs.exception.SetupTemplateException;
import com.github.setial.intellijjavadocs.model.settings.JavaDocSettings;
import com.github.setial.intellijjavadocs.model.settings.Level;
import com.github.setial.intellijjavadocs.model.settings.Mode;
import com.github.setial.intellijjavadocs.model.settings.Visibility;
import com.github.setial.intellijjavadocs.template.DocTemplateManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.Messages;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashSet;
import java.util.Set;
import static com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration.COMPONENT_CONFIG_PATH;
/**
* The type Java doc configuration impl.
*
* @author Sergey Timofiychuk
*/
@State(
name = JavaDocConfiguration.COMPONENT_NAME,
storages = {
@Storage(value = COMPONENT_CONFIG_PATH)
}
)
public class JavaDocConfigurationImpl implements JavaDocConfiguration, PersistentStateComponent<Element> {
public static final String JAVADOCS_PLUGIN_TITLE_MSG = "Javadocs plugin";
private static final Logger LOGGER = Logger.getInstance(JavaDocConfigurationImpl.class);
private JavaDocSettings settings;
private DocTemplateManager templateManager;
private boolean loadedStoredConfig = false;
/**
* Instantiates a new Java doc configuration object.
*/
public JavaDocConfigurationImpl() {
templateManager = ServiceManager.getService(DocTemplateManager.class);
initSettings();
}
@Override
public JavaDocSettings getConfiguration() {
return settings;
}
@Nullable
@Override
public Element getState() {
Element root = new Element("JAVA_DOC_SETTINGS_PLUGIN");
if (settings != null) {
settings.addToDom(root);
loadedStoredConfig = true;
}
return root;
}
@Override
public void loadState(@NotNull Element javaDocSettings) {
settings = new JavaDocSettings(javaDocSettings);
setupTemplates();
loadedStoredConfig = true;
}
private void initSettings() {
if (!loadedStoredConfig) {
// setup default values
settings = new JavaDocSettings();
Set<Level> levels = new HashSet<>();
levels.add(Level.TYPE);
levels.add(Level.METHOD);
levels.add(Level.FIELD);
Set<Visibility> visibilities = new HashSet<>();
visibilities.add(Visibility.PUBLIC);
visibilities.add(Visibility.PROTECTED);
visibilities.add(Visibility.DEFAULT);
settings.getGeneralSettings().setOverriddenMethods(false);
settings.getGeneralSettings().setSplittedClassName(true);
settings.getGeneralSettings().setMode(Mode.UPDATE);
settings.getGeneralSettings().setLevels(levels);
settings.getGeneralSettings().setVisibilities(visibilities);
settings.getTemplateSettings().setClassTemplates(templateManager.getClassTemplates());
settings.getTemplateSettings().setConstructorTemplates(templateManager.getConstructorTemplates());
settings.getTemplateSettings().setMethodTemplates(templateManager.getMethodTemplates());
settings.getTemplateSettings().setFieldTemplates(templateManager.getFieldTemplates());
}
}
@Override
public void setupTemplates() {
try {
templateManager.setClassTemplates(settings.getTemplateSettings().getClassTemplates());
templateManager.setConstructorTemplates(settings.getTemplateSettings().getConstructorTemplates());
templateManager.setMethodTemplates(settings.getTemplateSettings().getMethodTemplates());
templateManager.setFieldTemplates(settings.getTemplateSettings().getFieldTemplates());
} catch (SetupTemplateException e) {
LOGGER.error(e);
Messages.showErrorDialog("Javadocs plugin is not available, cause: " + e.getMessage(), JAVADOCS_PLUGIN_TITLE_MSG);
}
}
}
| setial/intellij-javadocs | src/main/java/com/github/setial/intellijjavadocs/configuration/impl/JavaDocConfigurationImpl.java | Java | apache-2.0 | 4,548 |