code (stringlengths 3-1.04M) | repo_name (stringlengths 5-109) | path (stringlengths 6-306) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 3-1.04M)
---|---|---|---|---|---|
package com.siyeh.ig.assignment;
import com.IGInspectionTestCase;
public class AssignmentToMethodParameterInspectionTest extends IGInspectionTestCase {
public void test() throws Exception {
final AssignmentToMethodParameterInspection inspection =
new AssignmentToMethodParameterInspection();
inspection.ignoreTransformationOfOriginalParameter = true;
doTest("com/siyeh/igtest/assignment/method_parameter",
inspection);
}
} | jexp/idea2 | plugins/InspectionGadgets/testsrc/com/siyeh/ig/assignment/AssignmentToMethodParameterInspectionTest.java | Java | apache-2.0 | 489 |
/**
* Copyright 2014
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:javachenpeng@yahoo.com
* @version 0.4.2
*/
package loon.core.graphics.component.table;
import loon.core.graphics.LComponent;
import loon.core.graphics.LContainer;
import loon.core.graphics.device.LColor;
import loon.core.graphics.opengl.GLEx;
import loon.core.graphics.opengl.LTexture;
import loon.utils.collection.ArrayList;
public class TableLayout extends LContainer {
private TableLayoutRow[] tableRows;
private boolean grid = true;
public TableLayout(int x, int y, int w, int h) {
this(x, y, w, h, 4, 4);
}
public TableLayout(int x, int y, int w, int h, int cols, int rows) {
super(x, y, w, h);
prepareTable(cols, rows);
}
protected void renderComponents(GLEx g) {
for (int i = 0; i < getComponentCount(); i++) {
getComponents()[i].createUI(g);
}
if (grid) {
for (int i = 0; i < tableRows.length; i++) {
tableRows[i].paint(g);
}
g.drawRect(getX(), getY(), getWidth(), getHeight(), LColor.gray);
}
}
@Override
public void createUI(GLEx g, int x, int y, LComponent component,
LTexture[] buttonImage) {
}
private void prepareTable(int cols, int rows) {
tableRows = new TableLayoutRow[rows];
if (rows > 0 && cols > 0) {
int rowHeight = getHeight() / rows;
for (int i = 0; i < rows; i++) {
tableRows[i] = new TableLayoutRow(x(), y() + (i * rowHeight),
getWidth(), rowHeight, cols);
}
}
}
public void setComponent(LComponent component, int col, int row) {
add(component);
remove(tableRows[row].getComponent(col));
tableRows[row].setComponent(component, col);
}
public void removeComponent(int col, int row) {
remove(tableRows[row].getComponent(col));
tableRows[row].setComponent(null, col);
}
public void addRow(int column, int position) {
ArrayList newRows = new ArrayList();
int newRowHeight = getHeight() / (tableRows.length + 1);
if (canAddRow(newRowHeight)) {
if (position == 0) {
newRows.add(new TableLayoutRow(x(), y(), getWidth(),
newRowHeight, column));
}
for (int i = 0; i < tableRows.length; i++) {
if (i == position && position != 0) {
newRows.add(new TableLayoutRow(x(), y(), getWidth(),
newRowHeight, column));
}
newRows.add(tableRows[i]);
}
if (position == tableRows.length && position != 0) {
newRows.add(new TableLayoutRow(x(), y(), getWidth(),
newRowHeight, column));
}
for (int i = 0; i < newRows.size(); i++) {
((TableLayoutRow) newRows.get(i))
.setY(y() + (i * newRowHeight));
((TableLayoutRow) newRows.get(i)).setHeight(newRowHeight);
}
tableRows = (TableLayoutRow[]) newRows.toArray();
}
}
public void addRow(int column) {
addRow(column, tableRows.length);
}
private boolean canAddRow(int newRowHeight) {
if (tableRows != null && tableRows.length > 0) {
return tableRows[0].canSetHeight(newRowHeight);
}
return true;
}
public boolean setColumnWidth(int width, int col, int row) {
return tableRows[row].setColumnWidth(width, col);
}
public boolean setColumnHeight(int height, int row) {
if (!tableRows[row].canSetHeight(height)) {
return false;
}
tableRows[row].setHeight(height);
return true;
}
public void setMargin(int leftMargin, int rightMargin, int topMargin,
int bottomMargin, int col, int row) {
tableRows[row].getColumn(col).setMargin(leftMargin, rightMargin,
topMargin, bottomMargin);
}
public void setAlignment(int horizontalAlignment, int verticalAlignment,
int col, int row) {
tableRows[row].getColumn(col).setHorizontalAlignment(
horizontalAlignment);
tableRows[row].getColumn(col).setVerticalAlignment(verticalAlignment);
}
public int getRows() {
return tableRows.length;
}
public int getColumns(int row) {
return tableRows[row].getCoulumnSize();
}
@Override
public void setWidth(int width) {
boolean couldShrink = true;
for (int i = 0; i < tableRows.length; i++) {
if (!tableRows[i].setWidth(width)) {
couldShrink = false;
}
}
if (couldShrink) {
super.setWidth(width);
}
}
@Override
public void setHeight(int height) {
super.setHeight(height);
for (int i = 0; i < tableRows.length; i++) {
tableRows[i].setHeight(height);
}
}
public boolean isGrid() {
return grid;
}
public void setGrid(boolean grid) {
this.grid = grid;
}
@Override
public String getUIName() {
return "TableLayout";
}
}
| cping/LGame | Java/old/OpenGL-1.0(old_ver)/Loon-backend-JavaSE/src/loon/core/graphics/component/table/TableLayout.java | Java | apache-2.0 | 4,975 |
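A brief usage sketch for the container above (illustrative only: the coordinates, grid dimensions, and the child component are assumptions, not taken from the loon sources):
void buildTable(LComponent child) {
    // x, y, width, height, columns, rows
    TableLayout layout = new TableLayout(0, 0, 480, 320, 3, 2);
    layout.setComponent(child, 0, 0);   // place the child in column 0 of row 0
    layout.setMargin(4, 4, 2, 2, 0, 0); // left/right/top/bottom margins for that cell
    layout.addRow(3);                   // append a 3-column row at the bottom
    layout.setGrid(true);               // keep the debug grid visible
}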
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine.evaluation.expression;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.EvaluateRuntimeException;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.openapi.diagnostic.Logger;
import java.util.HashMap;
import com.sun.jdi.Value;
import java.util.Map;
/**
* @author lex
*/
public class CodeFragmentEvaluator extends BlockStatementEvaluator{
private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.evaluation.expression.CodeFragmentEvaluator");
private final CodeFragmentEvaluator myParentFragmentEvaluator;
private final Map<String, Object> mySyntheticLocals = new HashMap<>();
public CodeFragmentEvaluator(CodeFragmentEvaluator parentFragmentEvaluator) {
super(null);
myParentFragmentEvaluator = parentFragmentEvaluator;
}
public void setStatements(Evaluator[] evaluators) {
myStatements = evaluators;
}
public Value getValue(String localName, VirtualMachineProxyImpl vm) throws EvaluateException {
if(!mySyntheticLocals.containsKey(localName)) {
if(myParentFragmentEvaluator != null){
return myParentFragmentEvaluator.getValue(localName, vm);
} else {
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName));
}
}
Object value = mySyntheticLocals.get(localName);
if(value instanceof Value) {
return (Value)value;
}
else if(value == null) {
return null;
}
else if(value instanceof Boolean) {
return vm.mirrorOf(((Boolean)value).booleanValue());
}
else if(value instanceof Byte) {
return vm.mirrorOf(((Byte)value).byteValue());
}
else if(value instanceof Character) {
return vm.mirrorOf(((Character)value).charValue());
}
else if(value instanceof Short) {
return vm.mirrorOf(((Short)value).shortValue());
}
else if(value instanceof Integer) {
return vm.mirrorOf(((Integer)value).intValue());
}
else if(value instanceof Long) {
return vm.mirrorOf(((Long)value).longValue());
}
else if(value instanceof Float) {
return vm.mirrorOf(((Float)value).floatValue());
}
else if(value instanceof Double) {
return vm.mirrorOf(((Double)value).doubleValue());
}
else if(value instanceof String) {
return vm.mirrorOf((String)value);
}
else {
LOG.error("unknown default initializer type " + value.getClass().getName());
return null;
}
}
private boolean hasValue(String localName) {
if(!mySyntheticLocals.containsKey(localName)) {
if(myParentFragmentEvaluator != null){
return myParentFragmentEvaluator.hasValue(localName);
} else {
return false;
}
} else {
return true;
}
}
public void setInitialValue(String localName, Object value) {
LOG.assertTrue(!(value instanceof Value), "use setValue for jdi values");
if(hasValue(localName)) {
throw new EvaluateRuntimeException(
EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.already.declared", localName)));
}
mySyntheticLocals.put(localName, value);
}
public void setValue(String localName, Value value) throws EvaluateException {
if(!mySyntheticLocals.containsKey(localName)) {
if(myParentFragmentEvaluator != null){
myParentFragmentEvaluator.setValue(localName, value);
} else {
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName));
}
}
else {
mySyntheticLocals.put(localName, value);
}
}
}
| mglukhikh/intellij-community | java/debugger/impl/src/com/intellij/debugger/engine/evaluation/expression/CodeFragmentEvaluator.java | Java | apache-2.0 | 4,502 |
package com.kit.imagelib.imagelooker;
public interface OnPageSelectedListener {
public void onPageSelected();
}
| BigAppOS/BigApp_Discuz_Android | libs/ImageLib/src/com/kit/imagelib/imagelooker/OnPageSelectedListener.java | Java | apache-2.0 | 126 |
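A trivial implementation sketch; the pager component that would register this callback is assumed and not shown in this file:
OnPageSelectedListener listener = new OnPageSelectedListener() {
    @Override
    public void onPageSelected() {
        // e.g. refresh page indicators for the newly visible page
    }
};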
/*******************************************************************************
* Copyright 2015 Unicon (R) Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*******************************************************************************/
/**
*
*/
package org.apereo.lai;
import java.io.Serializable;
/**
* @author ggilbert
*
*/
public interface Institution extends Serializable {
String getName();
String getKey();
String getSecret();
}
| ccooper1/OpenDash | src/main/java/org/apereo/lai/Institution.java | Java | apache-2.0 | 967 |
package org.goodsManagement.service.impl.PoiUtils;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.goodsManagement.po.GetGoodsDto;
import org.goodsManagement.vo.GetGoodsVO;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
* Created by lifei on 2015/9/23.
*/
@Component
public class GetGoodsToExcel {
/*public static void main(String[] args){
List<GetGoodsVO> list = new ArrayList<GetGoodsVO>();
GetGoodsVO a1 = new GetGoodsVO();
a1.setStaffname("大黄");
a1.setGoodname("屎");
a1.setGetnumber(2);
a1.setGoodtype("一大坨");
list.add(a1);
GetGoodsVO a2 = new GetGoodsVO();
a2.setStaffname("小黄");
a2.setGoodname("屎");
a2.setGetnumber(2);
a2.setGoodtype("一桶");
list.add(a2);
String path = "C:\\Users\\lifei\\Desktop\\getgood.xls";
GetGoodsToExcel.toExcel(list,path);
System.out.println("导出完成");
}*/
/**
 *
 * @param list
 *            the collection of staff goods-collection records from the database table
 * @param path
 *            the path of the file to write to
 */
public void addtoExcel(List<GetGoodsVO> list,String path){
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet = wb.createSheet("Outgoods");
String[] n = { "姓名", "物品名称号", "物品型号", "物品数量" };
Object[][] value = new Object[list.size() + 1][4];
for (int m = 0; m < n.length; m++) {
value[0][m] = n[m];
}
for (int i = 0; i < list.size(); i++) {
GetGoodsVO getGoodsVO = list.get(i);
value[i + 1][0] = getGoodsVO.getStaffname();
value[i + 1][1] = getGoodsVO.getGoodname();
value[i + 1][2] = getGoodsVO.getGoodtype();
value[i + 1][3] = getGoodsVO.getGetnumber();
}
ExcelUtils.writeArrayToExcel(wb, sheet, list.size() + 1, 4, value);
ExcelUtils.writeWorkbook(wb, path);
}
}
| sunshine-life/GoodsManagement | src/main/java/org/goodsManagement/service/impl/PoiUtils/GetGoodsToExcel.java | Java | apache-2.0 | 2,120 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean.validator.springboot;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.CamelContext;
import org.apache.camel.component.bean.validator.BeanValidatorComponent;
import org.apache.camel.spi.ComponentCustomizer;
import org.apache.camel.spi.HasId;
import org.apache.camel.spring.boot.CamelAutoConfiguration;
import org.apache.camel.spring.boot.ComponentConfigurationProperties;
import org.apache.camel.spring.boot.util.CamelPropertiesHelper;
import org.apache.camel.spring.boot.util.ConditionalOnCamelContextAndAutoConfigurationBeans;
import org.apache.camel.spring.boot.util.GroupCondition;
import org.apache.camel.spring.boot.util.HierarchicalPropertiesEvaluator;
import org.apache.camel.support.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
/**
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@Configuration
@Conditional({ConditionalOnCamelContextAndAutoConfigurationBeans.class,
BeanValidatorComponentAutoConfiguration.GroupConditions.class})
@AutoConfigureAfter(CamelAutoConfiguration.class)
@EnableConfigurationProperties({ComponentConfigurationProperties.class,
BeanValidatorComponentConfiguration.class})
public class BeanValidatorComponentAutoConfiguration {
private static final Logger LOGGER = LoggerFactory
.getLogger(BeanValidatorComponentAutoConfiguration.class);
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CamelContext camelContext;
@Autowired
private BeanValidatorComponentConfiguration configuration;
@Autowired(required = false)
private List<ComponentCustomizer<BeanValidatorComponent>> customizers;
static class GroupConditions extends GroupCondition {
public GroupConditions() {
super("camel.component", "camel.component.bean-validator");
}
}
@Lazy
@Bean(name = "bean-validator-component")
@ConditionalOnMissingBean(BeanValidatorComponent.class)
public BeanValidatorComponent configureBeanValidatorComponent()
throws Exception {
BeanValidatorComponent component = new BeanValidatorComponent();
component.setCamelContext(camelContext);
Map<String, Object> parameters = new HashMap<>();
IntrospectionSupport.getProperties(configuration, parameters, null,
false);
for (Map.Entry<String, Object> entry : parameters.entrySet()) {
Object value = entry.getValue();
Class<?> paramClass = value.getClass();
if (paramClass.getName().endsWith("NestedConfiguration")) {
Class nestedClass = null;
try {
nestedClass = (Class) paramClass.getDeclaredField(
"CAMEL_NESTED_CLASS").get(null);
HashMap<String, Object> nestedParameters = new HashMap<>();
IntrospectionSupport.getProperties(value, nestedParameters,
null, false);
Object nestedProperty = nestedClass.newInstance();
CamelPropertiesHelper.setCamelProperties(camelContext,
nestedProperty, nestedParameters, false);
entry.setValue(nestedProperty);
} catch (NoSuchFieldException e) {
}
}
}
CamelPropertiesHelper.setCamelProperties(camelContext, component,
parameters, false);
if (ObjectHelper.isNotEmpty(customizers)) {
for (ComponentCustomizer<BeanValidatorComponent> customizer : customizers) {
boolean useCustomizer = (customizer instanceof HasId)
? HierarchicalPropertiesEvaluator.evaluate(
applicationContext.getEnvironment(),
"camel.component.customizer",
"camel.component.bean-validator.customizer",
((HasId) customizer).getId())
: HierarchicalPropertiesEvaluator.evaluate(
applicationContext.getEnvironment(),
"camel.component.customizer",
"camel.component.bean-validator.customizer");
if (useCustomizer) {
LOGGER.debug("Configure component {}, with customizer {}",
component, customizer);
customizer.customize(component);
}
}
}
return component;
}
} | kevinearls/camel | platforms/spring-boot/components-starter/camel-bean-validator-starter/src/main/java/org/apache/camel/component/bean/validator/springboot/BeanValidatorComponentAutoConfiguration.java | Java | apache-2.0 | 6,273 |
/*
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2019
*/
package com.ibm.streamsx.topology.internal.logging;
import java.util.logging.Level;
import java.util.logging.Logger;
public interface Logging {
/**
 * Sets the root logging level from a Python logging integer level.
 * @param levelS the Python logging level as a decimal integer string
 */
public static void setRootLevels(String levelS) {
int loggingLevel = Integer.valueOf(levelS);
Level level;
if (loggingLevel >= 40) {        // Python ERROR/CRITICAL
level = Level.SEVERE;
} else if (loggingLevel >= 30) { // Python WARNING
level = Level.WARNING;
} else if (loggingLevel >= 20) { // Python INFO
level = Level.CONFIG;
} else {                         // Python DEBUG and below
level = Level.FINE;
}
Logger.getLogger("").setLevel(level);
}
}
| ddebrunner/streamsx.topology | java/src/com/ibm/streamsx/topology/internal/logging/Logging.java | Java | apache-2.0 | 800 |
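For example, forwarding Python's logging.WARNING (30) through the mapping above (the level is assumed to arrive as a decimal string):
Logging.setRootLevels("30"); // the root java.util.logging logger is now at Level.WARNING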
package wikokit.base.wikt.db;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/** Decompresses a zipped file into a target directory.
 *
 * @see http://www.jondev.net/articles/Unzipping_Files_with_Android_%28Programmatically%29
 */
public class Decompressor {
private String _zipFile;
private String _location;
public Decompressor(String zipFile, String location) {
_zipFile = zipFile;
_location = location;
_dirChecker("");
}
public void unzip() {
try {
FileInputStream fin = new FileInputStream(_zipFile);
ZipInputStream zin = new ZipInputStream(fin);
ZipEntry ze = null;
while ((ze = zin.getNextEntry()) != null) {
Log.v("Decompress", "Unzipping " + ze.getName());
if(ze.isDirectory()) {
_dirChecker(ze.getName());
} else {
FileOutputStream fout = new FileOutputStream(_location + ze.getName());
for (int c = zin.read(); c != -1; c = zin.read()) {
fout.write(c);
}
zin.closeEntry();
fout.close();
}
}
zin.close();
} catch(Exception e) {
Log.e("Decompress", "unzip", e);
}
}
private void _dirChecker(String dir) {
File f = new File(_location + dir);
if(!f.isDirectory()) {
f.mkdirs();
}
}
} | componavt/wikokit | android/common_wiki_android/src/wikokit/base/wikt/db/Decompressor.java | Java | apache-2.0 | 1,533 |
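A usage sketch for the class above; the paths are illustrative assumptions. Note that `_location + ze.getName()` is plain string concatenation, so the target directory should end with a separator, and entry names are not validated against path traversal (the classic zip-slip caveat):
void unpackDictionary() {
    String zip = "/sdcard/Download/dict.zip";   // hypothetical archive path
    String out = "/sdcard/Download/unpacked/";  // trailing separator required
    new Decompressor(zip, out).unzip();
}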
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms.tx;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Simple unit test for transaction client EIP pattern and JMS.
*/
public class JMSTransactionalClientWithRollbackTest extends CamelSpringTestSupport {
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.xml");
}
@Test
public void testTransactionSuccess() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Bye World");
// success at 3rd attempt
mock.message(0).header("count").isEqualTo(3);
template.sendBody("activemq:queue:okay", "Hello World");
mock.assertIsSatisfied();
}
public static class MyProcessor implements Processor {
private int count;
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody("Bye World");
exchange.getIn().setHeader("count", ++count);
}
}
} | Fabryprog/camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.java | Java | apache-2.0 | 2,206 |
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the Apache License Version 2.0.
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
package com.microsoft.uprove;
import java.util.Arrays;
/**
* Specifies a U-Prove token.
*/
public class UProveToken {
private byte[] issuerParametersUID;
private byte[] publicKey;
private byte[] tokenInformation;
private byte[] proverInformation;
private byte[] sigmaZ;
private byte[] sigmaC;
private byte[] sigmaR;
private boolean isDeviceProtected = false;
/**
* Constructs a new U-Prove token.
*/
public UProveToken() {
super();
}
/**
* Constructs a new U-Prove token.
* @param issuerParametersUID an issuer parameters UID.
* @param publicKey a public key.
* @param tokenInformation a token information value.
* @param proverInformation a prover information value.
* @param sigmaZ a sigmaZ value.
* @param sigmaC a sigmaC value.
* @param sigmaR a sigmaR value.
* @param isDeviceProtected indicates if the token is Device-protected.
*/
public UProveToken(byte[] issuerParametersUID, byte[] publicKey,
byte[] tokenInformation, byte[] proverInformation,
byte[] sigmaZ, byte[] sigmaC,
byte[] sigmaR, boolean isDeviceProtected) {
super();
this.issuerParametersUID = issuerParametersUID;
this.publicKey = publicKey;
this.tokenInformation = tokenInformation;
this.proverInformation = proverInformation;
this.sigmaZ = sigmaZ;
this.sigmaC = sigmaC;
this.sigmaR = sigmaR;
this.isDeviceProtected = isDeviceProtected;
}
/**
* Gets the issuer parameters UID value.
* @return the issuerParameters UID value.
*/
public byte[] getIssuerParametersUID() {
return issuerParametersUID;
}
/**
* Sets the issuer parameters UID value.
* @param issuerParametersUID the issuerParameters UID value to set.
*/
public void setIssuerParametersUID(byte[] issuerParametersUID) {
this.issuerParametersUID = issuerParametersUID;
}
/**
* Gets the public key value.
* @return the publicKey value.
*/
public byte[] getPublicKey() {
return publicKey;
}
/**
* Sets the public key value.
* @param publicKey the public key value to set.
*/
public void setPublicKey(byte[] publicKey) {
this.publicKey = publicKey;
}
/**
* Gets the token information value.
* @return the token information value.
*/
public byte[] getTokenInformation() {
return tokenInformation;
}
/**
* Sets the token information value.
* @param tokenInformation the token information value to set.
*/
public void setTokenInformation(byte[] tokenInformation) {
this.tokenInformation = tokenInformation;
}
/**
* Gets the prover information value.
* @return the prover information value.
*/
public byte[] getProverInformation() {
return proverInformation;
}
/**
* Sets the prover information value.
* @param proverInformation the prover information value to set.
*/
public void setProverInformation(byte[] proverInformation) {
this.proverInformation = proverInformation;
}
/**
* Gets the sigmaZ value.
* @return the sigmaZ value.
*/
public byte[] getSigmaZ() {
return sigmaZ;
}
/**
* Sets the sigmaZ value.
* @param sigmaZ the sigmaZ value to set.
*/
public void setSigmaZ(byte[] sigmaZ) {
this.sigmaZ = sigmaZ;
}
/**
* Gets the sigmaC value.
* @return the sigmaC value.
*/
public byte[] getSigmaC() {
return sigmaC;
}
/**
* Sets the sigmaC value.
* @param sigmaC the sigmaC value to set.
*/
public void setSigmaC(byte[] sigmaC) {
this.sigmaC = sigmaC;
}
/**
* Gets the sigmaR value.
* @return the sigmaR value.
*/
public byte[] getSigmaR() {
return sigmaR;
}
/**
* Sets the sigmaR value.
* @param sigmaR the sigmaR value to set.
*/
public void setSigmaR(byte[] sigmaR) {
this.sigmaR = sigmaR;
}
/**
* Returns true if the token is Device-protected, false otherwise.
* @return the Device-protected boolean.
*/
boolean isDeviceProtected() {
return isDeviceProtected;
}
/**
* Sets the boolean indicating if the token is Device-protected.
* @param isDeviceProtected true if the token is Device-protected.
*/
void setIsDeviceProtected(boolean isDeviceProtected) {
this.isDeviceProtected = isDeviceProtected;
}
/**
* Indicates whether some other object is "equal to" this one.
* @param o the reference object with which to compare.
* @return <code>true</code> if this object is the same as the
* <code>o</code> argument; <code>false</code> otherwise.
*/
public boolean equals(final Object o) {
if (o == this) {
return true;
}
if (!(o instanceof UProveToken)) {
return false;
}
UProveToken upt = (UProveToken) o;
return
Arrays.equals(this.issuerParametersUID, upt.issuerParametersUID) &&
Arrays.equals(this.publicKey, upt.publicKey) &&
Arrays.equals(this.tokenInformation, upt.tokenInformation) &&
Arrays.equals(this.proverInformation, upt.proverInformation) &&
Arrays.equals(this.sigmaZ, upt.sigmaZ) &&
Arrays.equals(this.sigmaC, upt.sigmaC) &&
Arrays.equals(this.sigmaR, upt.sigmaR) &&
this.isDeviceProtected == upt.isDeviceProtected;
}
/**
* Returns a hash code value for the object.
* @return a hash code value for the object.
*/
public int hashCode() {
int result = 237;
result = 201 * result + Arrays.hashCode(this.issuerParametersUID);
result = 201 * result + Arrays.hashCode(this.publicKey);
result = 201 * result + Arrays.hashCode(this.tokenInformation);
result = 201 * result + Arrays.hashCode(this.proverInformation);
result = 201 * result + Arrays.hashCode(this.sigmaZ);
result = 201 * result + Arrays.hashCode(this.sigmaC);
result = 201 * result + Arrays.hashCode(this.sigmaR);
result = result + (this.isDeviceProtected ? 201 : 0);
return result;
}
}
| albdum/uprove | src/main/java/com/microsoft/uprove/UProveToken.java | Java | apache-2.0 | 6,515 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.olio.webapp.cache;
/**
* The cache interface provides all operations necessary for the cache.
* We could have extended java.util.Map but that would make a lot of
* unnecessary work for the scope of this project. We can always implement that
* interface later if desired.
*/
public interface Cache {
/**
* Gets the cached value based on a key.
* @param key The key
* @return The cached object, or null if none is available
*/
Object get(String key);
/**
* Sets a cached item using a key.
* @param key The key
* @param value The object to cache.
*/
void put(String key, Object value);
/**
* Sets a cached item using a key.
* @param key The key
* @param value The object to cache.
* @param timeToLive Time to cache this object in seconds
*/
void put(String key, Object value, long timeToLive);
/**
* Invalidates a cached item using a key
* @param key
* @return success
*/
boolean invalidate(String key);
/**
 * Checks if the cache needs a refresh, based on the presence of the cached
 * object and of the refresh semaphore.
 * @param key The key
 * @param cacheObjPresent whether the cached object for this key already exists
 * @return true if the cached object needs a refresh
 */
boolean needsRefresh(boolean cacheObjPresent, String key);
void doneRefresh (String key, long timeToNextRefresh) throws CacheException;
boolean isLocal();
}
| shanti/olio | webapp/java/trunk/ws/apps/webapp/src/java/org/apache/olio/webapp/cache/Cache.java | Java | apache-2.0 | 2,298 |
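To make the contract above concrete, here is a minimal in-memory sketch (illustrative only: it is not part of Olio, it reduces the refresh-semaphore semantics to a simple in-progress set, and it assumes CacheException exposes a String constructor):
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class SimpleMapCache implements Cache {

    private static final class Entry {
        final Object value;
        final long expiresAt; // epoch millis; Long.MAX_VALUE means "never"
        Entry(Object value, long expiresAt) {
            this.value = value;
            this.expiresAt = expiresAt;
        }
    }

    private final Map<String, Entry> store = new HashMap<String, Entry>();
    private final Set<String> refreshing = new HashSet<String>();

    public synchronized Object get(String key) {
        Entry e = store.get(key);
        if (e == null) {
            return null;
        }
        if (System.currentTimeMillis() > e.expiresAt) {
            store.remove(key); // lazily drop expired entries
            return null;
        }
        return e.value;
    }

    public synchronized void put(String key, Object value) {
        store.put(key, new Entry(value, Long.MAX_VALUE));
    }

    public synchronized void put(String key, Object value, long timeToLive) {
        // timeToLive is in seconds, per the interface javadoc
        store.put(key, new Entry(value, System.currentTimeMillis() + timeToLive * 1000));
    }

    public synchronized boolean invalidate(String key) {
        return store.remove(key) != null;
    }

    public synchronized boolean needsRefresh(boolean cacheObjPresent, String key) {
        // grant the refresh to the first caller that sees the value missing
        if (!cacheObjPresent && !refreshing.contains(key)) {
            refreshing.add(key);
            return true;
        }
        return false;
    }

    public synchronized void doneRefresh(String key, long timeToNextRefresh)
            throws CacheException {
        // assumes CacheException(String) exists; adjust to the real signature
        if (!refreshing.remove(key)) {
            throw new CacheException("doneRefresh without needsRefresh for " + key);
        }
    }

    public boolean isLocal() {
        return true;
    }
}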
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.vlib.ejb;
import java.rmi.RemoteException;
import javax.ejb.EJBObject;
/**
* Remote interface for the BookQuery stateless session bean.
*
* @version $Id$
* @author Howard Lewis Ship
*
**/
public interface IBookQuery extends EJBObject
{
/**
* Returns the total number of result rows in the query.
*
**/
public int getResultCount() throws RemoteException;
/**
* Returns a selected subset of the results.
*
**/
public Book[] get(int offset, int length) throws RemoteException;
/**
* Performs a query of books with the matching title and (optionally) publisher.
*
* @param parameters defines subset of books to return.
* @param sortOrdering order of items in result set.
*
**/
public int masterQuery(MasterQueryParameters parameters, SortOrdering sortOrdering) throws RemoteException;
/**
* Queries on books owned by a given person.
*
**/
public int ownerQuery(Integer ownerPK, SortOrdering sortOrdering) throws RemoteException;
/**
* Queries on books held by a given person.
*
**/
public int holderQuery(Integer holderPK, SortOrdering sortOrdering) throws RemoteException;
/**
* Queries the list of books held by the borrower but not owned by the borrower.
*
**/
public int borrowerQuery(Integer borrowerPK, SortOrdering sortOrdering) throws RemoteException;
} | apache/tapestry3 | tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/IBookQuery.java | Java | apache-2.0 | 2,080 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch.impl.matcher.compiler;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.structuralsearch.*;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.MatcherImplUtil;
import com.intellij.structuralsearch.impl.matcher.PatternTreeContext;
import com.intellij.structuralsearch.impl.matcher.filters.LexicalNodesFilter;
import com.intellij.structuralsearch.impl.matcher.handlers.MatchingHandler;
import com.intellij.structuralsearch.impl.matcher.handlers.SubstitutionHandler;
import com.intellij.structuralsearch.impl.matcher.predicates.*;
import com.intellij.structuralsearch.plugin.ui.Configuration;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Compiles the handlers for usability
*/
public class PatternCompiler {
private static CompileContext lastTestingContext;
public static CompiledPattern compilePattern(final Project project, final MatchOptions options)
throws MalformedPatternException, NoMatchFoundException, UnsupportedOperationException {
FileType fileType = options.getFileType();
assert fileType instanceof LanguageFileType;
Language language = ((LanguageFileType)fileType).getLanguage();
StructuralSearchProfile profile = StructuralSearchUtil.getProfileByLanguage(language);
assert profile != null;
CompiledPattern result = profile.createCompiledPattern();
final String[] prefixes = result.getTypedVarPrefixes();
assert prefixes.length > 0;
final CompileContext context = new CompileContext(result, options, project);
if (ApplicationManager.getApplication().isUnitTestMode()) lastTestingContext = context;
try {
List<PsiElement> elements = compileByAllPrefixes(project, options, result, context, prefixes);
final CompiledPattern pattern = context.getPattern();
checkForUnknownVariables(pattern, elements);
pattern.setNodes(elements);
if (context.getSearchHelper().doOptimizing() && context.getSearchHelper().isScannedSomething()) {
final Set<PsiFile> set = context.getSearchHelper().getFilesSetToScan();
final List<PsiFile> filesToScan = new SmartList<>();
final GlobalSearchScope scope = (GlobalSearchScope)options.getScope();
for (final PsiFile file : set) {
if (!scope.contains(file.getVirtualFile())) {
continue;
}
filesToScan.add(file);
}
if (filesToScan.size() == 0) {
throw new NoMatchFoundException(SSRBundle.message("ssr.will.not.find.anything", scope.getDisplayName()));
}
result.setScope(new LocalSearchScope(PsiUtilCore.toPsiElementArray(filesToScan)));
}
} finally {
context.clear();
}
return result;
}
private static void checkForUnknownVariables(final CompiledPattern pattern, List<PsiElement> elements) {
for (PsiElement element : elements) {
element.accept(new PsiRecursiveElementWalkingVisitor() {
@Override
public void visitElement(PsiElement element) {
if (element.getUserData(CompiledPattern.HANDLER_KEY) != null) {
return;
}
super.visitElement(element);
if (!(element instanceof LeafElement) || !pattern.isTypedVar(element)) {
return;
}
final MatchingHandler handler = pattern.getHandler(pattern.getTypedVarString(element));
if (handler == null) {
throw new MalformedPatternException();
}
}
});
}
}
public static String getLastFindPlan() {
return ((TestModeOptimizingSearchHelper)lastTestingContext.getSearchHelper()).getSearchPlan();
}
@NotNull
private static List<PsiElement> compileByAllPrefixes(Project project,
MatchOptions options,
CompiledPattern pattern,
CompileContext context,
String[] applicablePrefixes) throws MalformedPatternException {
if (applicablePrefixes.length == 0) {
return Collections.emptyList();
}
List<PsiElement> elements = doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
if (elements.isEmpty()) {
return elements;
}
final PsiFile file = elements.get(0).getContainingFile();
if (file == null) {
return elements;
}
final PsiElement last = elements.get(elements.size() - 1);
final Pattern[] patterns = new Pattern[applicablePrefixes.length];
for (int i = 0; i < applicablePrefixes.length; i++) {
patterns[i] = Pattern.compile(StructuralSearchUtil.shieldRegExpMetaChars(applicablePrefixes[i]) + "\\w+\\b");
}
final int[] varEndOffsets = findAllTypedVarOffsets(file, patterns);
final int patternEndOffset = last.getTextRange().getEndOffset();
if (elements.size() == 0 ||
checkErrorElements(file, patternEndOffset, patternEndOffset, varEndOffsets, true) != Boolean.TRUE) {
return elements;
}
final int varCount = varEndOffsets.length;
final String[] prefixSequence = new String[varCount];
for (int i = 0; i < varCount; i++) {
prefixSequence[i] = applicablePrefixes[0];
}
final List<PsiElement> finalElements =
compileByPrefixes(project, options, pattern, context, applicablePrefixes, patterns, prefixSequence, 0);
return finalElements != null
? finalElements
: doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
}
@Nullable
private static List<PsiElement> compileByPrefixes(Project project,
MatchOptions options,
CompiledPattern pattern,
CompileContext context,
String[] applicablePrefixes,
Pattern[] substitutionPatterns,
String[] prefixSequence,
int index) throws MalformedPatternException {
if (index >= prefixSequence.length) {
final List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
if (elements.isEmpty()) {
return elements;
}
final PsiElement parent = elements.get(0).getParent();
final PsiElement last = elements.get(elements.size() - 1);
final int[] varEndOffsets = findAllTypedVarOffsets(parent.getContainingFile(), substitutionPatterns);
final int patternEndOffset = last.getTextRange().getEndOffset();
return checkErrorElements(parent, patternEndOffset, patternEndOffset, varEndOffsets, false) != Boolean.TRUE
? elements
: null;
}
String[] alternativeVariant = null;
for (String applicablePrefix : applicablePrefixes) {
prefixSequence[index] = applicablePrefix;
List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
if (elements.isEmpty()) {
return elements;
}
final PsiFile file = elements.get(0).getContainingFile();
if (file == null) {
return elements;
}
final int[] varEndOffsets = findAllTypedVarOffsets(file, substitutionPatterns);
final int offset = varEndOffsets[index];
final int patternEndOffset = elements.get(elements.size() - 1).getTextRange().getEndOffset();
final Boolean result = checkErrorElements(file, offset, patternEndOffset, varEndOffsets, false);
if (result == Boolean.TRUE) {
continue;
}
if (result == Boolean.FALSE || (result == null && alternativeVariant == null)) {
final List<PsiElement> finalElements =
compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, prefixSequence, index + 1);
if (finalElements != null) {
if (result == Boolean.FALSE) {
return finalElements;
}
alternativeVariant = new String[prefixSequence.length];
System.arraycopy(prefixSequence, 0, alternativeVariant, 0, prefixSequence.length);
}
}
}
return alternativeVariant != null ?
compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, alternativeVariant, index + 1) :
null;
}
@NotNull
private static int[] findAllTypedVarOffsets(final PsiFile file, final Pattern[] substitutionPatterns) {
final TIntHashSet result = new TIntHashSet();
file.accept(new PsiRecursiveElementWalkingVisitor() {
@Override
public void visitElement(PsiElement element) {
super.visitElement(element);
if (element instanceof LeafElement) {
final String text = element.getText();
for (Pattern pattern : substitutionPatterns) {
final Matcher matcher = pattern.matcher(text);
while (matcher.find()) {
result.add(element.getTextRange().getStartOffset() + matcher.end());
}
}
}
}
});
final int[] resultArray = result.toArray();
Arrays.sort(resultArray);
return resultArray;
}
/**
* False: there are no error elements before offset, except patternEndOffset
* Null: there are only error elements located exactly after template variables or at the end of the pattern
* True: otherwise
*/
@Nullable
private static Boolean checkErrorElements(PsiElement element,
final int offset,
final int patternEndOffset,
final int[] varEndOffsets,
final boolean strict) {
final TIntArrayList errorOffsets = new TIntArrayList();
final boolean[] containsErrorTail = {false};
final TIntHashSet varEndOffsetsSet = new TIntHashSet(varEndOffsets);
element.accept(new PsiRecursiveElementWalkingVisitor() {
@Override
public void visitErrorElement(PsiErrorElement element) {
super.visitErrorElement(element);
final int startOffset = element.getTextRange().getStartOffset();
if ((strict || !varEndOffsetsSet.contains(startOffset)) && startOffset != patternEndOffset) {
errorOffsets.add(startOffset);
}
if (startOffset == offset) {
containsErrorTail[0] = true;
}
}
});
for (int i = 0; i < errorOffsets.size(); i++) {
final int errorOffset = errorOffsets.get(i);
if (errorOffset <= offset) {
return true;
}
}
return containsErrorTail[0] ? null : false;
}
private interface PrefixProvider {
String getPrefix(int varIndex);
}
private static class ConstantPrefixProvider implements PrefixProvider {
private final String myPrefix;
ConstantPrefixProvider(String prefix) {
myPrefix = prefix;
}
@Override
public String getPrefix(int varIndex) {
return myPrefix;
}
}
private static class ArrayPrefixProvider implements PrefixProvider {
private final String[] myPrefixes;
ArrayPrefixProvider(String[] prefixes) {
myPrefixes = prefixes;
}
@Override
public String getPrefix(int varIndex) {
if (varIndex >= myPrefixes.length) return null;
return myPrefixes[varIndex];
}
}
private static List<PsiElement> doCompile(Project project,
MatchOptions options,
CompiledPattern result,
PrefixProvider prefixProvider,
CompileContext context) throws MalformedPatternException {
result.clearHandlers();
final StringBuilder buf = new StringBuilder();
Template template = TemplateManager.getInstance(project).createTemplate("","",options.getSearchPattern());
int segmentsCount = template.getSegmentsCount();
String text = template.getTemplateText();
int prevOffset = 0;
for(int i=0;i<segmentsCount;++i) {
final int offset = template.getSegmentOffset(i);
final String name = template.getSegmentName(i);
final String prefix = prefixProvider.getPrefix(i);
if (prefix == null) {
throw new MalformedPatternException();
}
buf.append(text.substring(prevOffset,offset));
buf.append(prefix);
buf.append(name);
MatchVariableConstraint constraint = options.getVariableConstraint(name);
if (constraint==null) {
// the variable constraints have not been edited yet
constraint = new MatchVariableConstraint();
constraint.setName( name );
options.addVariableConstraint(constraint);
}
SubstitutionHandler handler = result.createSubstitutionHandler(
name,
prefix + name,
constraint.isPartOfSearchResults(),
constraint.getMinCount(),
constraint.getMaxCount(),
constraint.isGreedy()
);
if(constraint.isWithinHierarchy()) {
handler.setSubtype(true);
}
if(constraint.isStrictlyWithinHierarchy()) {
handler.setStrictSubtype(true);
}
MatchPredicate predicate;
if (!StringUtil.isEmptyOrSpaces(constraint.getRegExp())) {
predicate = new RegExpPredicate(
constraint.getRegExp(),
options.isCaseSensitiveMatch(),
name,
constraint.isWholeWordsOnly(),
constraint.isPartOfSearchResults()
);
if (constraint.isInvertRegExp()) {
predicate = new NotPredicate(predicate);
}
addPredicate(handler,predicate);
}
if (constraint.isReference()) {
predicate = new ReferencePredicate( constraint.getNameOfReferenceVar() );
if (constraint.isInvertReference()) {
predicate = new NotPredicate(predicate);
}
addPredicate(handler,predicate);
}
addExtensionPredicates(options, constraint, handler);
addScriptConstraint(project, name, constraint, handler);
if (!StringUtil.isEmptyOrSpaces(constraint.getContainsConstraint())) {
predicate = new ContainsPredicate(name, constraint.getContainsConstraint());
if (constraint.isInvertContainsConstraint()) {
predicate = new NotPredicate(predicate);
}
addPredicate(handler,predicate);
}
if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
assert false;
}
prevOffset = offset;
}
MatchVariableConstraint constraint = options.getVariableConstraint(Configuration.CONTEXT_VAR_NAME);
if (constraint != null) {
SubstitutionHandler handler = result.createSubstitutionHandler(
Configuration.CONTEXT_VAR_NAME,
Configuration.CONTEXT_VAR_NAME,
constraint.isPartOfSearchResults(),
constraint.getMinCount(),
constraint.getMaxCount(),
constraint.isGreedy()
);
if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
MatchPredicate predicate = new WithinPredicate(constraint.getWithinConstraint(), options.getFileType(), project);
if (constraint.isInvertWithinConstraint()) {
predicate = new NotPredicate(predicate);
}
addPredicate(handler,predicate);
}
addExtensionPredicates(options, constraint, handler);
addScriptConstraint(project, Configuration.CONTEXT_VAR_NAME, constraint, handler);
}
buf.append(text.substring(prevOffset,text.length()));
PsiElement[] matchStatements;
try {
matchStatements = MatcherImplUtil.createTreeFromText(buf.toString(), PatternTreeContext.Block, options.getFileType(),
options.getDialect(), options.getPatternContext(), project, false);
if (matchStatements.length==0) throw new MalformedPatternException();
} catch (IncorrectOperationException e) {
throw new MalformedPatternException(e.getMessage());
}
NodeFilter filter = LexicalNodesFilter.getInstance();
GlobalCompilingVisitor compilingVisitor = new GlobalCompilingVisitor();
compilingVisitor.compile(matchStatements,context);
List<PsiElement> elements = new SmartList<>();
for (PsiElement matchStatement : matchStatements) {
if (!filter.accepts(matchStatement)) {
elements.add(matchStatement);
}
}
new DeleteNodesAction(compilingVisitor.getLexicalNodes()).run();
return elements;
}
private static void addExtensionPredicates(MatchOptions options, MatchVariableConstraint constraint, SubstitutionHandler handler) {
Set<MatchPredicate> predicates = new LinkedHashSet<>();
for (MatchPredicateProvider matchPredicateProvider : Extensions.getExtensions(MatchPredicateProvider.EP_NAME)) {
matchPredicateProvider.collectPredicates(constraint, handler.getName(), options, predicates);
}
for (MatchPredicate matchPredicate : predicates) {
addPredicate(handler, matchPredicate);
}
}
private static void addScriptConstraint(Project project, String name, MatchVariableConstraint constraint, SubstitutionHandler handler)
throws MalformedPatternException {
if (constraint.getScriptCodeConstraint()!= null && constraint.getScriptCodeConstraint().length() > 2) {
final String script = StringUtil.unquoteString(constraint.getScriptCodeConstraint());
final String problem = ScriptSupport.checkValidScript(script);
if (problem != null) {
throw new MalformedPatternException("Script constraint for " + constraint.getName() + " has problem " + problem);
}
addPredicate(handler, new ScriptPredicate(project, name, script));
}
}
private static void addPredicate(SubstitutionHandler handler, MatchPredicate predicate) {
if (handler.getPredicate()==null) {
handler.setPredicate(predicate);
} else {
handler.setPredicate(new AndPredicate(handler.getPredicate(), predicate));
}
}
} | apixandru/intellij-community | platform/structuralsearch/source/com/intellij/structuralsearch/impl/matcher/compiler/PatternCompiler.java | Java | apache-2.0 | 20,077 |
package examples.model;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;
@Entity
public class Department {
@Id
private int id;
private String name;
@OneToMany(mappedBy="department")
private Collection<Employee> employees;
public Department() {
employees = new ArrayList<Employee>();
}
public int getId() {
return id;
}
public String getName() {
return name;
}
public Collection<Employee> getEmployees() {
return employees;
}
public String toString() {
return "Department no: " + getId() +
", name: " + getName();
}
}
| velmuruganvelayutham/jpa | examples/Chapter7/02-namedQueryExample/src/model/examples/model/Department.java | Java | apache-2.0 | 759 |
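The mappedBy="department" attribute above implies that Employee is the owning side of the relationship; a minimal sketch of what that side looks like (field and accessor names are assumptions, the book's actual Employee entity may differ):
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;

@Entity
public class Employee {
    @Id
    private int id;
    private String name;
    @ManyToOne
    private Department department; // owning side referenced by mappedBy="department"

    public Department getDepartment() {
        return department;
    }
    public void setDepartment(Department department) {
        this.department = department;
    }
}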
package water.jdbc;
import org.junit.Test;
import org.junit.runner.RunWith;
import water.Key;
import water.fvec.Frame;
import water.runner.CloudSize;
import water.runner.H2ORunner;
import java.util.stream.IntStream;
import static org.junit.Assert.*;
@RunWith(H2ORunner.class)
@CloudSize(1)
public class SQLManagerKeyOverwiteTest {
@Test public void nextKeyHasRightPrefixAndPostfix() {
final String prefix = "foo";
final String postfix = "bar";
final Key<Frame> key = SQLManager.nextTableKey(prefix, postfix);
assertTrue(key.toString().startsWith(prefix));
assertTrue(key.toString().endsWith(postfix));
}
@Test public void nextKeyHasNoWhitechars() {
final Key<Frame> key = SQLManager.nextTableKey("f o o ", "b a r");
// String.contains() takes a literal, so checking for "\\W" was vacuous;
// assert instead that no whitespace survives in the generated key
assertFalse(key.toString().matches(".*\\s.*"));
}
@Test public void makeRandomKeyCreatesUniqueKeys() {
final int count = 1000;
final long actualCount = IntStream.range(0, count)
        .boxed()
        .parallel()
        .map(i -> SQLManager.nextTableKey("foo", "bar"))
        .map(Key::toString)
        .distinct() // without distinct() the count would always equal `count`, even with duplicate keys
        .count();
assertEquals(count, actualCount);
}
}
| michalkurka/h2o-3 | h2o-core/src/test/java/water/jdbc/SQLManagerKeyOverwiteTest.java | Java | apache-2.0 | 1,246 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.impl.statistics;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.executors.ExecutorGroup;
import com.intellij.execution.target.TargetEnvironmentAwareRunProfile;
import com.intellij.execution.target.TargetEnvironmentConfiguration;
import com.intellij.execution.target.TargetEnvironmentType;
import com.intellij.execution.target.TargetEnvironmentsManager;
import com.intellij.internal.statistic.IdeActivityDefinition;
import com.intellij.internal.statistic.StructuredIdeActivity;
import com.intellij.internal.statistic.eventLog.EventLogGroup;
import com.intellij.internal.statistic.eventLog.events.*;
import com.intellij.internal.statistic.eventLog.validator.ValidationResultType;
import com.intellij.internal.statistic.eventLog.validator.rules.EventContext;
import com.intellij.internal.statistic.eventLog.validator.rules.impl.CustomValidationRule;
import com.intellij.internal.statistic.service.fus.collectors.CounterUsagesCollector;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.concurrency.NonUrgentExecutor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import static com.intellij.execution.impl.statistics.RunConfigurationTypeUsagesCollector.createFeatureUsageData;
public final class RunConfigurationUsageTriggerCollector extends CounterUsagesCollector {
public static final String GROUP_NAME = "run.configuration.exec";
private static final EventLogGroup GROUP = new EventLogGroup(GROUP_NAME, 62);
private static final ObjectEventField ADDITIONAL_FIELD = EventFields.createAdditionalDataField(GROUP_NAME, "started");
private static final StringEventField EXECUTOR = EventFields.StringValidatedByCustomRule("executor", "run_config_executor");
private static final StringEventField TARGET =
EventFields.StringValidatedByCustomRule("target", RunConfigurationUsageTriggerCollector.RunTargetValidator.RULE_ID);
private static final EnumEventField<RunConfigurationFinishType> FINISH_TYPE =
EventFields.Enum("finish_type", RunConfigurationFinishType.class);
private static final IdeActivityDefinition ACTIVITY_GROUP = GROUP.registerIdeActivity(null,
new EventField<?>[]{ADDITIONAL_FIELD, EXECUTOR,
TARGET,
RunConfigurationTypeUsagesCollector.FACTORY_FIELD,
RunConfigurationTypeUsagesCollector.ID_FIELD,
EventFields.PluginInfo},
new EventField<?>[]{FINISH_TYPE});
public static final VarargEventId UI_SHOWN_STAGE = ACTIVITY_GROUP.registerStage("ui.shown");
@Override
public EventLogGroup getGroup() {
return GROUP;
}
@NotNull
public static StructuredIdeActivity trigger(@NotNull Project project,
@NotNull ConfigurationFactory factory,
@NotNull Executor executor,
@Nullable RunConfiguration runConfiguration) {
return ACTIVITY_GROUP
.startedAsync(project, () -> ReadAction.nonBlocking(() -> buildContext(project, factory, executor, runConfiguration))
.expireWith(project)
.submit(NonUrgentExecutor.getInstance()));
}
private static @NotNull List<EventPair<?>> buildContext(@NotNull Project project,
@NotNull ConfigurationFactory factory,
@NotNull Executor executor,
@Nullable RunConfiguration runConfiguration) {
final ConfigurationType configurationType = factory.getType();
List<EventPair<?>> eventPairs = createFeatureUsageData(configurationType, factory);
ExecutorGroup<?> group = ExecutorGroup.getGroupIfProxy(executor);
eventPairs.add(EXECUTOR.with(group != null ? group.getId() : executor.getId()));
if (runConfiguration instanceof FusAwareRunConfiguration) {
List<EventPair<?>> additionalData = ((FusAwareRunConfiguration)runConfiguration).getAdditionalUsageData();
ObjectEventData objectEventData = new ObjectEventData(additionalData);
eventPairs.add(ADDITIONAL_FIELD.with(objectEventData));
}
if (runConfiguration instanceof TargetEnvironmentAwareRunProfile) {
String defaultTargetName = ((TargetEnvironmentAwareRunProfile)runConfiguration).getDefaultTargetName();
if (defaultTargetName != null) {
TargetEnvironmentConfiguration target = TargetEnvironmentsManager.getInstance(project).getTargets().findByName(defaultTargetName);
if (target != null) {
eventPairs.add(TARGET.with(target.getTypeId()));
}
}
}
return eventPairs;
}
public static void logProcessFinished(@Nullable StructuredIdeActivity activity,
RunConfigurationFinishType finishType) {
if (activity != null) {
activity.finished(() -> Collections.singletonList(FINISH_TYPE.with(finishType)));
}
}
public static class RunConfigurationExecutorUtilValidator extends CustomValidationRule {
@Override
public boolean acceptRuleId(@Nullable String ruleId) {
return "run_config_executor".equals(ruleId);
}
@NotNull
@Override
protected ValidationResultType doValidate(@NotNull String data, @NotNull EventContext context) {
for (Executor executor : Executor.EXECUTOR_EXTENSION_NAME.getExtensions()) {
if (StringUtil.equals(executor.getId(), data)) {
final PluginInfo info = PluginInfoDetectorKt.getPluginInfo(executor.getClass());
return info.isSafeToReport() ? ValidationResultType.ACCEPTED : ValidationResultType.THIRD_PARTY;
}
}
return ValidationResultType.REJECTED;
}
}
public static class RunTargetValidator extends CustomValidationRule {
public static final String RULE_ID = "run_target";
@Override
public boolean acceptRuleId(@Nullable String ruleId) {
return RULE_ID.equals(ruleId);
}
@NotNull
@Override
protected ValidationResultType doValidate(@NotNull String data, @NotNull EventContext context) {
for (TargetEnvironmentType<?> type : TargetEnvironmentType.EXTENSION_NAME.getExtensions()) {
if (StringUtil.equals(type.getId(), data)) {
final PluginInfo info = PluginInfoDetectorKt.getPluginInfo(type.getClass());
return info.isSafeToReport() ? ValidationResultType.ACCEPTED : ValidationResultType.THIRD_PARTY;
}
}
return ValidationResultType.REJECTED;
}
}
public enum RunConfigurationFinishType {FAILED_TO_START, UNKNOWN}
}
| GunoH/intellij-community | platform/execution-impl/src/com/intellij/execution/impl/statistics/RunConfigurationUsageTriggerCollector.java | Java | apache-2.0 | 7,712 |
/**
* Copyright (c) 2010 RedEngine Ltd, http://www.redengine.co.nz. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package net.stickycode.deploy.sample.helloworld;
public class HelloWorld implements Runnable {
public void hello() {
System.out.println("Hello World!");
}
@Override
public void run() {
System.out.println("Hello Embedded World!");
try {
Thread.sleep(5000);
}
    catch (InterruptedException e) {
      // restore the interrupt flag before rethrowing, so callers can still observe it
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
}
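  // Minimal usage sketch (assumes a plain JVM entry point, which this sample does not define):
  //   new HelloWorld().hello();              // prints immediately
  //   new Thread(new HelloWorld()).start();  // prints, then sleeps ~5s on its thread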
}
| walterDurin/stickycode | net.stickycode.deploy.samples/sticky-deploy-sample-helloworld/src/main/java/net/stickycode/deploy/sample/helloworld/HelloWorld.java | Java | apache-2.0 | 1,097 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.parser.rule;
import lombok.Getter;
import org.apache.shardingsphere.infra.rule.identifier.scope.GlobalRule;
import org.apache.shardingsphere.parser.config.SQLParserRuleConfiguration;
import org.apache.shardingsphere.sql.parser.api.CacheOption;
/**
* SQL parser rule.
*/
@Getter
public final class SQLParserRule implements GlobalRule {
private final boolean sqlCommentParseEnabled;
private final CacheOption sqlStatementCache;
private final CacheOption parseTreeCache;
public SQLParserRule(final SQLParserRuleConfiguration ruleConfig) {
sqlCommentParseEnabled = ruleConfig.isSqlCommentParseEnabled();
sqlStatementCache = ruleConfig.getSqlStatementCache();
parseTreeCache = ruleConfig.getParseTreeCache();
}
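    // Construction sketch; the SQLParserRuleConfiguration constructor shape and the
    // CacheOption values below are assumptions for illustration, not recommended defaults:
    //   SQLParserRule rule = new SQLParserRule(new SQLParserRuleConfiguration(
    //       true, new CacheOption(2000, 65535L), new CacheOption(128, 1024L)));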
@Override
public String getType() {
return SQLParserRule.class.getSimpleName();
}
}
| apache/incubator-shardingsphere | shardingsphere-kernel/shardingsphere-parser/shardingsphere-parser-core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java | Java | apache-2.0 | 1,720 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.operators.sort;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputPushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.resources.IDeallocatable;
import org.apache.hyracks.api.util.CleanupUtils;
import org.apache.hyracks.dataflow.common.io.GeneratedRunFileReader;
import org.apache.hyracks.dataflow.std.buffermanager.EnumFreeSlotPolicy;
import org.apache.hyracks.dataflow.std.sort.Algorithm;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunMerger;
public class MicroSortRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
private static final long serialVersionUID = 1L;
private final int framesLimit;
private final int[] sortFields;
private final INormalizedKeyComputerFactory[] keyNormalizerFactories;
private final IBinaryComparatorFactory[] comparatorFactories;
public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
this(sortFields, firstKeyNormalizerFactory != null
? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null, comparatorFactories,
projectionList, framesLimit);
}
public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory[] keyNormalizerFactories,
IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
super(projectionList);
        // Note: the projection list is currently ignored.
if (projectionList != null) {
throw new NotImplementedException("Cannot push projection into InMemorySortRuntime.");
}
this.sortFields = sortFields;
this.keyNormalizerFactories = keyNormalizerFactories;
this.comparatorFactories = comparatorFactories;
this.framesLimit = framesLimit;
}
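    // Illustrative construction (a sketch; someComparatorFactory stands in for a real
    // IBinaryComparatorFactory bound to the type of the sorted column):
    //   new MicroSortRuntimeFactory(
    //       new int[] { 0 },                       // sort on the first field
    //       (INormalizedKeyComputerFactory) null,  // no normalized-key acceleration
    //       new IBinaryComparatorFactory[] { someComparatorFactory },
    //       null,                                  // projection must be null (checked above)
    //       4);                                    // frames budgeted for in-memory sorting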
@Override
public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
throws HyracksDataException {
InMemorySortPushRuntime pushRuntime = new InMemorySortPushRuntime(ctx);
ctx.registerDeallocatable(pushRuntime);
return pushRuntime;
}
private class InMemorySortPushRuntime extends AbstractOneInputOneOutputPushRuntime implements IDeallocatable {
final IHyracksTaskContext ctx;
ExternalSortRunGenerator runsGenerator = null;
ExternalSortRunMerger runsMerger = null;
IFrameWriter wrappingWriter = null;
private InMemorySortPushRuntime(IHyracksTaskContext ctx) {
this.ctx = ctx;
}
@Override
public void open() throws HyracksDataException {
if (runsGenerator == null) {
runsGenerator = new ExternalSortRunGenerator(ctx, sortFields, keyNormalizerFactories,
comparatorFactories, outputRecordDesc, Algorithm.MERGE_SORT, EnumFreeSlotPolicy.LAST_FIT,
framesLimit, Integer.MAX_VALUE);
}
// next writer will be opened later when preparing the merger
isOpen = true;
runsGenerator.open();
runsGenerator.getSorter().reset();
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
runsGenerator.nextFrame(buffer);
}
@Override
public void close() throws HyracksDataException {
Throwable failure = null;
if (isOpen) {
try {
if (!failed) {
runsGenerator.close();
createOrResetRunsMerger();
if (runsGenerator.getRuns().isEmpty()) {
wrappingWriter = runsMerger.prepareSkipMergingFinalResultWriter(writer);
wrappingWriter.open();
if (runsGenerator.getSorter().hasRemaining()) {
runsGenerator.getSorter().flush(wrappingWriter);
}
} else {
wrappingWriter = runsMerger.prepareFinalMergeResultWriter(writer);
wrappingWriter.open();
runsMerger.process(wrappingWriter);
}
}
} catch (Throwable th) {
failure = th;
fail(th);
} finally {
failure = CleanupUtils.close(wrappingWriter, failure);
wrappingWriter = null;
}
}
isOpen = false;
if (failure != null) {
throw HyracksDataException.create(failure);
}
}
@Override
public void fail() throws HyracksDataException {
failed = true;
// clean up the runs if some have been generated. double close should be idempotent.
if (runsGenerator != null) {
List<GeneratedRunFileReader> runs = runsGenerator.getRuns();
for (int i = 0, size = runs.size(); i < size; i++) {
try {
runs.get(i).close();
} catch (Throwable th) {
// ignore
}
}
}
if (wrappingWriter != null) {
wrappingWriter.fail();
}
}
@Override
public void deallocate() {
if (runsGenerator != null) {
try {
runsGenerator.getSorter().close();
} catch (Exception e) {
// ignore
}
}
}
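        // The runs merger is created lazily on the first close() and then reused across
        // open/close cycles by resetting it with the current list of generated runs.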
private void createOrResetRunsMerger() {
if (runsMerger == null) {
IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
INormalizedKeyComputer nmkComputer =
keyNormalizerFactories == null ? null : keyNormalizerFactories[0].createNormalizedKeyComputer();
runsMerger = new ExternalSortRunMerger(ctx, runsGenerator.getRuns(), sortFields, comparators,
nmkComputer, outputRecordDesc, framesLimit, Integer.MAX_VALUE);
} else {
runsMerger.reset(runsGenerator.getRuns());
}
}
}
}
| apache/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/MicroSortRuntimeFactory.java | Java | apache-2.0 | 8,262 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.apache.solr.handler.component.ShardHandlerFactory;
import org.apache.solr.logging.LogWatcher;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.update.UpdateShardHandler;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static com.google.common.base.Preconditions.checkNotNull;
/**
*
* @since solr 1.3
*/
public class CoreContainer {
protected static final Logger log = LoggerFactory.getLogger(CoreContainer.class);
final SolrCores solrCores = new SolrCores(this);
public static class CoreLoadFailure {
public final CoreDescriptor cd;
public final Exception exception;
public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) {
this.cd = cd;
this.exception = loadFailure;
}
}
protected final Map<String, CoreLoadFailure> coreInitFailures = new ConcurrentHashMap<>();
protected CoreAdminHandler coreAdminHandler = null;
protected CollectionsHandler collectionsHandler = null;
private InfoHandler infoHandler;
protected Properties containerProperties;
private ConfigSetService coreConfigService;
protected ZkContainer zkSys = new ZkContainer();
protected ShardHandlerFactory shardHandlerFactory;
private UpdateShardHandler updateShardHandler;
protected LogWatcher logging = null;
private CloserThread backgroundCloser = null;
protected final ConfigSolr cfg;
protected final SolrResourceLoader loader;
protected final String solrHome;
protected final CoresLocator coresLocator;
private String hostName;
private final JarRepository jarRepository = new JarRepository(this);
public static final String CORES_HANDLER_PATH = "/admin/cores";
public static final String COLLECTIONS_HANDLER_PATH = "/admin/collections";
public static final String INFO_HANDLER_PATH = "/admin/info";
private Map<String, SolrRequestHandler> containerHandlers = new HashMap<>();
public SolrRequestHandler getRequestHandler(String path) {
return RequestHandlerBase.getRequestHandler(path, containerHandlers);
}
public Map<String, SolrRequestHandler> getRequestHandlers(){
return this.containerHandlers;
}
// private ClientConnectionManager clientConnectionManager = new PoolingClientConnectionManager();
{
log.info("New CoreContainer " + System.identityHashCode(this));
}
/**
* Create a new CoreContainer using system properties to detect the solr home
* directory. The container's cores are not loaded.
* @see #load()
*/
public CoreContainer() {
this(new SolrResourceLoader(SolrResourceLoader.locateSolrHome()));
}
/**
* Create a new CoreContainer using the given SolrResourceLoader. The container's
* cores are not loaded.
* @param loader the SolrResourceLoader
* @see #load()
*/
public CoreContainer(SolrResourceLoader loader) {
this(ConfigSolr.fromSolrHome(loader, loader.getInstanceDir()));
}
/**
* Create a new CoreContainer using the given solr home directory. The container's
* cores are not loaded.
* @param solrHome a String containing the path to the solr home directory
* @see #load()
*/
public CoreContainer(String solrHome) {
this(new SolrResourceLoader(solrHome));
}
/**
   * Create a new CoreContainer using the given configuration, from which the
   * SolrResourceLoader and CoresLocator are derived. The container's cores are
   * not loaded.
* @param config a ConfigSolr representation of this container's configuration
* @see #load()
*/
public CoreContainer(ConfigSolr config) {
this(config, config.getCoresLocator());
}
public CoreContainer(ConfigSolr config, CoresLocator locator) {
this.loader = config.getSolrResourceLoader();
this.solrHome = loader.getInstanceDir();
this.cfg = checkNotNull(config);
this.coresLocator = locator;
}
/**
* This method allows subclasses to construct a CoreContainer
* without any default init behavior.
*
* @param testConstructor pass (Object)null.
* @lucene.experimental
*/
protected CoreContainer(Object testConstructor) {
solrHome = null;
loader = null;
coresLocator = null;
cfg = null;
}
/**
* Create a new CoreContainer and load its cores
* @param solrHome the solr home directory
* @param configFile the file containing this container's configuration
* @return a loaded CoreContainer
*/
public static CoreContainer createAndLoad(String solrHome, File configFile) {
SolrResourceLoader loader = new SolrResourceLoader(solrHome);
CoreContainer cc = new CoreContainer(ConfigSolr.fromFile(loader, configFile));
try {
cc.load();
} catch (Exception e) {
cc.shutdown();
throw e;
}
return cc;
}
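  // Hedged usage sketch (paths and core name are illustrative):
  //   CoreContainer cc = CoreContainer.createAndLoad("/var/solr", new File("/var/solr/solr.xml"));
  //   try (SolrCore core = cc.getCore("collection1")) {
  //     // ... use the core; closing it releases the reference taken by getCore() ...
  //   }
  //   cc.shutdown();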
public Properties getContainerProperties() {
return containerProperties;
}
//-------------------------------------------------------------------
// Initialization / Cleanup
//-------------------------------------------------------------------
/**
* Load the cores defined for this CoreContainer
*/
public void load() {
log.info("Loading cores into CoreContainer [instanceDir={}]", loader.getInstanceDir());
// add the sharedLib to the shared resource loader before initializing cfg based plugins
String libDir = cfg.getSharedLibDirectory();
if (libDir != null) {
File f = FileUtils.resolvePath(new File(solrHome), libDir);
log.info("loading shared library: " + f.getAbsolutePath());
loader.addToClassLoader(libDir, null, false);
loader.reloadLuceneSPI();
}
shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);
updateShardHandler = new UpdateShardHandler(cfg);
solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);
logging = LogWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);
hostName = cfg.getHost();
log.info("Host Name: " + hostName);
zkSys.initZooKeeper(this, solrHome, cfg);
collectionsHandler = createHandler(cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler);
infoHandler = createHandler(cfg.getInfoHandlerClass(), InfoHandler.class);
containerHandlers.put(INFO_HANDLER_PATH, infoHandler);
coreAdminHandler = createHandler(cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
containerHandlers.put(CORES_HANDLER_PATH, coreAdminHandler);
coreConfigService = cfg.createCoreConfigService(loader, zkSys.getZkController());
containerProperties = cfg.getSolrProperties();
// setup executor to load cores in parallel
// do not limit the size of the executor in zk mode since cores may try and wait for each other.
ExecutorService coreLoadExecutor = Executors.newFixedThreadPool(
( zkSys.getZkController() == null ? cfg.getCoreLoadThreadCount() : Integer.MAX_VALUE ),
new DefaultSolrThreadFactory("coreLoadExecutor") );
try {
List<CoreDescriptor> cds = coresLocator.discover(this);
checkForDuplicateCoreNames(cds);
List<Callable<SolrCore>> creators = new ArrayList<>();
for (final CoreDescriptor cd : cds) {
if (cd.isTransient() || !cd.isLoadOnStartup()) {
solrCores.putDynamicDescriptor(cd.getName(), cd);
}
if (cd.isLoadOnStartup()) {
creators.add(new Callable<SolrCore>() {
@Override
public SolrCore call() throws Exception {
if (zkSys.getZkController() != null) {
zkSys.getZkController().throwErrorIfReplicaReplaced(cd);
}
return create(cd, false);
}
});
}
}
try {
coreLoadExecutor.invokeAll(creators);
}
catch (InterruptedException e) {
throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Interrupted while loading cores");
}
// Start the background thread
backgroundCloser = new CloserThread(this, solrCores, cfg);
backgroundCloser.start();
} finally {
ExecutorUtil.shutdownNowAndAwaitTermination(coreLoadExecutor);
}
if (isZooKeeperAware()) {
// register in zk in background threads
Collection<SolrCore> cores = getCores();
if (cores != null) {
for (SolrCore core : cores) {
try {
zkSys.registerInZk(core, true);
} catch (Throwable t) {
SolrException.log(log, "Error registering SolrCore", t);
}
}
}
zkSys.getZkController().checkOverseerDesignate();
}
}
private static void checkForDuplicateCoreNames(List<CoreDescriptor> cds) {
Map<String, String> addedCores = Maps.newHashMap();
for (CoreDescriptor cd : cds) {
final String name = cd.getName();
if (addedCores.containsKey(name))
throw new SolrException(ErrorCode.SERVER_ERROR,
String.format(Locale.ROOT, "Found multiple cores with the name [%s], with instancedirs [%s] and [%s]",
name, addedCores.get(name), cd.getInstanceDir()));
addedCores.put(name, cd.getInstanceDir());
}
}
private volatile boolean isShutDown = false;
public boolean isShutDown() {
return isShutDown;
}
/**
* Stops all cores.
*/
public void shutdown() {
log.info("Shutting down CoreContainer instance="
+ System.identityHashCode(this));
isShutDown = true;
if (isZooKeeperAware()) {
cancelCoreRecoveries();
zkSys.publishCoresAsDown(solrCores.getCores());
}
try {
if (coreAdminHandler != null) coreAdminHandler.shutdown();
} catch (Exception e) {
log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.", e);
}
try {
// First wake up the closer thread, it'll terminate almost immediately since it checks isShutDown.
synchronized (solrCores.getModifyLock()) {
solrCores.getModifyLock().notifyAll(); // wake up anyone waiting
}
if (backgroundCloser != null) { // Doesn't seem right, but tests get in here without initializing the core.
try {
backgroundCloser.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
if (log.isDebugEnabled()) {
log.debug("backgroundCloser thread was interrupted before finishing");
}
}
}
// Now clear all the cores that are being operated upon.
solrCores.close();
      // It's still possible that one of the pending dynamic load operations is waiting, so wake it up if so.
      // Since all the pending operation queues have been drained, there should be nothing to do.
synchronized (solrCores.getModifyLock()) {
solrCores.getModifyLock().notifyAll(); // wake up the thread
}
} finally {
try {
if (shardHandlerFactory != null) {
shardHandlerFactory.close();
}
} finally {
try {
if (updateShardHandler != null) {
updateShardHandler.close();
}
} finally {
// we want to close zk stuff last
zkSys.close();
}
}
}
org.apache.lucene.util.IOUtils.closeWhileHandlingException(loader); // best effort
}
public void cancelCoreRecoveries() {
List<SolrCore> cores = solrCores.getCores();
// we must cancel without holding the cores sync
// make sure we wait for any recoveries to stop
for (SolrCore core : cores) {
try {
core.getSolrCoreState().cancelRecovery();
} catch (Exception e) {
SolrException.log(log, "Error canceling recovery for core", e);
}
}
}
@Override
protected void finalize() throws Throwable {
try {
if(!isShutDown){
        log.error("CoreContainer was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!! instance=" + System.identityHashCode(this));
}
} finally {
super.finalize();
}
}
public CoresLocator getCoresLocator() {
return coresLocator;
}
protected SolrCore registerCore(String name, SolrCore core, boolean registerInZk) {
if( core == null ) {
throw new RuntimeException( "Can not register a null core." );
}
if( name == null ||
name.indexOf( '/' ) >= 0 ||
name.indexOf( '\\' ) >= 0 ){
throw new RuntimeException( "Invalid core name: "+name );
}
    // We can register a core when creating them via the admin UI, so we need to ensure that the dynamic descriptors
// are up to date
CoreDescriptor cd = core.getCoreDescriptor();
if ((cd.isTransient() || ! cd.isLoadOnStartup())
&& solrCores.getDynamicDescriptor(name) == null) {
// Store it away for later use. includes non-transient but not
// loaded at startup cores.
solrCores.putDynamicDescriptor(name, cd);
}
SolrCore old = null;
if (isShutDown) {
core.close();
      throw new IllegalStateException("This CoreContainer has been closed");
}
if (cd.isTransient()) {
old = solrCores.putTransientCore(cfg, name, core, loader);
} else {
old = solrCores.putCore(name, core);
}
/*
* set both the name of the descriptor and the name of the
     * core, since the descriptor's name is used for persisting.
*/
core.setName(name);
coreInitFailures.remove(name);
if( old == null || old == core) {
log.info( "registering core: "+name );
if (registerInZk) {
zkSys.registerInZk(core, false);
}
return null;
}
else {
log.info( "replacing core: "+name );
old.close();
if (registerInZk) {
zkSys.registerInZk(core, false);
}
return old;
}
}
/**
* Creates a new core based on a CoreDescriptor, publishing the core state to the cluster
* @param cd the CoreDescriptor
* @return the newly created core
*/
public SolrCore create(CoreDescriptor cd) {
return create(cd, true);
}
/**
* Creates a new core based on a CoreDescriptor.
*
* @param dcore a core descriptor
* @param publishState publish core state to the cluster if true
*
* @return the newly created core
*/
public SolrCore create(CoreDescriptor dcore, boolean publishState) {
if (isShutDown) {
      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Solr has been shut down.");
}
try {
if (zkSys.getZkController() != null) {
zkSys.getZkController().preRegister(dcore);
}
ConfigSet coreConfig = coreConfigService.getConfig(dcore);
log.info("Creating SolrCore '{}' using configuration from {}", dcore.getName(), coreConfig.getName());
SolrCore core = new SolrCore(dcore, coreConfig);
solrCores.addCreated(core);
// always kick off recovery if we are in non-Cloud mode
if (!isZooKeeperAware() && core.getUpdateHandler().getUpdateLog() != null) {
core.getUpdateHandler().getUpdateLog().recoverFromLog();
}
registerCore(dcore.getName(), core, publishState);
return core;
} catch (Exception e) {
coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
log.error("Error creating core [{}]: {}", dcore.getName(), e.getMessage(), e);
throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create core [" + dcore.getName() + "]", e);
} catch (Throwable t) {
SolrException e = new SolrException(ErrorCode.SERVER_ERROR, "JVM Error creating core [" + dcore.getName() + "]: " + t.getMessage(), t);
log.error("Error creating core [{}]: {}", dcore.getName(), t.getMessage(), t);
coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
throw t;
}
}
/**
* @return a Collection of registered SolrCores
*/
public Collection<SolrCore> getCores() {
return solrCores.getCores();
}
/**
* @return a Collection of the names that cores are mapped to
*/
public Collection<String> getCoreNames() {
return solrCores.getCoreNames();
}
/** This method is currently experimental.
* @return a Collection of the names that a specific core is mapped to.
*/
public Collection<String> getCoreNames(SolrCore core) {
return solrCores.getCoreNames(core);
}
/**
   * Get a list of all the cores that are currently loaded.
   * @return a list of all the available core names in either permanent or transient core lists.
*/
public Collection<String> getAllCoreNames() {
return solrCores.getAllCoreNames();
}
/**
   * Returns an immutable Map of Exceptions that occurred when initializing
   * SolrCores (either at startup, or due to runtime requests to create cores)
* keyed off of the name (String) of the SolrCore that had the Exception
* during initialization.
* <p>
* While the Map returned by this method is immutable and will not change
* once returned to the client, the source data used to generate this Map
* can be changed as various SolrCore operations are performed:
* </p>
* <ul>
* <li>Failed attempts to create new SolrCores will add new Exceptions.</li>
* <li>Failed attempts to re-create a SolrCore using a name already contained in this Map will replace the Exception.</li>
* <li>Failed attempts to reload a SolrCore will cause an Exception to be added to this list -- even though the existing SolrCore with that name will continue to be available.</li>
   * <li>Successful attempts to re-create a SolrCore using a name already contained in this Map will remove the Exception.</li>
* <li>Registering an existing SolrCore with a name already contained in this Map (ie: ALIAS or SWAP) will remove the Exception.</li>
* </ul>
*/
public Map<String, CoreLoadFailure> getCoreInitFailures() {
return ImmutableMap.copyOf(coreInitFailures);
}
// ---------------- Core name related methods ---------------
/**
* Recreates a SolrCore.
* While the new core is loading, requests will continue to be dispatched to
* and processed by the old core
*
* @param name the name of the SolrCore to reload
*/
public void reload(String name) {
SolrCore core = solrCores.getCoreFromAnyList(name, false);
if (core == null)
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "No such core: " + name );
CoreDescriptor cd = core.getCoreDescriptor();
try {
solrCores.waitAddPendingCoreOps(name);
ConfigSet coreConfig = coreConfigService.getConfig(cd);
log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
SolrCore newCore = core.reload(coreConfig);
registerCore(name, newCore, false);
}
catch (Exception e) {
coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
}
finally {
solrCores.removeFromPendingOps(name);
}
}
/**
* Swaps two SolrCore descriptors.
*/
public void swap(String n0, String n1) {
if( n0 == null || n1 == null ) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Can not swap unnamed cores." );
}
solrCores.swap(n0, n1);
coresLocator.swap(this, solrCores.getCoreDescriptor(n0), solrCores.getCoreDescriptor(n1));
log.info("swapped: "+n0 + " with " + n1);
}
/**
* Unload a core from this container, leaving all files on disk
* @param name the name of the core to unload
*/
public void unload(String name) {
unload(name, false, false, false);
}
/**
* Unload a core from this container, optionally removing the core's data and configuration
*
* @param name the name of the core to unload
* @param deleteIndexDir if true, delete the core's index on close
* @param deleteDataDir if true, delete the core's data directory on close
* @param deleteInstanceDir if true, delete the core's instance directory on close
*/
public void unload(String name, boolean deleteIndexDir, boolean deleteDataDir, boolean deleteInstanceDir) {
// check for core-init errors first
CoreLoadFailure loadFailure = coreInitFailures.remove(name);
if (loadFailure != null) {
// getting the index directory requires opening a DirectoryFactory with a SolrConfig, etc,
// which we may not be able to do because of the init error. So we just go with what we
// can glean from the CoreDescriptor - datadir and instancedir
SolrCore.deleteUnloadedCore(loadFailure.cd, deleteDataDir, deleteInstanceDir);
return;
}
CoreDescriptor cd = solrCores.getCoreDescriptor(name);
if (cd == null)
throw new SolrException(ErrorCode.BAD_REQUEST, "Cannot unload non-existent core [" + name + "]");
boolean close = solrCores.isLoadedNotPendingClose(name);
SolrCore core = solrCores.remove(name);
coresLocator.delete(this, cd);
if (core == null) {
// transient core
SolrCore.deleteUnloadedCore(cd, deleteDataDir, deleteInstanceDir);
return;
}
if (zkSys.getZkController() != null) {
// cancel recovery in cloud mode
core.getSolrCoreState().cancelRecovery();
}
String configSetZkPath = core.getResourceLoader() instanceof ZkSolrResourceLoader ? ((ZkSolrResourceLoader)core.getResourceLoader()).getConfigSetZkPath() : null;
core.unloadOnClose(deleteIndexDir, deleteDataDir, deleteInstanceDir);
if (close)
core.close();
if (zkSys.getZkController() != null) {
try {
zkSys.getZkController().unregister(name, cd, configSetZkPath);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted while unregistering core [" + name + "] from cloud state");
} catch (KeeperException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, "Error unregistering core [" + name + "] from cloud state", e);
}
}
}
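  // Illustrative call (flag values are examples): unload "collection1", deleting its
  // index and data directories but keeping the instance directory on disk:
  //   coreContainer.unload("collection1", true, true, false);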
public void rename(String name, String toName) {
try (SolrCore core = getCore(name)) {
if (core != null) {
registerCore(toName, core, true);
SolrCore old = solrCores.remove(name);
coresLocator.rename(this, old.getCoreDescriptor(), core.getCoreDescriptor());
}
}
}
/**
* Get the CoreDescriptors for all cores managed by this container
* @return a List of CoreDescriptors
*/
public List<CoreDescriptor> getCoreDescriptors() {
return solrCores.getCoreDescriptors();
}
public CoreDescriptor getCoreDescriptor(String coreName) {
// TODO make this less hideous!
for (CoreDescriptor cd : getCoreDescriptors()) {
if (cd.getName().equals(coreName))
return cd;
}
return null;
}
public String getCoreRootDirectory() {
return cfg.getCoreRootDirectory();
}
/**
* Gets a core by name and increase its refcount.
*
* @see SolrCore#close()
* @param name the core name
* @return the core if found, null if a SolrCore by this name does not exist
* @exception SolrException if a SolrCore with this name failed to be initialized
*/
public SolrCore getCore(String name) {
// Do this in two phases since we don't want to lock access to the cores over a load.
SolrCore core = solrCores.getCoreFromAnyList(name, true);
if (core != null) {
return core;
}
// OK, it's not presently in any list, is it in the list of dynamic cores but not loaded yet? If so, load it.
CoreDescriptor desc = solrCores.getDynamicDescriptor(name);
if (desc == null) { //Nope, no transient core with this name
      // if there was an error initializing this core, throw a 500
// error with the details for clients attempting to access it.
CoreLoadFailure loadFailure = getCoreInitFailures().get(name);
if (null != loadFailure) {
throw new SolrException(ErrorCode.SERVER_ERROR, "SolrCore '" + name +
"' is not available due to init failure: " +
loadFailure.exception.getMessage(), loadFailure.exception);
}
// otherwise the user is simply asking for something that doesn't exist.
return null;
}
// This will put an entry in pending core ops if the core isn't loaded
core = solrCores.waitAddPendingCoreOps(name);
if (isShutDown) return null; // We're quitting, so stop. This needs to be after the wait above since we may come off
// the wait as a consequence of shutting down.
try {
if (core == null) {
if (zkSys.getZkController() != null) {
zkSys.getZkController().throwErrorIfReplicaReplaced(desc);
}
core = create(desc); // This should throw an error if it fails.
}
core.open();
}
finally {
solrCores.removeFromPendingOps(name);
}
return core;
}
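  // Usage note (sketch): every successful getCore() must be balanced by SolrCore.close();
  // try-with-resources handles this, as rename(String, String) above also demonstrates:
  //   try (SolrCore core = coreContainer.getCore("collection1")) {
  //     if (core != null) {
  //       // ... use the core ...
  //     }
  //   }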
public JarRepository getJarRepository(){
return jarRepository;
}
// ---------------- CoreContainer request handlers --------------
protected <T> T createHandler(String handlerClass, Class<T> clazz) {
return loader.newInstance(handlerClass, clazz, null, new Class[] { CoreContainer.class }, new Object[] { this });
}
public CoreAdminHandler getMultiCoreHandler() {
return coreAdminHandler;
}
public CollectionsHandler getCollectionsHandler() {
return collectionsHandler;
}
public InfoHandler getInfoHandler() {
return infoHandler;
}
public String getHostName() {
return this.hostName;
}
/**
* Gets the alternate path for multicore handling:
* This is used in case there is a registered unnamed core (aka name is "") to
* declare an alternate way of accessing named cores.
* This can also be used in a pseudo single-core environment so admins can prepare
* a new version before swapping.
*/
public String getManagementPath() {
return cfg.getManagementPath();
}
public LogWatcher getLogging() {
return logging;
}
/**
* Determines whether the core is already loaded or not but does NOT load the core
*
*/
public boolean isLoaded(String name) {
return solrCores.isLoaded(name);
}
public boolean isLoadedNotPendingClose(String name) {
return solrCores.isLoadedNotPendingClose(name);
}
/**
* Gets a solr core descriptor for a core that is not loaded. Note that if the caller calls this on a
* loaded core, the unloaded descriptor will be returned.
*
   * @param cname - name of the unloaded core descriptor to load.
* @return a coreDescriptor. May return null
*/
public CoreDescriptor getUnloadedCoreDescriptor(String cname) {
return solrCores.getUnloadedCoreDescriptor(cname);
}
public String getSolrHome() {
return solrHome;
}
public boolean isZooKeeperAware() {
return zkSys.getZkController() != null;
}
public ZkController getZkController() {
return zkSys.getZkController();
}
public ConfigSolr getConfig() {
return cfg;
}
/** The default ShardHandlerFactory used to communicate with other solr instances */
public ShardHandlerFactory getShardHandlerFactory() {
return shardHandlerFactory;
}
public UpdateShardHandler getUpdateShardHandler() {
return updateShardHandler;
}
public SolrResourceLoader getResourceLoader() {
return loader;
}
}
class CloserThread extends Thread {
CoreContainer container;
SolrCores solrCores;
ConfigSolr cfg;
CloserThread(CoreContainer container, SolrCores solrCores, ConfigSolr cfg) {
this.container = container;
this.solrCores = solrCores;
this.cfg = cfg;
}
// It's important that this be the _only_ thread removing things from pendingDynamicCloses!
// This is single-threaded, but I tried a multi-threaded approach and didn't see any performance gains, so
// there's no good justification for the complexity. I suspect that the locking on things like DefaultSolrCoreState
// essentially create a single-threaded process anyway.
@Override
public void run() {
while (! container.isShutDown()) {
synchronized (solrCores.getModifyLock()) { // need this so we can wait and be awoken.
try {
solrCores.getModifyLock().wait();
} catch (InterruptedException e) {
// Well, if we've been told to stop, we will. Otherwise, continue on and check to see if there are
// any cores to close.
}
}
for (SolrCore removeMe = solrCores.getCoreToClose();
removeMe != null && !container.isShutDown();
removeMe = solrCores.getCoreToClose()) {
try {
removeMe.close();
} finally {
solrCores.removeFromPendingOps(removeMe.getName());
}
}
}
}
}
| visouza/solr-5.0.0 | solr/core/src/java/org/apache/solr/core/CoreContainer.java | Java | apache-2.0 | 30,728 |
package gov.va.medora.mdws.emrsvc;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="fromDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="toDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="nNotes" type="{http://www.w3.org/2001/XMLSchema}int"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"fromDate",
"toDate",
"nNotes"
})
@XmlRootElement(name = "getDischargeSummaries")
public class GetDischargeSummaries {
protected String fromDate;
protected String toDate;
protected int nNotes;
/**
* Gets the value of the fromDate property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getFromDate() {
return fromDate;
}
/**
* Sets the value of the fromDate property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setFromDate(String value) {
this.fromDate = value;
}
/**
* Gets the value of the toDate property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getToDate() {
return toDate;
}
/**
* Sets the value of the toDate property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setToDate(String value) {
this.toDate = value;
}
/**
* Gets the value of the nNotes property.
*
*/
public int getNNotes() {
return nNotes;
}
/**
* Sets the value of the nNotes property.
*
*/
public void setNNotes(int value) {
this.nNotes = value;
}
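    // Illustrative population of this JAXB request object (the date format shown is an
    // assumption; the schema only types these fields as plain strings):
    //   GetDischargeSummaries req = new GetDischargeSummaries();
    //   req.setFromDate("20090101");
    //   req.setToDate("20091231");
    //   req.setNNotes(10);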
}
| VHAINNOVATIONS/TheDailyPlan | LegacyApp/tdpWeb/src/main/java/gov/va/medora/mdws/emrsvc/GetDischargeSummaries.java | Java | apache-2.0 | 2,528 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static java.util.regex.Matcher.quoteReplacement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.pig.ExecType;
import org.apache.pig.ExecTypeProvider;
import org.apache.pig.LoadCaster;
import org.apache.pig.PigException;
import org.apache.pig.PigServer;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.TezResourceManager;
import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
import org.apache.pig.builtin.Utf8StorageConverter;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.newplan.logical.optimizer.LogicalPlanPrinter;
import org.apache.pig.newplan.logical.optimizer.SchemaResetter;
import org.apache.pig.newplan.logical.optimizer.UidResetter;
import org.apache.pig.newplan.logical.relational.LogToPhyTranslationVisitor;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.newplan.logical.relational.LogicalSchema;
import org.apache.pig.newplan.logical.relational.LogicalSchema.LogicalFieldSchema;
import org.apache.pig.newplan.logical.visitor.DanglingNestedNodeRemover;
import org.apache.pig.newplan.logical.visitor.SortInfoSetter;
import org.apache.pig.newplan.logical.visitor.StoreAliasSetter;
import org.apache.pig.parser.ParserException;
import org.apache.pig.parser.QueryParserDriver;
import org.apache.pig.tools.grunt.GruntParser;
import org.apache.pig.tools.pigstats.ScriptState;
import org.apache.spark.package$;
import org.junit.Assert;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
public class Util {
private static BagFactory mBagFactory = BagFactory.getInstance();
private static TupleFactory mTupleFactory = TupleFactory.getInstance();
// Commonly-checked system state
// =================
public static final boolean WINDOWS /* borrowed from Path.WINDOWS, Shell.WINDOWS */
= System.getProperty("os.name").startsWith("Windows");
public static final String TEST_DIR = System.getProperty("test.build.dir", "build/test");
// Helper Functions
// =================
static public Tuple loadFlatTuple(Tuple t, int[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, new Integer(input[i]));
}
return t;
}
static public Tuple loadTuple(Tuple t, String[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, input[i]);
}
return t;
}
static public Tuple loadTuple(Tuple t, DataByteArray[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, input[i]);
}
return t;
}
static public Tuple loadNestTuple(Tuple t, int[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, input[i]);
bag.add(f);
}
t.set(0, bag);
return t;
}
static public Tuple loadNestTuple(Tuple t, long[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, new Long(input[i]));
bag.add(f);
}
t.set(0, bag);
return t;
}
// this one should handle String, DataByteArray, Long, Integer etc..
static public <T> Tuple loadNestTuple(Tuple t, T[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, input[i]);
bag.add(f);
}
t.set(0, bag);
return t;
}
/**
* Create an array of tuple bags with specified size created by splitting
* the input array of primitive types
*
* @param input Array of primitive types
* @param bagSize The number of tuples to be split and copied into each bag
*
* @return an array of tuple bags with each bag containing bagSize tuples split from the input
*/
static public <T> Tuple[] splitCreateBagOfTuples(T[] input, int bagSize)
throws ExecException {
List<Tuple> result = new ArrayList<Tuple>();
for (int from = 0; from < input.length; from += bagSize) {
Tuple t = TupleFactory.getInstance().newTuple(1);
int to = from + bagSize < input.length ? from + bagSize
: input.length;
T[] array = Arrays.copyOfRange(input, from, to);
result.add(loadNestTuple(t, array));
}
return result.toArray(new Tuple[0]);
}
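    // Worked example (sketch): splitting {1, 2, 3, 4, 5} with bagSize = 2 yields three
    // one-field tuples whose bags hold 2, 2 and 1 inner tuples respectively:
    //   Tuple[] bags = splitCreateBagOfTuples(new Integer[] { 1, 2, 3, 4, 5 }, 2);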
static public <T>void addToTuple(Tuple t, T[] b)
{
for(int i = 0; i < b.length; i++)
t.append(b[i]);
}
static public Tuple buildTuple(Object... args) throws ExecException {
return TupleFactory.getInstance().newTupleNoCopy(Lists.newArrayList(args));
}
static public Tuple buildBinTuple(final Object... args) throws IOException {
return TupleFactory.getInstance().newTuple(Lists.transform(
Lists.newArrayList(args), new Function<Object, DataByteArray>() {
@Override
public DataByteArray apply(Object o) {
if (o == null) {
return null;
}
try {
return new DataByteArray(DataType.toBytes(o));
} catch (ExecException e) {
return null;
}
}
}));
}
static public <T>Tuple createTuple(T[] s)
{
Tuple t = mTupleFactory.newTuple();
addToTuple(t, s);
return t;
}
static public DataBag createBag(Tuple[] t)
{
DataBag b = mBagFactory.newDefaultBag();
for(int i = 0; i < t.length; i++)b.add(t[i]);
return b;
}
static public<T> DataBag createBagOfOneColumn(T[] input) throws ExecException {
DataBag result = mBagFactory.newDefaultBag();
for (int i = 0; i < input.length; i++) {
Tuple t = mTupleFactory.newTuple(1);
t.set(0, input[i]);
result.add(t);
}
return result;
}
static public Map<String, Object> createMap(String[] contents)
{
Map<String, Object> m = new HashMap<String, Object>();
for(int i = 0; i < contents.length; ) {
m.put(contents[i], contents[i+1]);
i += 2;
}
return m;
}
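    // Example: contents is consumed as alternating key/value pairs, so
    //   createMap(new String[] { "k1", "v1", "k2", "v2" })
    // yields {k1=v1, k2=v2}; an odd-length array would throw ArrayIndexOutOfBoundsException.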
static public<T> DataByteArray[] toDataByteArrays(T[] input) {
DataByteArray[] dbas = new DataByteArray[input.length];
for (int i = 0; i < input.length; i++) {
dbas[i] = (input[i] == null)?null:new DataByteArray(input[i].toString().getBytes());
}
return dbas;
}
static public Tuple loadNestTuple(Tuple t, int[][] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
DataBag bag = BagFactory.getInstance().newDefaultBag();
Tuple f = loadFlatTuple(TupleFactory.getInstance().newTuple(input[i].length), input[i]);
bag.add(f);
t.set(i, bag);
}
return t;
}
static public Tuple loadTuple(Tuple t, String[][] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
DataBag bag = BagFactory.getInstance().newDefaultBag();
Tuple f = loadTuple(TupleFactory.getInstance().newTuple(input[i].length), input[i]);
bag.add(f);
t.set(i, bag);
}
return t;
}
/**
* Helper to remove colons (if any exist) from paths to sanitize them for
* consumption by hdfs.
*
* @param origPath original path name
     * @return String sanitized path with all colon characters removed
*/
static public String removeColon(String origPath)
{
return origPath.replaceAll(":", "");
}
/**
* Helper to convert \r\n to \n for cross-platform string
* matching with checked-in baselines.
*
* @param origPath original string
* @return String newline-standardized string
*/
static public String standardizeNewline(String origPath)
{
return origPath.replaceAll("\r\n", "\n");
}
/**
* Helper to create a temporary file with given input data for use in test cases.
*
* @param tmpFilenamePrefix file-name prefix
* @param tmpFilenameSuffix file-name suffix
* @param inputData input for test cases, each string in inputData[] is written
* on one line
* @return {@link File} handle to the created temporary file
* @throws IOException
*/
static public File createInputFile(String tmpFilenamePrefix,
String tmpFilenameSuffix,
String[] inputData)
throws IOException {
File f = File.createTempFile(tmpFilenamePrefix, tmpFilenameSuffix);
f.deleteOnExit();
writeToFile(f, inputData);
return f;
}
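    // Typical test usage (sketch): write two newline-terminated records to a temp file
    // that is deleted when the JVM exits:
    //   File in = Util.createInputFile("input", ".txt", new String[] { "a\t1", "b\t2" });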
static public File createLocalInputFile(String filename, String[] inputData)
throws IOException {
File f = new File(filename);
f.deleteOnExit();
writeToFile(f, inputData);
return f;
}
public static void writeToFile(File f, String[] inputData) throws
IOException {
PrintWriter pw = new PrintWriter(new OutputStreamWriter(new
FileOutputStream(f), "UTF-8"));
for (int i=0; i<inputData.length; i++){
pw.print(inputData[i]);
pw.print("\n");
}
pw.close();
}
/**
* Helper to create a dfs file on the Minicluster DFS with given
* input data for use in test cases.
*
* @param miniCluster reference to the Minicluster where the file should be created
* @param fileName pathname of the file to be created
* @param inputData input for test cases, each string in inputData[] is written
* on one line
* @throws IOException
*/
static public void createInputFile(MiniGenericCluster miniCluster, String fileName,
String[] inputData)
throws IOException {
FileSystem fs = miniCluster.getFileSystem();
createInputFile(fs, fileName, inputData);
}
static public void createInputFile(FileSystem fs, String fileName,
String[] inputData) throws IOException {
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
if(fs.exists(new Path(fileName))) {
throw new IOException("File " + fileName + " already exists on the FileSystem");
}
FSDataOutputStream stream = fs.create(new Path(fileName));
PrintWriter pw = new PrintWriter(new OutputStreamWriter(stream, "UTF-8"));
for (int i=0; i<inputData.length; i++){
pw.print(inputData[i]);
pw.print("\n");
}
pw.close();
}
static public String[] readOutput(FileSystem fs, String fileName) throws IOException {
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
Path path = new Path(fileName);
if(!fs.exists(path)) {
throw new IOException("Path " + fileName + " does not exist on the FileSystem");
}
FileStatus fileStatus = fs.getFileStatus(path);
FileStatus[] files;
if (fileStatus.isDirectory()) {
files = fs.listStatus(path, new PathFilter() {
@Override
public boolean accept(Path p) {
return !p.getName().startsWith("_");
}
});
} else {
files = new FileStatus[] { fileStatus };
}
List<String> result = new ArrayList<String>();
for (FileStatus f : files) {
FSDataInputStream stream = fs.open(f.getPath());
BufferedReader br = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
String line;
while ((line = br.readLine()) != null) {
result.add(line);
}
br.close();
}
return result.toArray(new String[result.size()]);
}
/**
* Helper to create a dfs file on the MiniCluster dfs. This returns an
* outputstream that can be used in test cases to write data.
*
* @param cluster
* reference to the MiniCluster where the file should be created
* @param fileName
* pathname of the file to be created
* @return OutputStream to write any data to the file created on the
* MiniCluster.
* @throws IOException
*/
static public OutputStream createInputFile(MiniGenericCluster cluster,
String fileName) throws IOException {
FileSystem fs = cluster.getFileSystem();
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
if (fs.exists(new Path(fileName))) {
throw new IOException("File " + fileName
+ " already exists on the minicluster");
}
return fs.create(new Path(fileName));
}
/**
* Helper to create an empty temp file on local file system
* which will be deleted on exit
* @param prefix
* @param suffix
* @return File denoting a newly-created empty file
* @throws IOException
*/
static public File createTempFileDelOnExit(String prefix, String suffix)
throws IOException {
File tmpFile = File.createTempFile(prefix, suffix);
tmpFile.deleteOnExit();
return tmpFile;
}
/**
* Helper to remove a dfs file from the minicluster DFS
*
* @param miniCluster reference to the Minicluster where the file should be deleted
* @param fileName pathname of the file to be deleted
* @throws IOException
*/
static public void deleteFile(MiniGenericCluster miniCluster, String fileName)
throws IOException {
FileSystem fs = miniCluster.getFileSystem();
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
fs.delete(new Path(fileName), true);
}
/**
* Deletes a dfs file from the MiniCluster DFS quietly
*
* @param miniCluster the MiniCluster where the file should be deleted
* @param fileName the path of the file to be deleted
*/
public static void deleteQuietly(MiniGenericCluster miniCluster, String fileName) {
try {
deleteFile(miniCluster, fileName);
} catch (IOException ignored) {
}
}
static public void deleteFile(PigContext pigContext, String fileName)
throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
FileSystem fs = FileSystem.get(conf);
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
fs.delete(new Path(fileName), true);
}
static public boolean exists(PigContext pigContext, String fileName)
throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
FileSystem fs = FileSystem.get(conf);
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
return fs.exists(new Path(fileName));
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results.
*
* @param actualResults Result of the executed Pig query
* @param expectedResults Expected results Array to validate against
*/
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
Tuple[] expectedResults) {
checkQueryOutputs(actualResults, Arrays.asList(expectedResults));
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results.
*
* @param actualResults Result of the executed Pig query
* @param expectedResults Expected results List to validate against
*/
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
List<Tuple> expectedResults) {
checkQueryOutputs(actualResults, expectedResults.iterator(), null );
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results.
*
* @param actualResults Result of the executed Pig query
* @param expectedResults Expected results List to validate against
*/
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
Iterator<Tuple> expectedResults, Integer expectedRows) {
int count = 0;
while (expectedResults.hasNext()) {
Tuple expected = expectedResults.next();
Assert.assertTrue("Actual result has less records than expected results", actualResults.hasNext());
Tuple actual = actualResults.next();
// If this tuple contains any bags, bags will be sorted before comparisons
if( !expected.equals(actual) ) {
// Using string comparisons since error message is more readable
// (only showing the part which differs)
Assert.assertEquals(expected.toString(), actual.toString());
// if above goes through, simply failing with object comparisons
Assert.assertEquals(expected, actual);
}
count++;
}
Assert.assertFalse("Actual result has more records than expected results", actualResults.hasNext());
if (expectedRows != null) {
Assert.assertEquals((int)expectedRows, count);
}
}
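    // Hypothetical usage sketch: validate results of an alias and assert the
    // exact row count. The alias name and expected values are illustrative.
    //
    //   List<Tuple> expected = Util.getTuplesFromConstantTupleStrings(
    //           new String[] { "(1,'apple')", "(2,'banana')" });
    //   Util.checkQueryOutputs(pigServer.openIterator("sorted"),
    //           expected.iterator(), 2);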
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results. It sorts actual and expected results before comparison
*
* @param actualResultsIt Result of the executed Pig query
* @param expectedResList Expected results to validate against
*/
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
List<Tuple> expectedResList) {
List<Tuple> actualResList = new ArrayList<Tuple>();
while(actualResultsIt.hasNext()){
actualResList.add(actualResultsIt.next());
}
checkQueryOutputsAfterSort(actualResList, expectedResList);
}
/**
* Helper function to check if the result of Pig Query is in line with expected results.
* It sorts actual and expected results before comparison.
     * The tuple size in the tuple list can vary. Pass in a two-dimensional array
     * and it will be converted to a tuple list. E.g. if expectedTwoDimensionObjects
     * is {{10, "will_join", 10, "will_join"}, {11, "will_not_join", null}, {null, 12, "will_not_join"}},
     * the field sizes of these 3 tuples are [4, 3, 3].
*
* @param actualResultsIt
* @param expectedTwoDimensionObjects represents a tuple list, in which the tuple can have variable size.
*/
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
Object[][] expectedTwoDimensionObjects) {
List<Tuple> expectedResTupleList = new ArrayList<Tuple>();
for (int i = 0; i < expectedTwoDimensionObjects.length; ++i) {
Tuple t = TupleFactory.getInstance().newTuple();
for (int j = 0; j < expectedTwoDimensionObjects[i].length; ++j) {
t.append(expectedTwoDimensionObjects[i][j]);
}
expectedResTupleList.add(t);
}
checkQueryOutputsAfterSort(actualResultsIt, expectedResTupleList);
}
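    // Hypothetical usage sketch for the two-dimensional array form, matching
    // the javadoc example above: the three expected tuples have 4, 3 and 3
    // fields respectively. The alias name is illustrative.
    //
    //   Util.checkQueryOutputsAfterSort(pigServer.openIterator("joined"),
    //           new Object[][] {
    //               { 10, "will_join", 10, "will_join" },
    //               { 11, "will_not_join", null },
    //               { null, 12, "will_not_join" } });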
static public void checkQueryOutputsAfterSort(
List<Tuple> actualResList, List<Tuple> expectedResList) {
Collections.sort(actualResList);
Collections.sort(expectedResList);
checkQueryOutputs(actualResList.iterator(), expectedResList);
}
/**
     * Check if subStr is a substring of str. Calls org.junit.Assert.fail if it is not.
* @param str
* @param subStr
*/
static public void checkStrContainsSubStr(String str, String subStr){
if(!str.contains(subStr)){
fail("String '"+ subStr + "' is not a substring of '" + str + "'");
}
}
/**
* Check if query plan for alias argument produces exception with expected
* error message in expectedErr argument.
* @param query
* @param alias
* @param expectedErr
* @throws IOException
*/
static public void checkExceptionMessage(String query, String alias, String expectedErr)
throws IOException {
PigServer pig = new PigServer(ExecType.LOCAL);
boolean foundEx = false;
try{
Util.registerMultiLineQuery(pig, query);
pig.explain(alias, System.out);
}catch(FrontendException e){
foundEx = true;
checkMessageInException(e, expectedErr);
}
if(!foundEx)
fail("No exception thrown. Exception is expected.");
}
public static void checkMessageInException(FrontendException e,
String expectedErr) {
PigException pigEx = LogUtils.getPigException(e);
String message = pigEx.getMessage();
checkErrorMessageContainsExpected(message, expectedErr);
}
public static void checkErrorMessageContainsExpected(String message, String expectedMessage){
if(!message.contains(expectedMessage)){
String msg = "Expected error message containing '"
+ expectedMessage + "' but got '" + message + "'" ;
fail(msg);
}
}
static private String getFSMkDirCommand(String fileName) {
Path parentDir = new Path(fileName).getParent();
String mkdirCommand = parentDir.getName().isEmpty() ? "" : "fs -mkdir -p " + parentDir + "\n";
return mkdirCommand;
}
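    // Illustrative behaviour (paths assumed):
    //   getFSMkDirCommand("/user/test/data/input.txt") -> "fs -mkdir -p /user/test/data\n"
    //   getFSMkDirCommand("/input.txt")                -> "" (parent is the root)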
/**
     * Utility method to copy a file from the local filesystem to the DFS on
* the minicluster for testing in mapreduce mode
* @param cluster a reference to the minicluster
* @param localFileName the pathname of local file
* @param fileNameOnCluster the name with which the file should be created on the minicluster
* @throws IOException
*/
static public void copyFromLocalToCluster(MiniGenericCluster cluster,
String localFileName, String fileNameOnCluster) throws IOException {
if(Util.WINDOWS){
if (!localFileName.contains(":")) {
localFileName = localFileName.replace('\\','/');
} else {
localFileName = localFileName.replace('/','\\');
}
fileNameOnCluster = fileNameOnCluster.replace('\\','/');
}
PigServer ps = new PigServer(cluster.getExecType(), cluster.getProperties());
String script = getFSMkDirCommand(fileNameOnCluster) + "fs -put " + localFileName + " " + fileNameOnCluster;
GruntParser parser = new GruntParser(new StringReader(script), ps);
parser.setInteractive(false);
try {
parser.parseStopOnError();
} catch (org.apache.pig.tools.pigscript.parser.ParseException e) {
throw new IOException(e);
}
}
static public void copyFromLocalToLocal(String fromLocalFileName,
String toLocalFileName) throws IOException {
FileUtils.copyFile(new File(fromLocalFileName), new File(toLocalFileName));
}
static public void copyFromClusterToLocal(MiniGenericCluster cluster,
String fileNameOnCluster, String localFileName) throws IOException {
if(Util.WINDOWS){
fileNameOnCluster = fileNameOnCluster.replace('\\','/');
localFileName = localFileName.replace('\\','/');
}
File parent = new File(localFileName).getParentFile();
if (!parent.exists()) {
parent.mkdirs();
}
PrintWriter writer = new PrintWriter(new FileWriter(localFileName));
FileSystem fs = FileSystem.get(ConfigurationUtil.toConfiguration(
cluster.getProperties()));
if(!fs.exists(new Path(fileNameOnCluster))) {
throw new IOException("File " + fileNameOnCluster + " does not exists on the minicluster");
}
String line = null;
FileStatus fst = fs.getFileStatus(new Path(fileNameOnCluster));
if(fst.isDirectory()) {
throw new IOException("Only files from cluster can be copied locally," +
" " + fileNameOnCluster + " is a directory");
}
FSDataInputStream stream = fs.open(new Path(fileNameOnCluster));
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
while( (line = reader.readLine()) != null) {
writer.println(line);
}
reader.close();
writer.close();
}
static public void printQueryOutput(Iterator<Tuple> actualResults,
Tuple[] expectedResults) {
System.out.println("Expected :") ;
for (Tuple expected : expectedResults) {
System.out.println(expected.toString()) ;
}
System.out.println("---End----") ;
System.out.println("Actual :") ;
while (actualResults.hasNext()) {
System.out.println(actualResults.next().toString()) ;
}
System.out.println("---End----") ;
}
/**
* Helper method to replace all occurrences of "\" with "\\" in a
* string. This is useful to fix the file path string on Windows
* where "\" is used as the path separator.
*
* @param str Any string
* @return The resulting string
*/
public static String encodeEscape(String str) {
String regex = "\\\\";
String replacement = quoteReplacement("\\\\");
return str.replaceAll(regex, replacement);
}
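    // Illustrative behaviour: every single backslash in the runtime string is
    // doubled, e.g. the path C:\temp\data becomes C:\\temp\\data, so it
    // survives a later round of escaping on Windows.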
public static String generateURI(String filename, PigContext context)
throws IOException {
if(Util.WINDOWS){
filename = filename.replace('\\','/');
}
if (context.getExecType() == ExecType.MAPREDUCE || context.getExecType().name().equals("TEZ") ||
context.getExecType().name().equals("SPARK")) {
return FileLocalizer.hadoopify(filename, context);
} else if (context.getExecType().isLocal()) {
return filename;
} else {
throw new IllegalStateException("ExecType: " + context.getExecType());
}
}
public static Object getPigConstant(String pigConstantAsString) throws ParserException {
QueryParserDriver queryParser = new QueryParserDriver( new PigContext(),
"util", new HashMap<String, String>() ) ;
return queryParser.parseConstant(pigConstantAsString);
}
/**
     * Parse a list of strings into a list of tuples.
* @param tupleConstants
* @return
* @throws ParserException
*/
public static List<Tuple> getTuplesFromConstantTupleStrings(String[] tupleConstants) throws ParserException {
List<Tuple> result = new ArrayList<Tuple>(tupleConstants.length);
for(int i = 0; i < tupleConstants.length; i++) {
result.add((Tuple) getPigConstant(tupleConstants[i]));
}
return result;
}
/**
     * Parse a list of strings into a list of tuples, converting quoted strings
     * into DataByteArray objects.
* @param tupleConstants
* @return
* @throws ParserException
* @throws ExecException
*/
public static List<Tuple> getTuplesFromConstantTupleStringAsByteArray(String[] tupleConstants)
throws ParserException, ExecException {
List<Tuple> tuples = getTuplesFromConstantTupleStrings(tupleConstants);
for(Tuple t : tuples){
convertStringToDataByteArray(t);
}
return tuples;
}
/**
* Convert String objects in argument t to DataByteArray objects
* @param t
* @throws ExecException
*/
private static void convertStringToDataByteArray(Tuple t) throws ExecException {
if(t == null)
return;
for(int i=0; i<t.size(); i++){
Object col = t.get(i);
if(col == null)
continue;
if(col instanceof String){
                t.set(i, new DataByteArray((String) col));
}else if(col instanceof Tuple){
convertStringToDataByteArray((Tuple)col);
}else if(col instanceof DataBag){
Iterator<Tuple> it = ((DataBag)col).iterator();
while(it.hasNext()){
convertStringToDataByteArray(it.next());
}
}
}
}
public static File createFile(String[] data) throws Exception{
return createFile(null,data);
}
public static File createFile(String filePath, String[] data) throws Exception {
File f;
if( null == filePath || filePath.isEmpty() ) {
f = File.createTempFile("tmp", "");
} else {
f = new File(filePath);
}
if (f.getParent() != null && !(new File(f.getParent())).exists()) {
(new File(f.getParent())).mkdirs();
}
f.deleteOnExit();
PrintWriter pw = new PrintWriter(f);
for (int i=0; i<data.length; i++){
pw.println(data[i]);
}
pw.close();
return f;
}
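    // Hypothetical usage sketch: create a temp file holding two lines of test
    // data; the file itself is removed on JVM exit via deleteOnExit().
    //
    //   File input = Util.createFile(new String[] { "1\tapple", "2\tbanana" });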
/**
* Run default set of optimizer rules on new logical plan
* @param lp
* @return optimized logical plan
* @throws FrontendException
*/
public static LogicalPlan optimizeNewLP(
LogicalPlan lp)
throws FrontendException{
        DanglingNestedNodeRemover danglingNestedNodeRemover = new DanglingNestedNodeRemover( lp );
        danglingNestedNodeRemover.visit();
UidResetter uidResetter = new UidResetter( lp );
uidResetter.visit();
SchemaResetter schemaResetter =
new SchemaResetter( lp, true /*disable duplicate uid check*/ );
schemaResetter.visit();
StoreAliasSetter storeAliasSetter = new StoreAliasSetter( lp );
storeAliasSetter.visit();
// run optimizer
org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer optimizer =
new org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer(lp, 100, null);
optimizer.optimize();
SortInfoSetter sortInfoSetter = new SortInfoSetter( lp );
sortInfoSetter.visit();
return lp;
}
/**
     * Build a physical plan from a new logical plan.
* @param lp
* @param pc PigContext
* @return physical plan
* @throws Exception
*/
public static PhysicalPlan buildPhysicalPlanFromNewLP(
LogicalPlan lp, PigContext pc)
throws Exception {
LogToPhyTranslationVisitor visitor = new LogToPhyTranslationVisitor(lp);
visitor.setPigContext(pc);
visitor.visit();
return visitor.getPhysicalPlan();
}
public static MROperPlan buildMRPlan(PhysicalPlan pp, PigContext pc) throws Exception{
MRCompiler comp = new MRCompiler(pp, pc);
comp.compile();
comp.aggregateScalarsFiles();
comp.connectSoftLink();
return comp.getMRPlan();
}
public static MROperPlan buildMRPlanWithOptimizer(PhysicalPlan pp, PigContext pc) throws Exception {
MapRedUtil.checkLeafIsStore(pp, pc);
MapReduceLauncher launcher = new MapReduceLauncher();
return launcher.compile(pp,pc);
}
public static MROperPlan buildMRPlan(String query, PigContext pc) throws Exception {
LogicalPlan lp = Util.parse(query, pc);
Util.optimizeNewLP(lp);
PhysicalPlan pp = Util.buildPhysicalPlanFromNewLP(lp, pc);
MROperPlan mrp = Util.buildMRPlanWithOptimizer(pp, pc);
return mrp;
}
public static void registerMultiLineQuery(PigServer pigServer, String query) throws IOException {
File f = File.createTempFile("tmp", "");
PrintWriter pw = new PrintWriter(f);
pw.println(query);
pw.close();
pigServer.registerScript(f.getCanonicalPath());
}
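    // Hypothetical usage sketch (script contents and alias are illustrative):
    //
    //   Util.registerMultiLineQuery(pigServer,
    //           "a = load 'input.txt' as (x:int, y:chararray);\n"
    //           + "b = filter a by x > 1;\n");
    //   Iterator<Tuple> it = pigServer.openIterator("b");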
public static int executeJavaCommand(String cmd) throws Exception {
return executeJavaCommandAndReturnInfo(cmd).exitCode;
}
public static class ReadStream implements Runnable {
InputStream is;
Thread thread;
String message = "";
public ReadStream(InputStream is) {
this.is = is;
}
public void start () {
thread = new Thread (this);
thread.start ();
}
@Override
public void run () {
try {
InputStreamReader isr = new InputStreamReader (is);
BufferedReader br = new BufferedReader (isr);
while (true) {
String s = br.readLine ();
if (s == null) break;
if (!message.isEmpty()) {
message += "\n";
}
message += s;
}
is.close ();
} catch (Exception ex) {
ex.printStackTrace ();
}
}
public String getMessage() {
return message;
}
}
public static ProcessReturnInfo executeJavaCommandAndReturnInfo(String cmd)
throws Exception {
String javaHome = System.getenv("JAVA_HOME");
if(javaHome != null) {
String fileSeparator = System.getProperty("file.separator");
cmd = javaHome + fileSeparator + "bin" + fileSeparator + cmd;
}
Process cmdProc = Runtime.getRuntime().exec(cmd);
ProcessReturnInfo pri = new ProcessReturnInfo();
ReadStream stdoutStream = new ReadStream(cmdProc.getInputStream ());
ReadStream stderrStream = new ReadStream(cmdProc.getErrorStream ());
stdoutStream.start();
stderrStream.start();
cmdProc.waitFor();
pri.exitCode = cmdProc.exitValue();
pri.stdoutContents = stdoutStream.getMessage();
pri.stderrContents = stderrStream.getMessage();
return pri;
}
public static class ProcessReturnInfo {
public int exitCode;
public String stderrContents;
public String stdoutContents;
@Override
public String toString() {
return "[Exit code: " + exitCode + ", stdout: <" + stdoutContents + ">, " +
"stderr: <" + stderrContents + ">";
}
}
    static public boolean deleteDirectory(File path) {
        if(path.exists()) {
            File[] files = path.listFiles();
            // listFiles() returns null if path is not a directory or an I/O error occurs
            if (files != null) {
                for(int i=0; i<files.length; i++) {
                    if(files[i].isDirectory()) {
                        deleteDirectory(files[i]);
                    }
                    else {
                        files[i].delete();
                    }
                }
            }
        }
        return(path.delete());
    }
/**
* @param pigContext
* @param fileName
* @param input
* @throws IOException
*/
public static void createInputFile(PigContext pigContext,
String fileName, String[] input) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
createInputFile(FileSystem.get(conf), fileName, input);
}
public static String[] readOutput(PigContext pigContext,
String fileName) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
return readOutput(FileSystem.get(conf), fileName);
}
public static void printPlan(LogicalPlan logicalPlan ) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(out);
LogicalPlanPrinter pp = new LogicalPlanPrinter(logicalPlan,ps);
pp.visit();
System.err.println(out.toString());
}
public static void printPlan(PhysicalPlan physicalPlan) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(out);
physicalPlan.explain(ps, "text", true);
System.err.println(out.toString());
}
public static List<Tuple> readFile2TupleList(String file, String delimiter) throws IOException{
List<Tuple> tuples=new ArrayList<Tuple>();
String line=null;
BufferedReader reader=new BufferedReader(new InputStreamReader(new FileInputStream(file)));
while((line=reader.readLine())!=null){
String[] tokens=line.split(delimiter);
Tuple tuple=TupleFactory.getInstance().newTuple(Arrays.asList(tokens));
tuples.add(tuple);
}
reader.close();
return tuples;
}
/**
     * Delete the existing logFile for the class, switch logging to a new
     * log file, and set the log level to DEBUG
* @param clazz class for which the log file is being set
* @param logFile current log file
* @return new log file
* @throws Exception
*/
public static File resetLog(Class<?> clazz, File logFile) throws Exception {
if (logFile != null)
logFile.delete();
Logger logger = Logger.getLogger(clazz);
logger.removeAllAppenders();
logger.setLevel(Level.DEBUG);
SimpleLayout layout = new SimpleLayout();
File newLogFile = File.createTempFile("log", "");
FileAppender appender = new FileAppender(layout, newLogFile.toString(),
false, false, 0);
logger.addAppender(appender);
return newLogFile;
}
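    // Hypothetical usage sketch: capture DEBUG output of a class under test in
    // a fresh log file, then assert on its contents. The class name and message
    // below are assumed for illustration.
    //
    //   File log = Util.resetLog(SomeOptimizerRule.class, null);
    //   // ... run the code under test ...
    //   Util.checkLogFileMessage(log, new String[] { "rule fired" }, true);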
/**
     * Check whether logFile contains (or does not contain) the given list of messages.
* @param logFile
* @param messages
* @param expected if true, the messages are expected in the logFile,
* otherwise messages should not be there in the log
*/
public static void checkLogFileMessage(File logFile, String[] messages, boolean expected) {
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(logFile));
String logMessage = "";
String line;
while ((line = reader.readLine()) != null) {
logMessage = logMessage + line + "\n";
}
reader.close();
for (int i = 0; i < messages.length; i++) {
boolean present = logMessage.contains(messages[i]);
if (expected) {
if(!present){
fail("The message " + messages[i] + " is not present in" +
"log file contents: " + logMessage);
}
}else{
if(present){
fail("The message " + messages[i] + " is present in" +
"log file contents: " + logMessage);
}
}
}
return ;
}
catch (IOException e) {
fail("caught exception while checking log message :" + e);
}
}
public static LogicalPlan buildLp(PigServer pigServer, String query)
throws Exception {
pigServer.setBatchOn();
pigServer.registerQuery( query );
java.lang.reflect.Method buildLp = pigServer.getClass().getDeclaredMethod("buildLp");
buildLp.setAccessible(true);
return (LogicalPlan ) buildLp.invoke( pigServer );
}
public static PhysicalPlan buildPp(PigServer pigServer, String query)
throws Exception {
LogicalPlan lp = buildLp( pigServer, query );
lp.optimize(pigServer.getPigContext());
return ((HExecutionEngine)pigServer.getPigContext().getExecutionEngine()).compile(lp,
pigServer.getPigContext().getProperties());
}
public static LogicalPlan parse(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
public static LogicalPlan parseAndPreprocess(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
/**
     * Replaces any alias in the given schema whose name starts with
     * "NullAlias" with null. It does a case-insensitive comparison of
     * the alias name.
* @param sch
*/
public static void schemaReplaceNullAlias(Schema sch){
if(sch == null)
return ;
for(FieldSchema fs : sch.getFields()){
if(fs.alias != null && fs.alias.toLowerCase().startsWith("nullalias")){
fs.alias = null;
}
schemaReplaceNullAlias(fs.schema);
}
}
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
Tuple[] expectedResArray) {
List<Tuple> list = new ArrayList<Tuple>();
Collections.addAll(list, expectedResArray);
checkQueryOutputsAfterSort(actualResultsIt, list);
}
static public void convertBagToSortedBag(Tuple t) {
for (int i=0;i<t.size();i++) {
Object obj = null;
try {
obj = t.get(i);
} catch (ExecException e) {
// shall not happen
}
if (obj instanceof DataBag) {
DataBag bag = (DataBag)obj;
Iterator<Tuple> iter = bag.iterator();
DataBag sortedBag = DefaultBagFactory.getInstance().newSortedBag(null);
while (iter.hasNext()) {
Tuple t2 = iter.next();
sortedBag.add(t2);
convertBagToSortedBag(t2);
}
try {
t.set(i, sortedBag);
} catch (ExecException e) {
// shall not happen
}
}
}
}
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, String schemaString) throws IOException {
LogicalSchema resultSchema = org.apache.pig.impl.util.Utils.parseSchema(schemaString);
checkQueryOutputsAfterSortRecursive(actualResultsIt, expectedResArray, resultSchema);
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results. It sorts actual and expected string results before comparison
*
* @param actualResultsIt Result of the executed Pig query
* @param expectedResArray Expected string results to validate against
     * @param schema fieldSchema of expectedResArray
* @throws IOException
*/
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while(actualResultsIt.hasNext()){
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Collections.sort(actualResList);
Collections.sort(expectedResList);
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
public static String readFile(File file) throws IOException {
BufferedReader reader = new BufferedReader(new FileReader(file));
String result = "";
String line;
while ((line=reader.readLine())!=null) {
result += line;
result += "\n";
}
reader.close();
return result;
}
/**
     * This removes the signature from the serialized plan so that changing the
     * way the unique signature is generated does not break this test.
* @param plan the plan to canonicalize
* @return the cleaned up plan
*/
public static String removeSignature(String plan) {
return plan.replaceAll("','','[^']*','scope','true'\\)\\)", "','','','scope','true'))");
}
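    // Illustrative behaviour (plan text abbreviated): the signature between the
    // second and third quoted fields is blanked, so a fragment such as
    //   ','','1234abcd','scope','true'))
    // becomes
    //   ','','','scope','true'))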
    public static boolean isHadoop203plus() {
        String version = org.apache.hadoop.util.VersionInfo.getVersion();
        return !version.matches("\\b0\\.20\\.2\\b");
    }
    public static boolean isHadoop205() {
        String version = org.apache.hadoop.util.VersionInfo.getVersion();
        return version.matches("\\b0\\.20\\.205\\..+");
    }
    public static boolean isHadoop1_x() {
        String version = org.apache.hadoop.util.VersionInfo.getVersion();
        return version.matches("\\b1\\.*\\..+");
    }
public static boolean isSpark2_2_plus() throws IOException {
String sparkVersion = package$.MODULE$.SPARK_VERSION();
return sparkVersion != null && sparkVersion.matches("2\\.([\\d&&[^01]]|[\\d]{2,})\\..*");
}
public static void sortQueryOutputsIfNeed(List<Tuple> actualResList, boolean toSort){
        if (toSort) {
for (Tuple t : actualResList) {
Util.convertBagToSortedBag(t);
}
Collections.sort(actualResList);
}
}
public static void checkQueryOutputs(Iterator<Tuple> actualResults, List<Tuple> expectedResults, boolean checkAfterSort) {
if (checkAfterSort) {
checkQueryOutputsAfterSort(actualResults, expectedResults);
} else {
checkQueryOutputs(actualResults, expectedResults);
}
}
static public void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema, boolean
checkAfterSort) throws IOException {
if (checkAfterSort) {
checkQueryOutputsAfterSortRecursive(actualResultsIt,
expectedResArray, schema);
} else {
checkQueryOutputs(actualResultsIt,
expectedResArray, schema);
}
}
static void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while (actualResultsIt.hasNext()) {
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
public static void assertParallelValues(long defaultParallel,
long requestedParallel,
long estimatedParallel,
long runtimeParallel,
Configuration conf) {
assertConfLong(conf, "pig.info.reducers.default.parallel", defaultParallel);
assertConfLong(conf, "pig.info.reducers.requested.parallel", requestedParallel);
assertConfLong(conf, "pig.info.reducers.estimated.parallel", estimatedParallel);
assertConfLong(conf, MRConfiguration.REDUCE_TASKS, runtimeParallel);
}
public static void assertConfLong(Configuration conf, String param, long expected) {
assertEquals("Unexpected value found in configs for " + param, expected, conf.getLong(param, -1));
}
/**
* Returns a PathFilter that filters out filenames that start with _.
* @return PathFilter
*/
public static PathFilter getSuccessMarkerPathFilter() {
return new PathFilter() {
@Override
public boolean accept(Path p) {
return !p.getName().startsWith("_");
}
};
}
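    // Hypothetical usage sketch: list job output files while skipping marker
    // files such as _SUCCESS or _logs. The output path is illustrative.
    //
    //   FileStatus[] parts = fs.listStatus(new Path("/user/test/output"),
    //           Util.getSuccessMarkerPathFilter());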
/**
*
* @param expected
* Exception class that is expected to be thrown
* @param found
* Exception that occurred in the test
* @param message
* expected String to verify against
*/
public static void assertExceptionAndMessage(Class<?> expected,
Exception found, String message) {
assertEquals(expected, found.getClass());
assertEquals(found.getMessage(), message);
}
/**
* Called to reset ThreadLocal or static states that PigServer depends on
* when a test suite has testcases switching between LOCAL and MAPREDUCE/TEZ
* execution modes
*/
public static void resetStateForExecModeSwitch() {
FileLocalizer.setInitialized(false);
// For tez testing, we want to avoid TezResourceManager/LocalResource reuse
// (when switching between local and mapreduce/tez)
TezResourceManager.dropInstance();
// TODO: once we have Tez local mode, we can get rid of this. For now,
// if we run this test suite in Tez mode and there are some tests
// in LOCAL mode, we need to set ScriptState to
// null to force ScriptState gets initialized every time.
ScriptState.start(null);
}
public static boolean isMapredExecType(ExecType execType) {
return execType == ExecType.MAPREDUCE;
}
    public static boolean isTezExecType(ExecType execType) {
        return execType.name().toLowerCase().startsWith("tez");
    }
    public static boolean isSparkExecType(ExecType execType) {
        return execType.name().toLowerCase().startsWith("spark");
    }
public static String findPigJarName() {
final String suffix = System.getProperty("hadoopversion").equals("20") ? "1" : "2";
File baseDir = new File(".");
String[] jarNames = baseDir.list(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
if (!name.matches("pig.*h" + suffix + "\\.jar")) {
return false;
}
if (name.contains("all")) {
return false;
}
return true;
}
});
if (jarNames==null || jarNames.length!=1) {
throw new RuntimeException("Cannot find pig.jar");
}
return jarNames[0];
}
public static ExecType getLocalTestMode() throws Exception {
String execType = System.getProperty("test.exec.type");
if (execType != null) {
if (execType.equals("tez")) {
return ExecTypeProvider.fromString("tez_local");
} else if (execType.equals("spark")) {
return ExecTypeProvider.fromString("spark_local");
}
}
return ExecTypeProvider.fromString("local");
}
public static void createLogAppender(String appenderName, Writer writer, Class...clazzes) {
WriterAppender writerAppender = new WriterAppender(new PatternLayout("%d [%t] %-5p %c %x - %m%n"), writer);
writerAppender.setName(appenderName);
for (Class clazz : clazzes) {
Logger logger = Logger.getLogger(clazz);
logger.addAppender(writerAppender);
}
}
public static void removeLogAppender(String appenderName, Class...clazzes) {
for (Class clazz : clazzes) {
Logger logger = Logger.getLogger(clazz);
Appender appender = logger.getAppender(appenderName);
appender.close();
logger.removeAppender(appenderName);
}
}
public static Path getFirstPartFile(Path path) throws Exception {
FileStatus[] parts = FileSystem.get(path.toUri(), new Configuration()).listStatus(path,
new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith("part-");
}
});
return parts[0].getPath();
}
public static File getFirstPartFile(File dir) throws Exception {
File[] parts = dir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("part-");
};
});
return parts[0];
}
@SuppressWarnings("rawtypes")
public static String getTestDirectory(Class testClass) {
return TEST_DIR + Path.SEPARATOR + "testdata" + Path.SEPARATOR +testClass.getSimpleName();
}
}
| apache/pig | test/org/apache/pig/test/Util.java | Java | apache-2.0 | 58,248 |
package org.devspark.aws.tools;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ApiGateway;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.Resource;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResource;
import org.devspark.aws.tools.model.resources.EndpointResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResourceMethodParameter;
import org.devspark.aws.tools.model.resources.EndpointResponse;
import org.devspark.aws.tools.model.resources.EndpointResponseHeader;
import org.devspark.aws.tools.model.resources.EndpointResponseSchema;
import org.devspark.aws.tools.swagger.SwaggerFileWriter;
import org.devspark.aws.tools.swagger.VelocitySwaggerFileWriter;
import org.reflections.ReflectionUtils;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
@Mojo(name = "apigateway-deployer")
public class AWSAPIGatewayDeployer extends AbstractMojo {
@Parameter(property = "base-package")
private String basePackage;
private SwaggerFileWriter fileWriter = new VelocitySwaggerFileWriter();
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
Reflections reflections = new Reflections(new ConfigurationBuilder()
.setUrls(ClasspathHelper.forPackage(basePackage)).setScanners(
new SubTypesScanner(), new TypeAnnotationsScanner()));
Set<Class<?>> resources = reflections
.getTypesAnnotatedWith(Resource.class);
Set<Class<?>> apis = reflections
.getTypesAnnotatedWith(ApiGateway.class);
Map<String, EndpointResource> endpointResources = getEndpointResources(resources);
String apiName = getApiName(apis);
fileWriter.createSwaggerFile(new ArrayList<EndpointResource>(endpointResources.values()), apiName);
}
private String getApiName(Set<Class<?>> apis) {
if (apis.size() != 1) {
getLog().warn("Invalid number of @ApiGateway found.");
}
return apis.iterator().next().getAnnotationsByType(ApiGateway.class)[0].name();
}
@SuppressWarnings("unchecked")
private Map<String, EndpointResource> getEndpointResources(Set<Class<?>> resources) {
Map<String, EndpointResource> endpointResources = new HashMap<String, EndpointResource>();
for (Class<?> type : resources) {
Set<Method> resourceMethods = ReflectionUtils.getAllMethods(type,
ReflectionUtils.withAnnotation(ResourceMethod.class));
if (resourceMethods.isEmpty()) {
getLog().warn(
"No methods annotated with @Resource found in type: "
+ type.getName());
continue;
}
for (Method method : resourceMethods) {
Resource methodResource = method.getAnnotation(Resource.class);
String resourceName = type.getAnnotationsByType(Resource.class)[0].name();
if(methodResource != null) {
resourceName = resourceName + "/" + methodResource.name();
}
EndpointResourceMethod endpointMethod = createMethodResource(method, resourceName);
EndpointResource endpointResource = endpointResources.get(resourceName);
if (endpointResource == null) {
endpointResource = new EndpointResource();
endpointResource.setName(resourceName);
endpointResource.setMethods(new ArrayList<EndpointResourceMethod>());
endpointResources.put(resourceName, endpointResource);
}
endpointResource.getMethods().add(endpointMethod);
}
}
return endpointResources;
}
private EndpointResourceMethod createMethodResource(Method method, String resourceName) {
EndpointResourceMethod endpointMethod = new EndpointResourceMethod();
ResourceMethod resourceMethod = method.getAnnotation(ResourceMethod.class);
endpointMethod.setVerb(resourceMethod.httpMethod().name());
endpointMethod.setParameters(getParameters(resourceName));
endpointMethod.setProduces(Arrays.asList("application/json"));
endpointMethod.setResponses(getMethodResponses());
return endpointMethod;
}
//TODO: Replace mocked list with the generation of the responses of the method.
private List<EndpointResponse> getMethodResponses() {
        List<EndpointResponse> responses = new ArrayList<EndpointResponse>();
        EndpointResponse successfulResponse = new EndpointResponse();
        successfulResponse.setHttpStatus("200");
        successfulResponse.setDescription("200 response");
        successfulResponse.setHeaders(new EndpointResponseHeader());
        EndpointResponseSchema schema = new EndpointResponseSchema();
        schema.setRef("#/definitions/Empty");
        successfulResponse.setSchema(schema);
        responses.add(successfulResponse);
        return responses;
}
private List<EndpointResourceMethodParameter> getParameters(String resourceName) {
String pattern = "\\{[a-zA-A]*\\}";
Pattern r = Pattern.compile(pattern);
List<EndpointResourceMethodParameter> parameters = new ArrayList<EndpointResourceMethodParameter>();
Matcher m = r.matcher(resourceName);
while(m.find()){
EndpointResourceMethodParameter parameter = new EndpointResourceMethodParameter();
parameter.setName(m.group(0).replaceAll("\\{*\\}*", ""));
//TODO: Review how to populate the parameter metadata.
parameter.setRequired(true);
parameter.setType("string");
parameter.setIn("path");
parameters.add(parameter);
}
return parameters;
}
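    // Illustrative behaviour (resource name assumed): for a resource such as
    // "users/{userId}/orders/{orderId}" this extracts two required "path"
    // parameters of type "string", named userId and orderId.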
}
| devspark-com/aws-lambda-deploy | src/main/java/org/devspark/aws/tools/AWSAPIGatewayDeployer.java | Java | apache-2.0 | 5,913 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.CommonBundle;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.WriteActionAware;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* @author peter
*/
public abstract class ElementCreator implements WriteActionAware {
private static final Logger LOG = Logger.getInstance(ElementCreator.class);
private final Project myProject;
private final @NlsContexts.DialogTitle String myErrorTitle;
protected ElementCreator(Project project, @NotNull @NlsContexts.DialogTitle String errorTitle) {
myProject = project;
myErrorTitle = errorTitle;
}
protected abstract PsiElement @NotNull [] create(@NotNull String newName) throws Exception;
@NlsContexts.Command
@NotNull
protected abstract String getActionName(@NotNull String newName);
public @NotNull PsiElement @NotNull [] tryCreate(@NotNull final String inputString) {
if (inputString.isEmpty()) {
Messages.showMessageDialog(myProject, IdeBundle.message("error.name.should.be.specified"), CommonBundle.getErrorTitle(),
Messages.getErrorIcon());
return PsiElement.EMPTY_ARRAY;
}
Ref<List<SmartPsiElementPointer<?>>> createdElements = Ref.create();
Exception exception = executeCommand(getActionName(inputString), () -> {
PsiElement[] psiElements = create(inputString);
SmartPointerManager manager = SmartPointerManager.getInstance(myProject);
createdElements.set(ContainerUtil.map(psiElements, manager::createSmartPsiElementPointer));
});
if (exception != null) {
handleException(exception);
return PsiElement.EMPTY_ARRAY;
}
return ContainerUtil.mapNotNull(createdElements.get(), SmartPsiElementPointer::getElement).toArray(PsiElement.EMPTY_ARRAY);
}
@Nullable
private Exception executeCommand(@NotNull @NlsContexts.Command String commandName, @NotNull ThrowableRunnable<? extends Exception> invokeCreate) {
final Exception[] exception = new Exception[1];
CommandProcessor.getInstance().executeCommand(myProject, () -> {
LocalHistoryAction action = LocalHistory.getInstance().startAction(commandName);
try {
if (startInWriteAction()) {
WriteAction.run(invokeCreate);
}
else {
invokeCreate.run();
}
}
catch (Exception ex) {
exception[0] = ex;
}
finally {
action.finish();
}
}, commandName, null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
return exception[0];
}
private void handleException(Exception t) {
LOG.info(t);
String errorMessage = getErrorMessage(t);
Messages.showMessageDialog(myProject, errorMessage, myErrorTitle, Messages.getErrorIcon());
}
public static @NlsContexts.DialogMessage String getErrorMessage(Throwable t) {
String errorMessage = CreateElementActionBase.filterMessage(t.getMessage());
if (StringUtil.isEmpty(errorMessage)) {
errorMessage = t.toString();
}
return errorMessage;
}
}
| jwren/intellij-community | platform/lang-api/src/com/intellij/ide/actions/ElementCreator.java | Java | apache-2.0 | 4,462 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.huaweicloud.smn;
import java.util.HashMap;
import com.huaweicloud.sdk.core.auth.BasicCredentials;
import com.huaweicloud.sdk.core.http.HttpConfig;
import com.huaweicloud.sdk.smn.v2.SmnClient;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageRequest;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageRequestBody;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageResponse;
import org.apache.camel.Exchange;
import org.apache.camel.component.huaweicloud.smn.constants.SmnConstants;
import org.apache.camel.component.huaweicloud.smn.constants.SmnOperations;
import org.apache.camel.component.huaweicloud.smn.constants.SmnProperties;
import org.apache.camel.component.huaweicloud.smn.constants.SmnServices;
import org.apache.camel.component.huaweicloud.smn.models.ClientConfigurations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SimpleNotificationProducer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(SimpleNotificationProducer.class);
private SmnClient smnClient;
private ClientConfigurations clientConfigurations;
public SimpleNotificationProducer(SimpleNotificationEndpoint endpoint) {
super(endpoint);
}
@Override
protected void doStart() throws Exception {
super.doStart();
validateAndInitializeSmnClient((SimpleNotificationEndpoint) super.getEndpoint());
}
public void process(Exchange exchange) throws Exception {
String service = ((SimpleNotificationEndpoint) super.getEndpoint()).getSmnService();
if (!ObjectHelper.isEmpty(service)) {
switch (service) {
case SmnServices.PUBLISH_MESSAGE:
if (LOG.isDebugEnabled()) {
LOG.debug("Using message publishing service");
}
performPublishMessageServiceOperations((SimpleNotificationEndpoint) super.getEndpoint(), exchange);
if (LOG.isDebugEnabled()) {
LOG.debug("Completed publishing message");
}
break;
default:
if (LOG.isErrorEnabled()) {
LOG.error("Unsupported service name {}", service);
}
throw new UnsupportedOperationException(String.format("service %s is not a supported service", service));
}
} else {
if (LOG.isErrorEnabled()) {
LOG.error("Service name is null/empty");
}
throw new IllegalStateException("service name cannot be null/empty");
}
}
/**
* Publish message service operations
*
* @param endpoint
* @param exchange
*/
private void performPublishMessageServiceOperations(SimpleNotificationEndpoint endpoint, Exchange exchange) {
PublishMessageResponse response;
PublishMessageRequestBody apiBody;
this.clientConfigurations = validateServiceConfigurations(endpoint, exchange);
if (LOG.isDebugEnabled()) {
LOG.debug("Checking operation name");
}
switch (clientConfigurations.getOperation()) {
case SmnOperations.PUBLISH_AS_TEXT_MESSAGE:
if (LOG.isDebugEnabled()) {
LOG.debug("Publishing as text message");
}
apiBody = new PublishMessageRequestBody()
.withMessage(exchange.getMessage().getBody(String.class))
.withSubject(clientConfigurations.getSubject())
.withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()));
response = smnClient.publishMessage(new PublishMessageRequest()
.withBody(apiBody)
.withTopicUrn(clientConfigurations.getTopicUrn()));
break;
case SmnOperations.PUBLISH_AS_TEMPLATED_MESSAGE:
if (LOG.isDebugEnabled()) {
LOG.debug("Publishing as templated message");
}
apiBody = new PublishMessageRequestBody()
.withMessage(exchange.getMessage().getBody(String.class))
.withSubject(clientConfigurations.getSubject())
.withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()))
.withMessageTemplateName((String) exchange.getProperty(SmnProperties.TEMPLATE_NAME))
.withTags((HashMap<String, String>) exchange.getProperty(SmnProperties.TEMPLATE_TAGS))
.withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()));
response = smnClient.publishMessage(new PublishMessageRequest()
.withBody(apiBody)
.withTopicUrn(clientConfigurations.getTopicUrn()));
break;
default:
throw new UnsupportedOperationException(
String.format("operation %s not supported in publishMessage service",
clientConfigurations.getOperation()));
}
setResponseParameters(exchange, response);
}
/**
* maps api response parameters as exchange property
*
* @param exchange
* @param response
*/
private void setResponseParameters(Exchange exchange, PublishMessageResponse response) {
if (response == null) {
return; // mapping is not required if response object is null
}
if (!ObjectHelper.isEmpty(response.getMessageId())) {
exchange.setProperty(SmnProperties.SERVICE_MESSAGE_ID, response.getMessageId());
}
if (!ObjectHelper.isEmpty(response.getRequestId())) {
exchange.setProperty(SmnProperties.SERVICE_REQUEST_ID, response.getRequestId());
}
}
/**
     * Validation and initialization of the SmnClient object.
*
* @param simpleNotificationEndpoint
*/
private void validateAndInitializeSmnClient(SimpleNotificationEndpoint simpleNotificationEndpoint) {
if (simpleNotificationEndpoint.getSmnClient() != null) {
if (LOG.isWarnEnabled()) {
LOG.warn(
"Instance of SmnClient was set on the endpoint. Skipping creation of SmnClient from endpoint parameters");
}
this.smnClient = simpleNotificationEndpoint.getSmnClient();
return;
}
this.clientConfigurations = new ClientConfigurations();
//checking for cloud SK (secret key)
if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getSecretKey()) &&
ObjectHelper.isEmpty(simpleNotificationEndpoint.getServiceKeys())) {
if (LOG.isErrorEnabled()) {
LOG.error("secret key (SK) not found");
}
throw new IllegalArgumentException("authentication parameter 'secret key (SK)' not found");
} else {
clientConfigurations.setSecretKey(simpleNotificationEndpoint.getSecretKey() != null
? simpleNotificationEndpoint.getSecretKey() : simpleNotificationEndpoint.getServiceKeys().getSecretKey());
}
//checking for cloud AK (auth key)
if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getAuthKey()) &&
ObjectHelper.isEmpty(simpleNotificationEndpoint.getServiceKeys())) {
if (LOG.isErrorEnabled()) {
LOG.error("authentication key (AK) not found");
}
throw new IllegalArgumentException("authentication parameter 'authentication key (AK)' not found");
} else {
clientConfigurations.setAuthenticationkey(simpleNotificationEndpoint.getAuthKey() != null
? simpleNotificationEndpoint.getAuthKey()
: simpleNotificationEndpoint.getServiceKeys().getAuthenticationKey());
}
//checking for project ID
if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getProjectId())) {
if (LOG.isErrorEnabled()) {
LOG.error("Project ID not found");
}
throw new IllegalArgumentException("project ID not found");
} else {
clientConfigurations.setProjectId(simpleNotificationEndpoint.getProjectId());
}
//checking for region
String endpointUrl = SimpleNotificationUtils.resolveSmnServiceEndpoint(simpleNotificationEndpoint.getRegion());
if (endpointUrl == null) {
if (LOG.isErrorEnabled()) {
LOG.error("Valid region not found");
}
throw new IllegalArgumentException("enter a valid region");
} else {
clientConfigurations.setServiceEndpoint(endpointUrl);
}
//checking for ignore ssl verification
boolean ignoreSslVerification = simpleNotificationEndpoint.isIgnoreSslVerification();
if (ignoreSslVerification) {
if (LOG.isWarnEnabled()) {
LOG.warn("SSL verification is ignored. This is unsafe in production environment");
}
clientConfigurations.setIgnoreSslVerification(ignoreSslVerification);
}
//checking if http proxy authentication is used
if (simpleNotificationEndpoint.getProxyHost() != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Reading http proxy configurations");
}
clientConfigurations.setProxyHost(simpleNotificationEndpoint.getProxyHost());
clientConfigurations.setProxyPort(simpleNotificationEndpoint.getProxyPort());
clientConfigurations.setProxyUser(simpleNotificationEndpoint.getProxyUser());
clientConfigurations.setProxyPassword(simpleNotificationEndpoint.getProxyPassword());
}
this.smnClient = initializeClient(clientConfigurations);
}
/**
     * Initialization of the SMN client. The client is created when the producer starts.
*
* @param clientConfigurations
* @return
*/
private SmnClient initializeClient(ClientConfigurations clientConfigurations) {
if (LOG.isDebugEnabled()) {
LOG.debug("Initializing Smn client");
}
HttpConfig httpConfig = null;
if (clientConfigurations.getProxyHost() != null) {
httpConfig = HttpConfig.getDefaultHttpConfig();
httpConfig.withProxyHost(clientConfigurations.getProxyHost())
.withProxyPort(clientConfigurations.getProxyPort())
.setIgnoreSSLVerification(clientConfigurations.isIgnoreSslVerification());
if (clientConfigurations.getProxyUser() != null) {
httpConfig.withProxyUsername(clientConfigurations.getProxyUser());
httpConfig.withProxyPassword(clientConfigurations.getProxyPassword());
}
}
BasicCredentials credentials = new BasicCredentials()
.withAk(clientConfigurations.getAuthenticationkey())
.withSk(clientConfigurations.getSecretKey())
.withProjectId(clientConfigurations.getProjectId());
if (LOG.isDebugEnabled()) {
LOG.debug("Building Smn client");
}
// building smn client object
SmnClient smnClient = SmnClient.newBuilder()
.withCredential(credentials)
.withHttpConfig(httpConfig)
.withEndpoint(clientConfigurations.getServiceEndpoint())
.build();
if (LOG.isDebugEnabled()) {
LOG.debug("Successfully initialized Smn client");
}
return smnClient;
}
/**
     * Validation of all user inputs before attempting to invoke a service operation.
*
* @param simpleNotificationEndpoint
* @param exchange
* @return
*/
private ClientConfigurations validateServiceConfigurations(
SimpleNotificationEndpoint simpleNotificationEndpoint, Exchange exchange) {
ClientConfigurations clientConfigurations = new ClientConfigurations();
if (LOG.isDebugEnabled()) {
LOG.debug("Inspecting exchange body");
}
// verifying if exchange has valid body content. this is mandatory for 'publish as text' operation
if (ObjectHelper.isEmpty(exchange.getMessage().getBody())) {
if (simpleNotificationEndpoint.getOperation().equals("publishAsTextMessage")) {
if (LOG.isErrorEnabled()) {
LOG.error("Found null/empty body. Cannot perform publish as text operation");
}
throw new IllegalArgumentException("exchange body cannot be null / empty");
}
}
// checking for mandatory field 'operation name'
if (LOG.isDebugEnabled()) {
LOG.debug("Inspecting operation name");
}
if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.SMN_OPERATION))
&& ObjectHelper.isEmpty(simpleNotificationEndpoint.getOperation())) {
if (LOG.isErrorEnabled()) {
LOG.error("Found null/empty operation name. Cannot proceed with Smn operations");
}
throw new IllegalArgumentException("operation name not found");
} else {
clientConfigurations.setOperation(exchange.getProperty(SmnProperties.SMN_OPERATION) != null
? (String) exchange.getProperty(SmnProperties.SMN_OPERATION) : simpleNotificationEndpoint.getOperation());
}
// checking for mandatory field 'topic name'
if (LOG.isDebugEnabled()) {
LOG.debug("Inspecting topic name");
}
if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_TOPIC_NAME))) {
if (LOG.isErrorEnabled()) {
LOG.error("Found null/empty topic name");
}
throw new IllegalArgumentException("topic name not found");
} else {
clientConfigurations.setTopicUrn(String.format(SmnConstants.TOPIC_URN_FORMAT,
simpleNotificationEndpoint.getRegion(), simpleNotificationEndpoint.getProjectId(),
exchange.getProperty(SmnProperties.NOTIFICATION_TOPIC_NAME)));
}
// checking for optional field 'message subject'
if (LOG.isDebugEnabled()) {
LOG.debug("Inspecting notification subject value");
}
if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_SUBJECT))) {
if (LOG.isWarnEnabled()) {
LOG.warn("notification subject not found. defaulting to 'DEFAULT_SUBJECT'");
}
clientConfigurations.setSubject("DEFAULT_SUBJECT");
} else {
clientConfigurations.setSubject((String) exchange.getProperty(SmnProperties.NOTIFICATION_SUBJECT));
}
// checking for optional field 'message ttl'
if (LOG.isDebugEnabled()) {
LOG.debug("Inspecting TTL");
}
if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_TTL))) {
if (LOG.isWarnEnabled()) {
LOG.warn("TTL not found. defaulting to default value {}", simpleNotificationEndpoint.getMessageTtl());
}
clientConfigurations.setMessageTtl(simpleNotificationEndpoint.getMessageTtl());
} else {
clientConfigurations.setMessageTtl((int) exchange.getProperty(SmnProperties.NOTIFICATION_TTL));
}
return clientConfigurations;
}
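    // Hypothetical route sketch: the producer expects the operation, topic name
    // and (optionally) subject/TTL as exchange properties before the message
    // reaches the endpoint. Scheme, options and property values are assumed.
    //
    //   from("direct:publish")
    //       .setProperty(SmnProperties.SMN_OPERATION, constant("publishAsTextMessage"))
    //       .setProperty(SmnProperties.NOTIFICATION_TOPIC_NAME, constant("my-topic"))
    //       .setProperty(SmnProperties.NOTIFICATION_SUBJECT, constant("Hello"))
    //       .to("hwcloud-smn:publishMessage?...");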
}
| pmoerenhout/camel | components/camel-huaweicloud-smn/src/main/java/org/apache/camel/component/huaweicloud/smn/SimpleNotificationProducer.java | Java | apache-2.0 | 16,562 |
package com.zswxsqxt.wf.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Repository;
import cn.org.rapid_framework.page.Page;
import com.opendata.common.base.BaseHibernateDao;
import com.zswxsqxt.wf.model.WfActivity;
import com.zswxsqxt.wf.model.WfProject;
import com.zswxsqxt.wf.query.WfActivityQuery;
/**
 describe: DAO for the workflow activity (node) table
*/
@Repository
public class WfActivityDao extends BaseHibernateDao<WfActivity,String>
{
public Class getEntityClass()
{
return WfActivity.class;
}
/**
 Query the workflow activity table via a WfActivityQuery object
*/
public Page findPage(WfActivityQuery query,int pageSize,int pageNum)
{
StringBuilder hql=new StringBuilder();
hql.append(" from WfActivity ett where 1=1");
List param=new ArrayList();
if(query!=null)
{
if(!StringUtils.isEmpty(query.getId()))
{
hql.append(" and ett.id=?");
param.add(query.getId());
}
if(!StringUtils.isEmpty(query.getName()))
{
hql.append(" and ett.name like ?");
param.add("%"+query.getName()+"%");
}
if(query.getOrderNum()!=null)
{
hql.append(" and ett.orderNum=?");
param.add(query.getOrderNum());
}
if(query.getActType()!=null)
{
hql.append(" and ett.actType=?");
param.add(query.getActType());
}
if(query.getActFlag()!=null)
{
hql.append(" and ett.actFlag=?");
param.add(query.getActFlag());
}
if(!StringUtils.isEmpty(query.getDescription()))
{
hql.append(" and ett.description=?");
param.add(query.getDescription());
}
if(!StringUtils.isEmpty(query.getUrl()))
{
hql.append(" and ett.url=?");
param.add(query.getUrl());
}
if(!StringUtils.isEmpty(query.getGroupFlag()))
{
hql.append(" and ett.groupFlag=?");
param.add(query.getGroupFlag());
}
if(!StringUtils.isEmpty(query.getExtFiled3()))
{
hql.append(" and ett.extFiled3=?");
param.add(query.getExtFiled3());
}
if(query.getTs()!=null)
{
hql.append(" and ett.ts=?");
param.add(query.getTs());
}
if(query.getWfProject()!=null)
{
hql.append(" and ett.wfProject.id=?");
param.add(query.getWfProject().getId());
}
if(query.getWfInstance()!=null)
{
hql.append(" and ett.wfInstance=?");
param.add(query.getWfInstance());
}
}
if(!StringUtils.isEmpty(query.getSortColumns())){
if(!query.getSortColumns().equals("ts")){
hql.append(" order by ett."+query.getSortColumns()+" , ett.ts desc ");
}else{
hql.append(" order by ett.orderNum asc ");
}
}else{
hql.append(" order by ett.orderNum asc ");
}
return super.findByHql(hql.toString(), pageSize, pageNum, param.toArray());
}
/**
     * Get all activities under the workflow with the given id, sorted by activity order
* @param proId
* @return
*/
public List<WfActivity> getWfActivity(String proId){
String hql = "from WfActivity where wfProject.id = ? order by orderNum asc";
List<WfActivity> list = super.findFastByHql(hql, proId);
if(list.size()>0){
return list;
}else{
return null;
}
}
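    // Hypothetical usage sketch (workflow id is illustrative):
    //   List<WfActivity> activities = wfActivityDao.getWfActivity("wf-001");
    //   // null is returned when the workflow has no activities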
}
| TuWei1992/zswxsqxt | src/main/zswxsqxt/com/zswxsqxt/wf/dao/WfActivityDao.java | Java | apache-2.0 | 3,115 |
/**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.plugin.maven;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.eclipse.aether.repository.RemoteRepository;
import org.wildfly.swarm.bootstrap.util.BootstrapProperties;
import org.wildfly.swarm.fractionlist.FractionList;
import org.wildfly.swarm.spi.api.SwarmProperties;
import org.wildfly.swarm.tools.ArtifactSpec;
import org.wildfly.swarm.tools.BuildTool;
import org.wildfly.swarm.tools.DependencyManager;
import org.wildfly.swarm.tools.FractionDescriptor;
import org.wildfly.swarm.tools.FractionUsageAnalyzer;
import org.wildfly.swarm.tools.exec.SwarmExecutor;
import org.wildfly.swarm.tools.exec.SwarmProcess;
/**
* @author Bob McWhirter
* @author Ken Finnigan
*/
@Mojo(name = "start",
requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME,
requiresDependencyCollection = ResolutionScope.COMPILE_PLUS_RUNTIME)
public class StartMojo extends AbstractSwarmMojo {
@Parameter(alias = "stdoutFile", property = "swarm.stdout")
public File stdoutFile;
@Parameter(alias = "stderrFile", property = "swarm.stderr" )
public File stderrFile;
@Parameter(alias = "useUberJar", defaultValue = "${wildfly-swarm.useUberJar}")
public boolean useUberJar;
@Parameter(alias = "debug", property = SwarmProperties.DEBUG_PORT)
public Integer debugPort;
@Parameter(alias = "jvmArguments", property = "swarm.jvmArguments")
public List<String> jvmArguments = new ArrayList<>();
@Parameter(alias = "arguments" )
public List<String> arguments = new ArrayList<>();
@Parameter(property = "swarm.arguments", defaultValue = "")
public String argumentsProp;
boolean waitForProcess;
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored"})
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
initProperties(true);
initEnvironment();
final SwarmExecutor executor;
if (this.useUberJar) {
executor = uberJarExecutor();
} else if (this.project.getPackaging().equals("war")) {
executor = warExecutor();
} else if (this.project.getPackaging().equals("jar")) {
executor = jarExecutor();
} else {
throw new MojoExecutionException("Unsupported packaging: " + this.project.getPackaging());
}
executor.withJVMArguments( this.jvmArguments );
if ( this.argumentsProp != null ) {
StringTokenizer args = new StringTokenizer(this.argumentsProp);
while ( args.hasMoreTokens() ) {
this.arguments.add( args.nextToken() );
}
}
executor.withArguments( this.arguments );
final SwarmProcess process;
try {
process = executor.withDebug(debugPort)
.withProperties(this.properties)
.withStdoutFile(this.stdoutFile != null ? this.stdoutFile.toPath() : null)
.withStderrFile(this.stderrFile != null ? this.stderrFile.toPath() : null)
.withEnvironment(this.environment)
.withWorkingDirectory(this.project.getBasedir().toPath())
.withProperty("remote.maven.repo",
String.join(",",
this.project.getRemoteProjectRepositories().stream()
.map(RemoteRepository::getUrl)
.collect(Collectors.toList())))
.execute();
Runtime.getRuntime().addShutdownHook( new Thread(()->{
try {
                    // Sleeping for a few millis gives the process time to shut down gracefully
Thread.sleep(100L);
process.stop( 10, TimeUnit.SECONDS );
} catch (InterruptedException e) {
}
}));
process.awaitReadiness(2, TimeUnit.MINUTES);
if (!process.isAlive()) {
throw new MojoFailureException("Process failed to start");
}
if (process.getError() != null) {
throw new MojoFailureException("Error starting process", process.getError());
}
} catch (IOException e) {
throw new MojoFailureException("unable to execute", e);
} catch (InterruptedException e) {
throw new MojoFailureException("Error waiting for deployment", e);
}
List<SwarmProcess> procs = (List<SwarmProcess>) getPluginContext().get("swarm-process");
if (procs == null) {
procs = new ArrayList<>();
getPluginContext().put("swarm-process", procs);
}
procs.add(process);
if (waitForProcess) {
try {
process.waitFor();
} catch (InterruptedException e) {
try {
process.stop( 10, TimeUnit.SECONDS );
} catch (InterruptedException ie) {
// Do nothing
}
} finally {
process.destroyForcibly();
}
}
}
protected SwarmExecutor uberJarExecutor() throws MojoFailureException {
getLog().info("Starting -swarm.jar");
String finalName = this.project.getBuild().getFinalName();
if (finalName.endsWith(".war") || finalName.endsWith(".jar")) {
finalName = finalName.substring(0, finalName.length() - 4);
}
return new SwarmExecutor()
.withExecutableJar(Paths.get(this.projectBuildDir, finalName + "-swarm.jar"));
}
protected SwarmExecutor warExecutor() throws MojoFailureException {
getLog().info("Starting .war");
String finalName = this.project.getBuild().getFinalName();
if (!finalName.endsWith(".war")) {
finalName = finalName + ".war";
}
return executor(Paths.get(this.projectBuildDir, finalName), finalName, false);
}
protected SwarmExecutor jarExecutor() throws MojoFailureException {
getLog().info("Starting .jar");
final String finalName = this.project.getBuild().getFinalName();
return executor(Paths.get(this.project.getBuild().getOutputDirectory()),
finalName.endsWith(".jar") ? finalName : finalName + ".jar",
true);
}
protected SwarmExecutor executor(final Path appPath, final String name,
final boolean scanDependencies) throws MojoFailureException {
final SwarmExecutor executor = new SwarmExecutor()
.withModules(expandModules())
.withProperty(BootstrapProperties.APP_NAME, name)
.withClassPathEntries(dependencies(appPath, scanDependencies));
if (this.mainClass != null) {
executor.withMainClass(this.mainClass);
} else {
executor.withDefaultMainClass();
}
return executor;
}
List<Path> findNeededFractions(final Set<Artifact> existingDeps,
final Path source,
final boolean scanDeps) throws MojoFailureException {
getLog().info("Scanning for needed WildFly Swarm fractions with mode: " + fractionDetectMode);
final Set<String> existingDepGASet = existingDeps.stream()
.map(d -> String.format("%s:%s", d.getGroupId(), d.getArtifactId()))
.collect(Collectors.toSet());
final Set<FractionDescriptor> fractions;
final FractionUsageAnalyzer analyzer = new FractionUsageAnalyzer(FractionList.get()).source(source);
if (scanDeps) {
existingDeps.forEach(d -> analyzer.source(d.getFile()));
}
final Predicate<FractionDescriptor> notExistingDep =
d -> !existingDepGASet.contains(String.format("%s:%s", d.getGroupId(), d.getArtifactId()));
try {
fractions = analyzer.detectNeededFractions().stream()
.filter(notExistingDep)
.collect(Collectors.toSet());
} catch (IOException e) {
throw new MojoFailureException("failed to scan for fractions", e);
}
getLog().info("Detected fractions: " + String.join(", ", fractions.stream()
.map(FractionDescriptor::av)
.sorted()
.collect(Collectors.toList())));
fractions.addAll(this.additionalFractions.stream()
.map(f -> FractionDescriptor.fromGav(FractionList.get(), f))
.collect(Collectors.toSet()));
final Set<FractionDescriptor> allFractions = new HashSet<>(fractions);
allFractions.addAll(fractions.stream()
.flatMap(f -> f.getDependencies().stream())
.filter(notExistingDep)
.collect(Collectors.toSet()));
getLog().info("Using fractions: " +
String.join(", ", allFractions.stream()
.map(FractionDescriptor::gavOrAv)
.sorted()
.collect(Collectors.toList())));
final Set<ArtifactSpec> specs = new HashSet<>();
specs.addAll(existingDeps.stream()
.map(this::artifactToArtifactSpec)
.collect(Collectors.toList()));
specs.addAll(allFractions.stream()
.map(FractionDescriptor::toArtifactSpec)
.collect(Collectors.toList()));
try {
return mavenArtifactResolvingHelper().resolveAll(specs).stream()
.map(s -> s.file.toPath())
.collect(Collectors.toList());
} catch (Exception e) {
throw new MojoFailureException("failed to resolve fraction dependencies", e);
}
}
List<Path> dependencies(final Path archiveContent,
final boolean scanDependencies) throws MojoFailureException {
final List<Path> elements = new ArrayList<>();
final Set<Artifact> artifacts = this.project.getArtifacts();
boolean hasSwarmDeps = false;
for (Artifact each : artifacts) {
if (each.getGroupId().equals(DependencyManager.WILDFLY_SWARM_GROUP_ID)
&& each.getArtifactId().equals(DependencyManager.WILDFLY_SWARM_BOOTSTRAP_ARTIFACT_ID)) {
hasSwarmDeps = true;
}
if (each.getGroupId().equals("org.jboss.logmanager")
&& each.getArtifactId().equals("jboss-logmanager")) {
continue;
}
if (each.getScope().equals("provided")) {
continue;
}
elements.add(each.getFile().toPath());
}
elements.add(Paths.get(this.project.getBuild().getOutputDirectory()));
if (fractionDetectMode != BuildTool.FractionDetectionMode.never) {
if (fractionDetectMode == BuildTool.FractionDetectionMode.force ||
!hasSwarmDeps) {
List<Path> fractionDeps = findNeededFractions(artifacts, archiveContent, scanDependencies);
for(Path p : fractionDeps) {
if(!elements.contains(p))
elements.add(p);
}
}
} else if (!hasSwarmDeps) {
getLog().warn("No WildFly Swarm dependencies found and fraction detection disabled");
}
return elements;
}
List<Path> expandModules() {
return this.additionalModules.stream()
.map(m -> Paths.get(this.project.getBuild().getOutputDirectory(), m))
.collect(Collectors.toList());
}
} | bobmcwhirter/wildfly-swarm | plugin/src/main/java/org/wildfly/swarm/plugin/maven/StartMojo.java | Java | apache-2.0 | 13,234 |
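/*
 * Editor's sketch (not part of StartMojo above): dependencies(...) walks the
 * project artifacts, skipping provided-scope artifacts and jboss-logmanager
 * while remembering whether the Swarm bootstrap artifact was seen. This
 * self-contained version models an artifact as a plain value class; the
 * literal ids below are invented stand-ins for the DependencyManager
 * constants referenced in the real method.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class DependencyFilterSketch {

    static class Artifact {
        final String groupId, artifactId, scope;
        Artifact(String g, String a, String s) { groupId = g; artifactId = a; scope = s; }
    }

    public static void main(String[] args) {
        List<Artifact> artifacts = Arrays.asList(
                new Artifact("org.wildfly.swarm", "bootstrap", "compile"),
                new Artifact("org.jboss.logmanager", "jboss-logmanager", "compile"),
                new Artifact("com.example", "lib", "provided"),
                new Artifact("com.example", "app", "compile"));

        boolean hasSwarmDeps = false;
        List<Artifact> classpath = new ArrayList<>();
        for (Artifact a : artifacts) {
            if (a.groupId.equals("org.wildfly.swarm") && a.artifactId.equals("bootstrap")) {
                hasSwarmDeps = true;              // remember it, but still add it
            }
            if (a.groupId.equals("org.jboss.logmanager")
                    && a.artifactId.equals("jboss-logmanager")) {
                continue;                          // excluded: supplied by the runtime
            }
            if (a.scope.equals("provided")) {
                continue;                          // excluded: not packaged
            }
            classpath.add(a);
        }
        System.out.println(hasSwarmDeps + " / " + classpath.size()); // true / 2
    }
}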
/*
* Copyright (C) 2007-2015 Peter Monks.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of an unsupported extension to Alfresco.
*
*/
package org.alfresco.extension.bulkimport.source.fs;
import java.io.File;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.util.Pair;
import org.alfresco.extension.bulkimport.source.BulkImportSourceStatus;
import static org.alfresco.extension.bulkimport.util.LogUtils.*;
import static org.alfresco.extension.bulkimport.source.fs.FilesystemSourceUtils.*;
/**
 * This class implements a directory analyser. This is the process by which
* the contents of a source directory are grouped together into a list of
* <code>FilesystemBulkImportItem</code>s.
*
* @author Peter Monks (pmonks@gmail.com)
*/
public final class DirectoryAnalyser
{
private final static Log log = LogFactory.getLog(DirectoryAnalyser.class);
// Status counters
private final static String COUNTER_NAME_FILES_SCANNED = "Files scanned";
private final static String COUNTER_NAME_DIRECTORIES_SCANNED = "Directories scanned";
private final static String COUNTER_NAME_UNREADABLE_ENTRIES = "Unreadable entries";
private final static String[] COUNTER_NAMES = { COUNTER_NAME_FILES_SCANNED,
COUNTER_NAME_DIRECTORIES_SCANNED,
COUNTER_NAME_UNREADABLE_ENTRIES };
private final ServiceRegistry serviceRegistry;
private final ContentStore configuredContentStore;
private final MetadataLoader metadataLoader;
private BulkImportSourceStatus importStatus;
public DirectoryAnalyser(final ServiceRegistry serviceRegistry,
final ContentStore configuredContentStore,
final MetadataLoader metadataLoader)
{
// PRECONDITIONS
assert serviceRegistry != null : "serviceRegistry must not be null.";
assert configuredContentStore != null : "configuredContentStore must not be null.";
assert metadataLoader != null : "metadataLoader must not be null.";
// Body
this.serviceRegistry = serviceRegistry;
this.configuredContentStore = configuredContentStore;
this.metadataLoader = metadataLoader;
}
public void init(final BulkImportSourceStatus importStatus)
{
        assert importStatus != null : "importStatus must not be null.";
        this.importStatus = importStatus;
importStatus.preregisterSourceCounters(COUNTER_NAMES);
}
/**
* Analyses the given directory.
*
* @param sourceDirectory The source directory for the entire import (note: <u>must</u> be a directory) <i>(must not be null)</i>.
* @param directory The directory to analyse (note: <u>must</u> be a directory) <i>(must not be null)</i>.
* @return An <code>AnalysedDirectory</code> object <i>(will not be null)</i>.
* @throws InterruptedException If the thread executing the method is interrupted.
*/
public Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final File sourceDirectory, final File directory)
throws InterruptedException
{
// PRECONDITIONS
if (sourceDirectory == null) throw new IllegalArgumentException("sourceDirectory cannot be null.");
if (directory == null) throw new IllegalArgumentException("directory cannot be null.");
// Body
if (debug(log)) debug(log, "Analysing directory " + getFileName(directory) + "...");
Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;
File[] directoryListing = null;
long analysisStart = 0L;
long analysisEnd = 0L;
long start = 0L;
long end = 0L;
String sourceRelativeParentDirectory = sourceDirectory.toPath().relativize(directory.toPath()).toString(); // Note: JDK 1.7 specific
// List the directory
start = System.nanoTime();
analysisStart = start;
directoryListing = directory.listFiles();
end = System.nanoTime();
if (trace(log)) trace(log, "List directory (" + directoryListing.length + " entries) took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");
// Build up the list of items from the directory listing
start = System.nanoTime();
result = analyseDirectory(sourceRelativeParentDirectory, directoryListing);
end = System.nanoTime();
if (trace(log)) trace(log, "Convert directory listing to set of filesystem import items took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");
analysisEnd = end;
if (debug(log)) debug(log, "Finished analysing directory " + getFileName(directory) + ", in " + (float)(analysisEnd - analysisStart) / (1000 * 1000 * 1000) + "s.");
return(result);
}
private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final String sourceRelativeParentDirectory, final File[] directoryListing)
{
Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;
if (directoryListing != null)
{
// This needs some Clojure, desperately...
Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles = categoriseFiles(directoryListing);
if (debug(log)) debug(log, "Categorised files: " + String.valueOf(categorisedFiles));
result = constructImportItems(sourceRelativeParentDirectory, categorisedFiles);
}
return(result);
}
private Map<String, SortedMap<BigDecimal, Pair<File, File>>> categoriseFiles(final File[] directoryListing)
{
Map<String, SortedMap<BigDecimal, Pair<File, File>>> result = null;
if (directoryListing != null)
{
result = new HashMap<String, SortedMap<BigDecimal, Pair<File, File>>>();
for (final File file : directoryListing)
{
categoriseFile(result, file);
}
}
return(result);
}
/*
* This method does the hard work of figuring out where the file belongs (which parent item, and where in that item's
* version history).
*/
private void categoriseFile(final Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles, final File file)
{
if (file != null)
{
if (file.canRead())
{
final String fileName = file.getName();
final String parentName = getParentName(metadataLoader, fileName);
final boolean isMetadata = isMetadataFile(metadataLoader, fileName);
final BigDecimal versionNumber = getVersionNumber(fileName);
SortedMap<BigDecimal, Pair<File, File>> versions = categorisedFiles.get(parentName);
// Find the item
if (versions == null)
{
versions = new TreeMap<BigDecimal, Pair<File, File>>();
categorisedFiles.put(parentName, versions);
}
// Find the version within the item
Pair<File, File> version = versions.get(versionNumber);
if (version == null)
{
version = new Pair<File, File>(null, null);
}
// Categorise the incoming file in that version of the item
if (isMetadata)
{
version = new Pair<File, File>(version.getFirst(), file);
}
else
{
version = new Pair<File, File>(file, version.getSecond());
}
versions.put(versionNumber, version);
if (file.isDirectory())
{
importStatus.incrementSourceCounter(COUNTER_NAME_DIRECTORIES_SCANNED);
}
else
{
importStatus.incrementSourceCounter(COUNTER_NAME_FILES_SCANNED);
}
}
else
{
if (warn(log)) warn(log, "Skipping '" + getFileName(file) + "' as Alfresco does not have permission to read it.");
importStatus.incrementSourceCounter(COUNTER_NAME_UNREADABLE_ENTRIES);
}
}
}
private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> constructImportItems(final String sourceRelativeParentDirectory,
final Map<String, SortedMap<BigDecimal,Pair<File,File>>> categorisedFiles)
{
Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;
if (categorisedFiles != null)
{
final List<FilesystemBulkImportItem> directoryItems = new ArrayList<FilesystemBulkImportItem>();
final List<FilesystemBulkImportItem> fileItems = new ArrayList<FilesystemBulkImportItem>();
result = new Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>>(directoryItems, fileItems);
for (final String parentName : categorisedFiles.keySet())
{
final SortedMap<BigDecimal,Pair<File,File>> itemVersions = categorisedFiles.get(parentName);
final NavigableSet<FilesystemBulkImportItemVersion> versions = constructImportItemVersions(itemVersions);
final boolean isDirectory = versions.last().isDirectory();
final FilesystemBulkImportItem item = new FilesystemBulkImportItem(parentName,
isDirectory,
sourceRelativeParentDirectory,
versions);
if (isDirectory)
{
directoryItems.add(item);
}
else
{
fileItems.add(item);
}
}
}
return(result);
}
private final NavigableSet<FilesystemBulkImportItemVersion> constructImportItemVersions(final SortedMap<BigDecimal,Pair<File,File>> itemVersions)
{
// PRECONDITIONS
if (itemVersions == null) throw new IllegalArgumentException("itemVersions cannot be null.");
if (itemVersions.size() <= 0) throw new IllegalArgumentException("itemVersions cannot be empty.");
// Body
final NavigableSet<FilesystemBulkImportItemVersion> result = new TreeSet<FilesystemBulkImportItemVersion>();
for (final BigDecimal versionNumber : itemVersions.keySet())
{
final Pair<File,File> contentAndMetadataFiles = itemVersions.get(versionNumber);
final FilesystemBulkImportItemVersion version = new FilesystemBulkImportItemVersion(serviceRegistry,
configuredContentStore,
metadataLoader,
versionNumber,
contentAndMetadataFiles.getFirst(),
contentAndMetadataFiles.getSecond());
result.add(version);
}
return(result);
}
}
| aureg/alfresco-bulk-import | amp/src/main/java/org/alfresco/extension/bulkimport/source/fs/DirectoryAnalyser.java | Java | apache-2.0 | 13,939 |
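/*
 * Editor's sketch (not part of DirectoryAnalyser above): categoriseFile(...)
 * groups sibling files into items keyed by parent name, with a sorted map of
 * version number -> (content file, metadata file). This self-contained version
 * shows only the data-structure shuffle, using a String[2] in place of Pair;
 * the "name.vN" / "name.vN.meta" naming convention is invented here (the real
 * rules live in FilesystemSourceUtils, which is not shown).
 */
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

class VersionCategoriserSketch {

    // items[itemName][versionNumber] -> String[]{contentName, metadataName}
    static final Map<String, SortedMap<BigDecimal, String[]>> items = new HashMap<>();

    static void categorise(String fileName) {
        boolean isMetadata = fileName.endsWith(".meta");              // invented rule
        String base = isMetadata ? fileName.substring(0, fileName.length() - 5) : fileName;
        int dot = base.lastIndexOf(".v");
        BigDecimal version = dot < 0 ? BigDecimal.ZERO : new BigDecimal(base.substring(dot + 2));
        String item = dot < 0 ? base : base.substring(0, dot);

        SortedMap<BigDecimal, String[]> versions =
                items.computeIfAbsent(item, k -> new TreeMap<>());
        String[] pair = versions.computeIfAbsent(version, k -> new String[2]);
        pair[isMetadata ? 1 : 0] = fileName;   // slot 0 = content, slot 1 = metadata
    }

    public static void main(String[] args) {
        categorise("doc.v1");
        categorise("doc.v1.meta");
        categorise("doc.v2");
        System.out.println(items.get("doc").lastKey()); // 2 (latest version wins)
    }
}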
/*
* Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.api.jaxrs.mixins.deserializers;
import java.io.IOException;
import org.mycontroller.standalone.timer.TimerUtils.FREQUENCY_TYPE;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 0.0.2
*/
public class FrequencyTypeDeserializer extends JsonDeserializer<FREQUENCY_TYPE> {
@Override
public FREQUENCY_TYPE deserialize(JsonParser parser, DeserializationContext context)
throws IOException, JsonProcessingException {
        final String frequencyType = parser.getText();
        if (frequencyType != null) {
            return FREQUENCY_TYPE.fromString(frequencyType);
} else {
return null;
}
}
}
| pgh70/mycontroller | modules/core/src/main/java/org/mycontroller/standalone/api/jaxrs/mixins/deserializers/FrequencyTypeDeserializer.java | Java | apache-2.0 | 1,580 |
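/*
 * Editor's sketch (not part of the mixin above): shows how a JsonDeserializer
 * like the one above is typically wired into Jackson via a SimpleModule. The
 * Mode enum here is invented; in MyController the binding goes through the
 * mixin mechanism rather than this direct registration.
 */
import java.io.IOException;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

class EnumDeserializerSketch {

    enum Mode {
        DAILY, WEEKLY;
        static Mode fromString(String text) {
            return valueOf(text.trim().toUpperCase());
        }
    }

    static class ModeDeserializer extends JsonDeserializer<Mode> {
        @Override
        public Mode deserialize(JsonParser parser, DeserializationContext context)
                throws IOException {
            String text = parser.getText();
            return text == null ? null : Mode.fromString(text); // same null guard as above
        }
    }

    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        module.addDeserializer(Mode.class, new ModeDeserializer());
        mapper.registerModule(module);
        System.out.println(mapper.readValue("\"weekly\"", Mode.class)); // WEEKLY
    }
}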
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.data.interval;
import htsjdk.tribble.Feature;
import htsjdk.tribble.annotation.Strand;
import htsjdk.tribble.bed.FullBEDFeature;
import java.awt.Color;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
*
* @author sol
*/
public interface IntervalFeature<T> extends Feature {
Strand getStrand();
T getValue();
}
| shenkers/CrossBrowse | src/main/java/org/mskcc/shenkers/data/interval/IntervalFeature.java | Java | apache-2.0 | 585 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.configcreator;
import java.io.File;
import javax.enterprise.deploy.shared.factories.DeploymentFactoryManager;
import javax.enterprise.deploy.spi.DeploymentManager;
import javax.enterprise.deploy.spi.Target;
import javax.enterprise.deploy.spi.status.ProgressObject;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.portlet.PortletException;
import javax.portlet.PortletRequest;
import org.apache.geronimo.deployment.plugin.jmx.CommandContext;
import org.apache.geronimo.deployment.plugin.jmx.JMXDeploymentManager;
import org.apache.geronimo.deployment.plugin.local.DistributeCommand;
import org.apache.geronimo.j2ee.deployment.ApplicationInfo;
import org.apache.geronimo.j2ee.deployment.EARConfigBuilder;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.KernelRegistry;
/**
* Util class for JSR-88 related functions
*
* @version $Rev$ $Date$
*/
public class JSR88_Util {
/*private static List getEjbClassLoaders(PortletRequest request) {
List deployedEjbs = JSR77_Util.getDeployedEJBs(request);
List configurations = new ArrayList();
for (int i = 0; i < deployedEjbs.size(); i++) {
String ejbPatternName = ((ReferredData) deployedEjbs.get(i)).getPatternName();
configurations.add(getDependencyString(ejbPatternName));
}
return getConfigClassLoaders(configurations);
}
private static List getConfigClassLoaders(List configurationNames) {
List classLoaders = new ArrayList();
ConfigurationManager configurationManager = PortletManager.getConfigurationManager();
for (int i = 0; i < configurationNames.size(); i++) {
Artifact configurationId = Artifact.create((String) configurationNames.get(i));
classLoaders.add(configurationManager.getConfiguration(configurationId).getConfigurationClassLoader());
}
return classLoaders;
}*/
public static ApplicationInfo createApplicationInfo(PortletRequest actionRequest, File moduleFile) {
ApplicationInfo applicationInfo = null;
EARConfigBuilder.createPlanMode.set(Boolean.TRUE);
try {
DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
if (mgr instanceof JMXDeploymentManager) {
((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
}
Target[] targets = mgr.getTargets();
if (null == targets) {
throw new IllegalStateException("No target to distribute to");
}
targets = new Target[] { targets[0] };
DistributeCommand command = new DistributeCommand(getKernel(), targets, moduleFile, null);
CommandContext commandContext = new CommandContext(true, true, null, null, false);
commandContext.setUsername("system");
commandContext.setPassword("manager");
command.setCommandContext(commandContext);
command.doDeploy(targets[0], true);
} catch (Exception e) {
// Any better ideas?
if(EARConfigBuilder.appInfo.get() == null) throw new RuntimeException(e);
} finally {
EARConfigBuilder.createPlanMode.set(Boolean.FALSE);
applicationInfo = EARConfigBuilder.appInfo.get();
EARConfigBuilder.appInfo.set(null);
}
return applicationInfo;
}
private static Kernel getKernel() {
// todo: consider making this configurable; we could easily connect to a remote kernel if we wanted to
Kernel kernel = null;
try {
kernel = (Kernel) new InitialContext().lookup("java:comp/GeronimoKernel");
} catch (NamingException e) {
// log.error("Unable to look up kernel in JNDI", e);
}
if (kernel == null) {
// log.debug("Unable to find kernel in JNDI; using KernelRegistry instead");
kernel = KernelRegistry.getSingleKernel();
}
return kernel;
}
public static String[] deploy(PortletRequest actionRequest, File moduleFile, File planFile)
throws PortletException {
// TODO this is a duplicate of the code from
// org.apache.geronimo.console.configmanager.DeploymentPortlet.processAction()
// TODO need to eliminate this duplicate code
DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
String[] statusMsgs = new String[2];
try {
DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
try {
if (mgr instanceof JMXDeploymentManager) {
((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
}
Target[] targets = mgr.getTargets();
if (null == targets) {
throw new IllegalStateException("No target to distribute to");
}
targets = new Target[] { targets[0] };
ProgressObject progress = mgr.distribute(targets, moduleFile, planFile);
while (progress.getDeploymentStatus().isRunning()) {
Thread.sleep(100);
}
if (progress.getDeploymentStatus().isCompleted()) {
progress = mgr.start(progress.getResultTargetModuleIDs());
while (progress.getDeploymentStatus().isRunning()) {
Thread.sleep(100);
}
statusMsgs[0] = "infoMsg01";
} else {
statusMsgs[0] = "errorMsg02";
statusMsgs[1] = progress.getDeploymentStatus().getMessage();
}
} finally {
mgr.release();
}
} catch (Exception e) {
throw new PortletException(e);
}
return statusMsgs;
}
}
| apache/geronimo | plugins/plancreator/plancreator-portlets/src/main/java/org/apache/geronimo/console/configcreator/JSR88_Util.java | Java | apache-2.0 | 6,904 |
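/*
 * Editor's sketch (not part of JSR88_Util above): both deploy paths poll a
 * JSR-88 ProgressObject with Thread.sleep(100) until it stops running. The
 * loop below adds the timeout that the original omits; "Status" and its
 * method names are invented stand-ins for the javax.enterprise.deploy types.
 */
class PollingSketch {

    interface Status {
        boolean isRunning();
        boolean isCompleted();
    }

    // Polls until the operation finishes or timeoutMillis elapses.
    static boolean awaitCompletion(Status status, long timeoutMillis)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (status.isRunning()) {
            if (System.currentTimeMillis() > deadline) {
                return false;           // gave up; the caller decides how to report
            }
            Thread.sleep(100);          // same poll interval as the portlet code
        }
        return status.isCompleted();
    }

    public static void main(String[] args) throws InterruptedException {
        Status done = new Status() {
            public boolean isRunning() { return false; }
            public boolean isCompleted() { return true; }
        };
        System.out.println(awaitCompletion(done, 1000)); // true
    }
}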
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.widgets.containers;
import com.eas.core.XElement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.RequiresResize;
import com.google.gwt.user.client.ui.Widget;
/**
*
* @author mg
*/
public class FlowGapPanel extends FlowPanel implements RequiresResize {
protected int hgap;
protected int vgap;
public FlowGapPanel() {
super();
getElement().<XElement>cast().addResizingTransitionEnd(this);
getElement().getStyle().setLineHeight(0, Style.Unit.PX);
}
public int getHgap() {
return hgap;
}
public void setHgap(int aValue) {
hgap = aValue;
for (int i = 0; i < getWidgetCount(); i++) {
Widget w = getWidget(i);
w.getElement().getStyle().setMarginLeft(hgap, Style.Unit.PX);
}
}
public int getVgap() {
return vgap;
}
public void setVgap(int aValue) {
vgap = aValue;
for (int i = 0; i < getWidgetCount(); i++) {
Widget w = getWidget(i);
w.getElement().getStyle().setMarginTop(vgap, Style.Unit.PX);
}
}
@Override
public void add(Widget w) {
w.getElement().getStyle().setMarginLeft(hgap, Style.Unit.PX);
w.getElement().getStyle().setMarginTop(vgap, Style.Unit.PX);
w.getElement().getStyle().setDisplay(Style.Display.INLINE_BLOCK);
w.getElement().getStyle().setVerticalAlign(Style.VerticalAlign.BOTTOM);
super.add(w);
}
@Override
public void onResize() {
// reserved for future use.
}
}
| jskonst/PlatypusJS | web-client/src/platypus/src/com/eas/widgets/containers/FlowGapPanel.java | Java | apache-2.0 | 1,696 |
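/*
 * Editor's sketch (not part of FlowGapPanel above): typical usage from a GWT
 * entry point. setHgap/setVgap re-apply margins to existing children, so they
 * can be called before or after add(). The labels here are invented.
 */
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RootPanel;

class FlowGapPanelDemo implements EntryPoint {
    @Override
    public void onModuleLoad() {
        FlowGapPanel panel = new FlowGapPanel();
        panel.setHgap(8);                 // 8px left margin on every child
        panel.setVgap(4);                 // 4px top margin on every child
        panel.add(new Label("first"));    // rendered inline-block, bottom-aligned
        panel.add(new Label("second"));
        RootPanel.get().add(panel);
    }
}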
package org.cohorte.herald.core.utils;
import java.util.Iterator;
import org.cohorte.herald.Message;
import org.cohorte.herald.MessageReceived;
import org.jabsorb.ng.JSONSerializer;
import org.jabsorb.ng.serializer.MarshallException;
import org.jabsorb.ng.serializer.UnmarshallException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class MessageUtils {
/** The Jabsorb serializer */
private static JSONSerializer pSerializer = new JSONSerializer();
static {
try {
pSerializer.registerDefaultSerializers();
} catch (Exception e) {
            // serializer registration failed; log the stack trace and continue
e.printStackTrace();
}
}
public static String toJSON(Message aMsg) throws MarshallException {
JSONObject json = new JSONObject();
try {
// headers
JSONObject headers = new JSONObject();
for (String key : aMsg.getHeaders().keySet()) {
headers.put(key, aMsg.getHeaders().get(key));
}
json.put(Message.MESSAGE_HEADERS, headers);
// subject
json.put(Message.MESSAGE_SUBJECT, aMsg.getSubject());
// content
if (aMsg.getContent() != null) {
if (aMsg.getContent() instanceof String) {
json.put(Message.MESSAGE_CONTENT, aMsg.getContent());
} else {
JSONObject content = new JSONObject(pSerializer.toJSON(aMsg.getContent()));
json.put(Message.MESSAGE_CONTENT, content);
}
}
// metadata
JSONObject metadata = new JSONObject();
for (String key : aMsg.getMetadata().keySet()) {
metadata.put(key, aMsg.getMetadata().get(key));
}
json.put(Message.MESSAGE_METADATA, metadata);
} catch (JSONException e) {
e.printStackTrace();
return null;
}
return json.toString();
}
@SuppressWarnings("unchecked")
public static MessageReceived fromJSON(String json) throws UnmarshallException {
try {
JSONObject wParsedMsg = new JSONObject(json);
{
try {
// check if valid herald message (respects herald specification version)
int heraldVersion = -1;
JSONObject jHeader = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS);
if (jHeader != null) {
if (jHeader.has(Message.MESSAGE_HERALD_VERSION)) {
heraldVersion = jHeader.getInt(Message.MESSAGE_HERALD_VERSION);
}
}
if (heraldVersion != Message.HERALD_SPECIFICATION_VERSION) {
throw new JSONException("Herald specification of the received message is not supported!");
}
MessageReceived wMsg = new MessageReceived(
wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).getString(Message.MESSAGE_HEADER_UID),
wParsedMsg.getString(Message.MESSAGE_SUBJECT),
null,
null,
null,
null,
null,
null);
// content
Object cont = wParsedMsg.opt(Message.MESSAGE_CONTENT);
if (cont != null) {
if (cont instanceof JSONObject || cont instanceof JSONArray) {
wMsg.setContent(pSerializer.fromJSON(cont.toString()));
} else
wMsg.setContent(cont);
} else {
wMsg.setContent(null);
}
// headers
Iterator<String> wKeys;
if (wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS) != null) {
wKeys = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).keys();
while(wKeys.hasNext()) {
String key = wKeys.next();
wMsg.addHeader(key, wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).get(key));
}
}
// metadata
Iterator<String> wKeys2;
if (wParsedMsg.getJSONObject(Message.MESSAGE_METADATA) != null) {
wKeys2 = wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).keys();
while(wKeys2.hasNext()) {
String key = wKeys2.next();
wMsg.addMetadata(key, wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).get(key));
}
}
return wMsg;
} catch (JSONException e) {
e.printStackTrace();
return null;
}
}
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
}
| isandlaTech/cohorte-herald | java/org.cohorte.herald.core/src/org/cohorte/herald/core/utils/MessageUtils.java | Java | apache-2.0 | 4,164 |
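/*
 * Editor's sketch (not part of MessageUtils above): the to/from conversion
 * above boils down to copying maps into org.json objects and back. This
 * self-contained round trip shows the core org.json calls used; the herald
 * Message/MessageReceived types and the Jabsorb serializer are left out.
 */
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.json.JSONException;
import org.json.JSONObject;

class JsonRoundTripSketch {

    public static void main(String[] args) throws JSONException {
        Map<String, Object> headers = new HashMap<>();
        headers.put("uid", "42");
        headers.put("herald-version", 1);

        // Map -> JSONObject (the toJSON(...) direction)
        JSONObject json = new JSONObject();
        JSONObject jHeaders = new JSONObject();
        for (Map.Entry<String, Object> e : headers.entrySet()) {
            jHeaders.put(e.getKey(), e.getValue());
        }
        json.put("headers", jHeaders);
        String wire = json.toString();

        // JSONObject -> Map (the fromJSON(...) direction)
        JSONObject parsed = new JSONObject(wire);
        JSONObject parsedHeaders = parsed.getJSONObject("headers");
        Iterator<String> keys = parsedHeaders.keys();
        while (keys.hasNext()) {
            String key = keys.next();
            System.out.println(key + " = " + parsedHeaders.get(key));
        }
    }
}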
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.annotation;
import java.util.Collection;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.CacheManager;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportAware;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
* Abstract base {@code @Configuration} class providing common structure for enabling
* Spring's annotation-driven cache management capability.
*
* @author Chris Beams
* @since 3.1
* @see EnableCaching
*/
@Configuration
public abstract class AbstractCachingConfiguration implements ImportAware {
protected AnnotationAttributes enableCaching;
protected CacheManager cacheManager;
protected KeyGenerator keyGenerator;
@Autowired(required=false)
private Collection<CacheManager> cacheManagerBeans;
@Autowired(required=false)
private Collection<CachingConfigurer> cachingConfigurers;
@Override
public void setImportMetadata(AnnotationMetadata importMetadata) {
this.enableCaching = AnnotationAttributes.fromMap(
importMetadata.getAnnotationAttributes(EnableCaching.class.getName(), false));
Assert.notNull(this.enableCaching,
"@EnableCaching is not present on importing class " +
importMetadata.getClassName());
}
/**
* Determine which {@code CacheManager} bean to use. Prefer the result of
* {@link CachingConfigurer#cacheManager()} over any by-type matching. If none, fall
* back to by-type matching on {@code CacheManager}.
* @throws IllegalArgumentException if no CacheManager can be found; if more than one
* CachingConfigurer implementation exists; if multiple CacheManager beans and no
* CachingConfigurer exists to disambiguate.
*/
@PostConstruct
protected void reconcileCacheManager() {
if (!CollectionUtils.isEmpty(cachingConfigurers)) {
int nConfigurers = cachingConfigurers.size();
if (nConfigurers > 1) {
throw new IllegalStateException(nConfigurers + " implementations of " +
"CachingConfigurer were found when only 1 was expected. " +
"Refactor the configuration such that CachingConfigurer is " +
"implemented only once or not at all.");
}
CachingConfigurer cachingConfigurer = cachingConfigurers.iterator().next();
this.cacheManager = cachingConfigurer.cacheManager();
this.keyGenerator = cachingConfigurer.keyGenerator();
}
else if (!CollectionUtils.isEmpty(cacheManagerBeans)) {
int nManagers = cacheManagerBeans.size();
if (nManagers > 1) {
throw new IllegalStateException(nManagers + " beans of type CacheManager " +
"were found when only 1 was expected. Remove all but one of the " +
"CacheManager bean definitions, or implement CachingConfigurer " +
"to make explicit which CacheManager should be used for " +
"annotation-driven cache management.");
}
CacheManager cacheManager = cacheManagerBeans.iterator().next();
this.cacheManager = cacheManager;
// keyGenerator remains null; will fall back to default within CacheInterceptor
}
else {
throw new IllegalStateException("No bean of type CacheManager could be found. " +
"Register a CacheManager bean or remove the @EnableCaching annotation " +
"from your configuration.");
}
}
}
| sunpy1106/SpringBeanLifeCycle | src/main/java/org/springframework/cache/annotation/AbstractCachingConfiguration.java | Java | apache-2.0 | 4,164 |
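/*
 * Editor's sketch (not part of the Spring class above): a minimal
 * CachingConfigurer that resolves the ambiguity reconcileCacheManager()
 * complains about when several CacheManager beans exist. Written against the
 * Spring 3.1-era API shown above; the cache name is invented.
 */
import java.lang.reflect.Method;
import java.util.Arrays;

import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.CachingConfigurer;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
@EnableCaching
class AppCacheConfig implements CachingConfigurer {

    @Bean
    @Override
    public CacheManager cacheManager() {
        return new ConcurrentMapCacheManager("books"); // the explicit winner
    }

    @Bean
    @Override
    public KeyGenerator keyGenerator() {
        // Key = method name + argument list; adjust to taste.
        return new KeyGenerator() {
            @Override
            public Object generate(Object target, Method method, Object... params) {
                return method.getName() + Arrays.deepToString(params);
            }
        };
    }
}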
package org.apache.cocoon.transformation;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipEntry;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* This transformer downloads a new file to disk.
* <p>
* It triggers for elements in the namespace "http://apache.org/cocoon/download/1.0".
* Attributes:
* @src : the file that should be downloaded
* @target (optional): path where the file should be stored (includes filename)
* @target-dir (optional): directory where the file should be stored
* @unzip (optional): if "true" then unzip file after downloading.
* If there is no @target or @target-dir attribute a temporary file is created.
* <p>
* Example XML input:
* <pre>
* {@code
* <download:download src="http://some.server.com/zipfile.zip"
* target="/tmp/zipfile.zip" unzip="true"/>
* }
* </pre>
* The @src specifies the file that should be downloaded. The
* @target specifies where the file should be stored. @unzip is true, so the
* file will be unzipped immediately.
* <p>
* The result is
* <pre>
* {@code
* <download:result unzipped="/path/to/unzipped/file/on/disk">/path/to/file/on/disk</download:result>
* }
* </pre>
* (@unzipped is only present when @unzip="true") or
* <pre>
* {@code
 * <download:error>The error message</download:error>
* }
* </pre>
 * if an error (other than an HTTP error) occurs.
* HTTP errors are thrown.
* Define this transformer in the sitemap:
* <pre>
* {@code
* <map:components>
* <map:transformers>
* <map:transformer name="download" logger="sitemap.transformer.download"
* src="org.apache.cocoon.transformation.DownloadTransformer"/>
* ...
* }
* </pre>
* Use this transformer:
* <pre>
* {@code
* <map:transform type="download"/>
* }
* </pre>
*
*
* @author <a href="mailto:maarten.kroon@koop.overheid.nl">Maarten Kroon</a>
* @author <a href="mailto:hhv@x-scale.nl">Huib Verweij</a>
*/
public class DownloadTransformer extends AbstractSAXTransformer {
public static final String DOWNLOAD_NS = "http://apache.org/cocoon/download/1.0";
public static final String DOWNLOAD_ELEMENT = "download";
private static final String DOWNLOAD_PREFIX = "download";
public static final String RESULT_ELEMENT = "result";
public static final String ERROR_ELEMENT = "error";
public static final String SRC_ATTRIBUTE = "src";
public static final String TARGET_ATTRIBUTE = "target";
public static final String TARGETDIR_ATTRIBUTE = "target-dir";
public static final String UNZIP_ATTRIBUTE = "unzip";
public static final String RECURSIVE_UNZIP_ATTRIBUTE = "recursive-unzip";
public static final String UNZIPPED_ATTRIBUTE = "unzipped";
public DownloadTransformer() {
this.defaultNamespaceURI = DOWNLOAD_NS;
}
@Override
public void setup(SourceResolver resolver, Map objectModel, String src,
Parameters params) throws ProcessingException, SAXException, IOException {
super.setup(resolver, objectModel, src, params);
}
@Override
public void startTransformingElement(String uri, String localName,
String qName, Attributes attributes) throws SAXException, ProcessingException, IOException {
if (DOWNLOAD_NS.equals(uri) && DOWNLOAD_ELEMENT.equals(localName)) {
try {
File[] downloadResult = download(
attributes.getValue(SRC_ATTRIBUTE),
attributes.getValue(TARGETDIR_ATTRIBUTE),
attributes.getValue(TARGET_ATTRIBUTE),
attributes.getValue(UNZIP_ATTRIBUTE),
attributes.getValue(RECURSIVE_UNZIP_ATTRIBUTE)
);
File downloadedFile = downloadResult[0];
File unzipDir = downloadResult[1];
String absPath = downloadedFile.getCanonicalPath();
AttributesImpl attrsImpl = new AttributesImpl();
if (unzipDir != null) {
attrsImpl.addAttribute("", UNZIPPED_ATTRIBUTE, UNZIPPED_ATTRIBUTE, "CDATA", unzipDir.getAbsolutePath());
}
xmlConsumer.startElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT), attrsImpl);
xmlConsumer.characters(absPath.toCharArray(), 0, absPath.length());
xmlConsumer.endElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT));
} catch (Exception e) {
// throw new SAXException("Error downloading file", e);
xmlConsumer.startElement(uri, ERROR_ELEMENT, qName, attributes);
String message = e.getMessage();
xmlConsumer.characters(message.toCharArray(), 0, message.length());
xmlConsumer.endElement(uri, ERROR_ELEMENT, qName);
}
} else {
super.startTransformingElement(uri, localName, qName, attributes);
}
}
@Override
public void endTransformingElement(String uri, String localName, String qName)
throws SAXException, ProcessingException, IOException {
if (DOWNLOAD_NS.equals(namespaceURI) && DOWNLOAD_ELEMENT.equals(localName)) {
return;
}
super.endTransformingElement(uri, localName, qName);
}
private File[] download(String sourceUri, String targetDir, String target, String unzip, String recursiveUnzip)
throws ProcessingException, IOException, SAXException {
File targetFile;
File unZipped = null;
if (null != target && !target.equals("")) {
targetFile = new File(target);
} else if (null != targetDir && !targetDir.equals("")) {
targetFile = new File(targetDir);
} else {
String baseName = FilenameUtils.getBaseName(sourceUri);
String extension = FilenameUtils.getExtension(sourceUri);
targetFile = File.createTempFile(baseName, "." + extension);
}
if (!targetFile.getParentFile().exists()) {
targetFile.getParentFile().mkdirs();
}
boolean unzipFile = (null != unzip && unzip.equals("true")) ||
(null != recursiveUnzip && recursiveUnzip.equals("true"));
String absPath = targetFile.getAbsolutePath();
String unzipDir = unzipFile ? FilenameUtils.removeExtension(absPath) : "";
HttpClient httpClient = new HttpClient();
httpClient.setConnectionTimeout(60000);
httpClient.setTimeout(60000);
if (System.getProperty("http.proxyHost") != null) {
// getLogger().warn("PROXY: "+System.getProperty("http.proxyHost"));
String nonProxyHostsRE = System.getProperty("http.nonProxyHosts", "");
if (nonProxyHostsRE.length() > 0) {
String[] pHosts = nonProxyHostsRE.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*").split("\\|");
nonProxyHostsRE = "";
for (String pHost : pHosts) {
nonProxyHostsRE += "|(^https?://" + pHost + ".*$)";
}
nonProxyHostsRE = nonProxyHostsRE.substring(1);
}
if (nonProxyHostsRE.length() == 0 || !sourceUri.matches(nonProxyHostsRE)) {
try {
HostConfiguration hostConfiguration = httpClient.getHostConfiguration();
hostConfiguration.setProxy(System.getProperty("http.proxyHost"), Integer.parseInt(System.getProperty("http.proxyPort", "80")));
httpClient.setHostConfiguration(hostConfiguration);
} catch (Exception e) {
throw new ProcessingException("Cannot set proxy!", e);
}
}
}
HttpMethod httpMethod = new GetMethod(sourceUri);
try {
int responseCode = httpClient.executeMethod(httpMethod);
if (responseCode < 200 || responseCode >= 300) {
throw new ProcessingException(String.format("Received HTTP status code %d (%s)", responseCode, httpMethod.getStatusText()));
}
OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile));
try {
IOUtils.copyLarge(httpMethod.getResponseBodyAsStream(), os);
} finally {
os.close();
}
} finally {
httpMethod.releaseConnection();
}
if (!"".equals(unzipDir)) {
unZipped = unZipIt(targetFile, unzipDir, recursiveUnzip);
}
return new File[] {targetFile, unZipped};
}
/**
* Unzip it
* @param zipFile input zip file
* @param outputFolder zip file output folder
*/
private File unZipIt(File zipFile, String outputFolder, String recursiveUnzip){
byte[] buffer = new byte[4096];
File folder = null;
try{
            //create the output directory if it does not exist
            folder = new File(outputFolder);
            if (!folder.exists()){
                folder.mkdirs();
}
            try (
                //get the zip file content
                ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
//get the zipped file list entry
ZipEntry ze = zis.getNextEntry();
while(ze != null){
String fileName = ze.getName();
File newFile = new File(outputFolder + File.separator + fileName);
// System.out.println("file unzip : "+ newFile.getAbsoluteFile());
                    // create all missing parent folders, otherwise a
                    // FileNotFoundException is thrown for entries inside subfolders
new File(newFile.getParent()).mkdirs();
try (FileOutputStream fos = new FileOutputStream(newFile)) {
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
}
if ((null != recursiveUnzip && "true".equals(recursiveUnzip)) && FilenameUtils.getExtension(fileName).equals("zip")) {
unZipIt(newFile, FilenameUtils.concat(outputFolder, FilenameUtils.getBaseName(fileName)), recursiveUnzip);
}
ze = zis.getNextEntry();
}
zis.closeEntry();
}
// System.out.println("Done unzipping.");
} catch(IOException ex){
ex.printStackTrace();
}
return folder;
}
}
| nverwer/cocooncomponents | src/org/apache/cocoon/transformation/DownloadTransformer.java | Java | apache-2.0 | 11,508 |
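/*
 * Editor's sketch (not part of DownloadTransformer above): unZipIt(...) writes
 * each entry to outputFolder + entry name without validating the name, so an
 * archive entry like "../../evil" could escape the target directory ("zip
 * slip"). A hardened loop resolves and checks each path first; this is a
 * standalone illustration, not a drop-in patch.
 */
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

class SafeUnzipSketch {

    static void unzip(File zipFile, File outputFolder) throws IOException {
        String rootPath = outputFolder.getCanonicalPath() + File.separator;
        byte[] buffer = new byte[4096];
        try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
            for (ZipEntry ze = zis.getNextEntry(); ze != null; ze = zis.getNextEntry()) {
                File target = new File(outputFolder, ze.getName());
                // Reject entries that resolve outside the output folder.
                if (!target.getCanonicalPath().startsWith(rootPath)) {
                    throw new IOException("Blocked zip-slip entry: " + ze.getName());
                }
                if (ze.isDirectory()) {
                    target.mkdirs();
                    continue;
                }
                target.getParentFile().mkdirs();
                try (FileOutputStream fos = new FileOutputStream(target)) {
                    for (int len; (len = zis.read(buffer)) > 0; ) {
                        fos.write(buffer, 0, len);
                    }
                }
            }
        }
    }
}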
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.testing.sync;
import java.io.IOException;
import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.UserTokenHandler;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.protocol.HttpClientContext;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.EndpointDetails;
import org.apache.hc.core5.http.HttpException;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpStatus;
import org.apache.hc.core5.http.io.HttpRequestHandler;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.apache.hc.core5.http.protocol.BasicHttpContext;
import org.apache.hc.core5.http.protocol.HttpContext;
import org.junit.Assert;
import org.junit.Test;
/**
* Test cases for state-ful connections.
*/
public class TestStatefulConnManagement extends LocalServerTestBase {
private static class SimpleService implements HttpRequestHandler {
public SimpleService() {
super();
}
@Override
public void handle(
final ClassicHttpRequest request,
final ClassicHttpResponse response,
final HttpContext context) throws HttpException, IOException {
response.setCode(HttpStatus.SC_OK);
final StringEntity entity = new StringEntity("Whatever");
response.setEntity(entity);
}
}
@Test
public void testStatefulConnections() throws Exception {
final int workerCount = 5;
final int requestCount = 5;
this.server.registerHandler("*", new SimpleService());
this.connManager.setMaxTotal(workerCount);
this.connManager.setDefaultMaxPerRoute(workerCount);
final UserTokenHandler userTokenHandler = new UserTokenHandler() {
@Override
public Object getUserToken(final HttpRoute route, final HttpContext context) {
final String id = (String) context.getAttribute("user");
return id;
}
};
this.clientBuilder.setUserTokenHandler(userTokenHandler);
final HttpHost target = start();
final HttpClientContext[] contexts = new HttpClientContext[workerCount];
final HttpWorker[] workers = new HttpWorker[workerCount];
for (int i = 0; i < contexts.length; i++) {
final HttpClientContext context = HttpClientContext.create();
contexts[i] = context;
workers[i] = new HttpWorker(
"user" + i,
context, requestCount, target, this.httpclient);
}
for (final HttpWorker worker : workers) {
worker.start();
}
for (final HttpWorker worker : workers) {
worker.join(LONG_TIMEOUT.toMillis());
}
for (final HttpWorker worker : workers) {
final Exception ex = worker.getException();
if (ex != null) {
throw ex;
}
Assert.assertEquals(requestCount, worker.getCount());
}
for (final HttpContext context : contexts) {
final String state0 = (String) context.getAttribute("r0");
Assert.assertNotNull(state0);
for (int r = 1; r < requestCount; r++) {
Assert.assertEquals(state0, context.getAttribute("r" + r));
}
}
}
static class HttpWorker extends Thread {
private final String uid;
private final HttpClientContext context;
private final int requestCount;
private final HttpHost target;
private final CloseableHttpClient httpclient;
private volatile Exception exception;
private volatile int count;
public HttpWorker(
final String uid,
final HttpClientContext context,
final int requestCount,
final HttpHost target,
final CloseableHttpClient httpclient) {
super();
this.uid = uid;
this.context = context;
this.requestCount = requestCount;
this.target = target;
this.httpclient = httpclient;
this.count = 0;
}
public int getCount() {
return this.count;
}
public Exception getException() {
return this.exception;
}
@Override
public void run() {
try {
this.context.setAttribute("user", this.uid);
for (int r = 0; r < this.requestCount; r++) {
final HttpGet httpget = new HttpGet("/");
final ClassicHttpResponse response = this.httpclient.execute(
this.target,
httpget,
this.context);
this.count++;
final EndpointDetails endpointDetails = this.context.getEndpointDetails();
final String connuid = Integer.toHexString(System.identityHashCode(endpointDetails));
this.context.setAttribute("r" + r, connuid);
EntityUtils.consume(response.getEntity());
}
} catch (final Exception ex) {
this.exception = ex;
}
}
}
@Test
    public void testRouteSpecificPoolRecycling() throws Exception {
// This tests what happens when a maxed connection pool needs
// to kill the last idle connection to a route to build a new
// one to the same route.
final int maxConn = 2;
this.server.registerHandler("*", new SimpleService());
this.connManager.setMaxTotal(maxConn);
this.connManager.setDefaultMaxPerRoute(maxConn);
final UserTokenHandler userTokenHandler = new UserTokenHandler() {
@Override
public Object getUserToken(final HttpRoute route, final HttpContext context) {
return context.getAttribute("user");
}
};
this.clientBuilder.setUserTokenHandler(userTokenHandler);
final HttpHost target = start();
// Bottom of the pool : a *keep alive* connection to Route 1.
final HttpContext context1 = new BasicHttpContext();
context1.setAttribute("user", "stuff");
final ClassicHttpResponse response1 = this.httpclient.execute(
target, new HttpGet("/"), context1);
EntityUtils.consume(response1.getEntity());
// The ConnPoolByRoute now has 1 free connection, out of 2 max
        // The ConnPoolByRoute has one RouteSpecificPool, which has one free connection
// for [localhost][stuff]
Thread.sleep(100);
// Send a very simple HTTP get (it MUST be simple, no auth, no proxy, no 302, no 401, ...)
// Send it to another route. Must be a keepalive.
final HttpContext context2 = new BasicHttpContext();
final ClassicHttpResponse response2 = this.httpclient.execute(
new HttpHost("127.0.0.1", this.server.getPort()), new HttpGet("/"), context2);
EntityUtils.consume(response2.getEntity());
        // ConnPoolByRoute now has 2 free connections, out of its 2 max.
        // The [localhost][stuff] RouteSpecificPool is the same as earlier
// And there is a [127.0.0.1][null] pool with 1 free connection
Thread.sleep(100);
// This will put the ConnPoolByRoute to the targeted state :
// [localhost][stuff] will not get reused because this call is [localhost][null]
// So the ConnPoolByRoute will need to kill one connection (it is maxed out globally).
// The killed conn is the oldest, which means the first HTTPGet ([localhost][stuff]).
// When this happens, the RouteSpecificPool becomes empty.
final HttpContext context3 = new BasicHttpContext();
final ClassicHttpResponse response3 = this.httpclient.execute(
target, new HttpGet("/"), context3);
// If the ConnPoolByRoute did not behave coherently with the RouteSpecificPool
        // this may fail. Ex : if the ConnPool discarded the route pool because it was empty,
// but still used it to build the request3 connection.
EntityUtils.consume(response3.getEntity());
}
}
| UlrichColby/httpcomponents-client | httpclient5-testing/src/test/java/org/apache/hc/client5/testing/sync/TestStatefulConnManagement.java | Java | apache-2.0 | 9,740 |
package droidkit.app;
import android.content.Intent;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.Locale;
/**
* @author Daniel Serdyukov
*/
public final class MapsIntent {
private static final String MAPS_URL = "https://maps.google.com/maps";
private MapsIntent() {
}
@NonNull
public static Intent openMaps() {
return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL));
}
@NonNull
public static Intent openMaps(double lat, double lng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?q=%f,%f", lat, lng)));
}
@NonNull
public static Intent route(double lat, double lng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?daddr=%f,%f", lat, lng)));
}
@NonNull
public static Intent route(double fromLat, double fromLng, double toLat, double toLng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL +
"?saddr=%f,%f&daddr=%f,%f", fromLat, fromLng, toLat, toLng)));
}
@NonNull
public static Intent search(@NonNull String query) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL + "?q=" + query));
}
}
| DanielSerdyukov/droidkit-4.x | library/src/main/java/droidkit/app/MapsIntent.java | Java | apache-2.0 | 1,295 |
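/*
 * Editor's sketch (not part of MapsIntent above): launching one of the intents
 * from an Activity, guarded so the app does not crash when no maps handler is
 * installed. The coordinates are invented; this assumes a standard Android
 * Activity registered in the manifest.
 */
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;

public class MapsLauncherActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Intent route = MapsIntent.route(52.5200, 13.4050); // Berlin, for example
        if (route.resolveActivity(getPackageManager()) != null) {
            startActivity(route);  // hand off to the installed maps application
        }
    }
}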
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.test;
import com.google.common.base.Joiner;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
/**
 * This is an integration test for replication. It is derived from
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} that creates a large circular
* linked list in one cluster and verifies that the data is correct in a sink cluster. The test
* handles creating the tables and schema and setting up the replication.
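 *
 * <p>Example invocation (illustrative; cluster keys and output path are assumptions):</p>
 * <pre>
 * hbase org.apache.hadoop.hbase.test.IntegrationTestReplication \
 *   -s srcZk:2181:/hbase -r sinkZk:2181:/hbase -d /tmp/ITReplication
 * </pre>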
*/
public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
protected String sourceClusterIdString;
protected String sinkClusterIdString;
protected int numIterations;
protected int numMappers;
protected long numNodes;
protected String outputDir;
protected int numReducers;
protected int generateVerifyGap;
protected Integer width;
protected Integer wrapMultiplier;
protected boolean noReplicationSetup = false;
private final String SOURCE_CLUSTER_OPT = "sourceCluster";
private final String DEST_CLUSTER_OPT = "destCluster";
private final String ITERATIONS_OPT = "iterations";
private final String NUM_MAPPERS_OPT = "numMappers";
private final String OUTPUT_DIR_OPT = "outputDir";
private final String NUM_REDUCERS_OPT = "numReducers";
private final String NO_REPLICATION_SETUP_OPT = "noReplicationSetup";
/**
* The gap (in seconds) from when data is finished being generated at the source
   * to when it can be verified. This is the replication lag we are willing to tolerate.
*/
private final String GENERATE_VERIFY_GAP_OPT = "generateVerifyGap";
/**
* The width of the linked list.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String WIDTH_OPT = "width";
/**
* The number of rows after which the linked list points to the first row.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String WRAP_MULTIPLIER_OPT = "wrapMultiplier";
/**
* The number of nodes in the test setup. This has to be a multiple of WRAP_MULTIPLIER * WIDTH
   * in order to ensure that the linked list is complete.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String NUM_NODES_OPT = "numNodes";
private final int DEFAULT_NUM_MAPPERS = 1;
private final int DEFAULT_NUM_REDUCERS = 1;
private final int DEFAULT_NUM_ITERATIONS = 1;
private final int DEFAULT_GENERATE_VERIFY_GAP = 60;
private final int DEFAULT_WIDTH = 1000000;
private final int DEFAULT_WRAP_MULTIPLIER = 25;
private final int DEFAULT_NUM_NODES = DEFAULT_WIDTH * DEFAULT_WRAP_MULTIPLIER;
/**
* Wrapper around an HBase ClusterID allowing us
* to get admin connections and configurations for it
*/
protected class ClusterID {
private final Configuration configuration;
private Connection connection = null;
/**
* This creates a new ClusterID wrapper that will automatically build connections and
* configurations to be able to talk to the specified cluster
*
* @param base the base configuration that this class will add to
* @param key the cluster key in the form of zk_quorum:zk_port:zk_parent_node
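     *        (e.g. {@code localhost:2181:/hbase}, illustrative)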
*/
public ClusterID(Configuration base,
String key) {
configuration = new Configuration(base);
String[] parts = key.split(":");
configuration.set(HConstants.ZOOKEEPER_QUORUM, parts[0]);
configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, parts[1]);
configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, parts[2]);
}
@Override
public String toString() {
return Joiner.on(":").join(configuration.get(HConstants.ZOOKEEPER_QUORUM),
configuration.get(HConstants.ZOOKEEPER_CLIENT_PORT),
configuration.get(HConstants.ZOOKEEPER_ZNODE_PARENT));
}
public Configuration getConfiguration() {
return this.configuration;
}
public Connection getConnection() throws Exception {
if (this.connection == null) {
this.connection = ConnectionFactory.createConnection(this.configuration);
}
return this.connection;
}
public void closeConnection() throws Exception {
this.connection.close();
this.connection = null;
}
public boolean equals(ClusterID other) {
return this.toString().equalsIgnoreCase(other.toString());
}
}
/**
* The main runner loop for the test. It uses
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList}
* for the generation and verification of the linked list. It is heavily based on
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Loop}
*/
protected class VerifyReplicationLoop extends Configured implements Tool {
private final Log LOG = LogFactory.getLog(VerifyReplicationLoop.class);
protected ClusterID source;
protected ClusterID sink;
IntegrationTestBigLinkedList integrationTestBigLinkedList;
/**
* This tears down any tables that existed from before and rebuilds the tables and schemas on
* the source cluster. It then sets up replication from the source to the sink cluster by using
* the {@link org.apache.hadoop.hbase.client.replication.ReplicationAdmin}
* connection.
*
* @throws Exception
*/
protected void setupTablesAndReplication() throws Exception {
TableName tableName = getTableName(source.getConfiguration());
ClusterID[] clusters = {source, sink};
// delete any old tables in the source and sink
for (ClusterID cluster : clusters) {
Admin admin = cluster.getConnection().getAdmin();
if (admin.tableExists(tableName)) {
if (admin.isTableEnabled(tableName)) {
admin.disableTable(tableName);
}
/**
* TODO: This is a work around on a replication bug (HBASE-13416)
         * When we recreate a table that has recently been
* deleted, the contents of the logs are replayed even though
* they should not. This ensures that we flush the logs
* before the table gets deleted. Eventually the bug should be
* fixed and this should be removed.
*/
Set<ServerName> regionServers = new TreeSet<>();
for (HRegionLocation rl :
cluster.getConnection().getRegionLocator(tableName).getAllRegionLocations()) {
regionServers.add(rl.getServerName());
}
for (ServerName server : regionServers) {
source.getConnection().getAdmin().rollWALWriter(server);
}
admin.deleteTable(tableName);
}
}
// create the schema
Generator generator = new Generator();
generator.setConf(source.getConfiguration());
generator.createSchema();
// setup the replication on the source
if (!source.equals(sink)) {
ReplicationAdmin replicationAdmin = new ReplicationAdmin(source.getConfiguration());
// remove any old replication peers
for (String oldPeer : replicationAdmin.listPeerConfigs().keySet()) {
replicationAdmin.removePeer(oldPeer);
}
// set the sink to be the target
ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
peerConfig.setClusterKey(sink.toString());
// set the test table to be the table to replicate
HashMap<TableName, ArrayList<String>> toReplicate = new HashMap<>();
toReplicate.put(tableName, new ArrayList<String>(0));
replicationAdmin.addPeer("TestPeer", peerConfig, toReplicate);
replicationAdmin.enableTableRep(tableName);
replicationAdmin.close();
}
for (ClusterID cluster : clusters) {
cluster.closeConnection();
}
}
protected void waitForReplication() throws Exception {
// TODO: we shouldn't be sleeping here. It would be better to query the region servers
// and wait for them to report 0 replication lag.
Thread.sleep(generateVerifyGap * 1000);
}
/**
* Run the {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Generator} in the
* source cluster. This assumes that the tables have been setup via setupTablesAndReplication.
*
* @throws Exception
*/
protected void runGenerator() throws Exception {
Path outputPath = new Path(outputDir);
UUID uuid = UUID.randomUUID(); //create a random UUID.
Path generatorOutput = new Path(outputPath, uuid.toString());
Generator generator = new Generator();
generator.setConf(source.getConfiguration());
int retCode = generator.run(numMappers, numNodes, generatorOutput, width, wrapMultiplier);
if (retCode > 0) {
throw new RuntimeException("Generator failed with return code: " + retCode);
}
}
/**
* Run the {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Verify}
* in the sink cluster. If replication is working properly the data written at the source
* cluster should be available in the sink cluster after a reasonable gap
*
* @param expectedNumNodes the number of nodes we are expecting to see in the sink cluster
* @throws Exception
*/
protected void runVerify(long expectedNumNodes) throws Exception {
Path outputPath = new Path(outputDir);
UUID uuid = UUID.randomUUID(); //create a random UUID.
Path iterationOutput = new Path(outputPath, uuid.toString());
Verify verify = new Verify();
verify.setConf(sink.getConfiguration());
int retCode = verify.run(iterationOutput, numReducers);
if (retCode > 0) {
throw new RuntimeException("Verify.run failed with return code: " + retCode);
}
if (!verify.verify(expectedNumNodes)) {
throw new RuntimeException("Verify.verify failed");
}
LOG.info("Verify finished with success. Total nodes=" + expectedNumNodes);
}
/**
* The main test runner
*
* This test has 4 steps:
* 1: setupTablesAndReplication
* 2: generate the data into the source cluster
* 3: wait for replication to propagate
* 4: verify that the data is available in the sink cluster
*
* @param args should be empty
* @return 0 on success
* @throws Exception on an error
*/
@Override
public int run(String[] args) throws Exception {
source = new ClusterID(getConf(), sourceClusterIdString);
sink = new ClusterID(getConf(), sinkClusterIdString);
if (!noReplicationSetup) {
setupTablesAndReplication();
}
int expectedNumNodes = 0;
for (int i = 0; i < numIterations; i++) {
LOG.info("Starting iteration = " + i);
expectedNumNodes += numMappers * numNodes;
runGenerator();
waitForReplication();
runVerify(expectedNumNodes);
}
/**
* we are always returning 0 because exceptions are thrown when there is an error
* in the verification step.
*/
return 0;
}
}
@Override
protected void addOptions() {
super.addOptions();
addRequiredOptWithArg("s", SOURCE_CLUSTER_OPT,
"Cluster ID of the source cluster (e.g. localhost:2181:/hbase)");
addRequiredOptWithArg("r", DEST_CLUSTER_OPT,
"Cluster ID of the sink cluster (e.g. localhost:2182:/hbase)");
addRequiredOptWithArg("d", OUTPUT_DIR_OPT,
"Temporary directory where to write keys for the test");
addOptWithArg("nm", NUM_MAPPERS_OPT,
"Number of mappers (default: " + DEFAULT_NUM_MAPPERS + ")");
addOptWithArg("nr", NUM_REDUCERS_OPT,
"Number of reducers (default: " + DEFAULT_NUM_MAPPERS + ")");
addOptNoArg("nrs", NO_REPLICATION_SETUP_OPT,
"Don't setup tables or configure replication before starting test");
addOptWithArg("n", NUM_NODES_OPT,
"Number of nodes. This should be a multiple of width * wrapMultiplier." +
" (default: " + DEFAULT_NUM_NODES + ")");
addOptWithArg("i", ITERATIONS_OPT, "Number of iterations to run (default: " +
DEFAULT_NUM_ITERATIONS + ")");
addOptWithArg("t", GENERATE_VERIFY_GAP_OPT,
"Gap between generate and verify steps in seconds (default: " +
DEFAULT_GENERATE_VERIFY_GAP + ")");
addOptWithArg("w", WIDTH_OPT,
"Width of the linked list chain (default: " + DEFAULT_WIDTH + ")");
addOptWithArg("wm", WRAP_MULTIPLIER_OPT, "How many times to wrap around (default: " +
DEFAULT_WRAP_MULTIPLIER + ")");
}
@Override
protected void processOptions(CommandLine cmd) {
processBaseOptions(cmd);
sourceClusterIdString = cmd.getOptionValue(SOURCE_CLUSTER_OPT);
sinkClusterIdString = cmd.getOptionValue(DEST_CLUSTER_OPT);
outputDir = cmd.getOptionValue(OUTPUT_DIR_OPT);
/** This uses parseInt from {@link org.apache.hadoop.hbase.util.AbstractHBaseTool} */
numMappers = parseInt(cmd.getOptionValue(NUM_MAPPERS_OPT,
Integer.toString(DEFAULT_NUM_MAPPERS)),
1, Integer.MAX_VALUE);
numReducers = parseInt(cmd.getOptionValue(NUM_REDUCERS_OPT,
Integer.toString(DEFAULT_NUM_REDUCERS)),
1, Integer.MAX_VALUE);
numNodes = parseInt(cmd.getOptionValue(NUM_NODES_OPT, Integer.toString(DEFAULT_NUM_NODES)),
1, Integer.MAX_VALUE);
generateVerifyGap = parseInt(cmd.getOptionValue(GENERATE_VERIFY_GAP_OPT,
Integer.toString(DEFAULT_GENERATE_VERIFY_GAP)),
1, Integer.MAX_VALUE);
numIterations = parseInt(cmd.getOptionValue(ITERATIONS_OPT,
Integer.toString(DEFAULT_NUM_ITERATIONS)),
1, Integer.MAX_VALUE);
width = parseInt(cmd.getOptionValue(WIDTH_OPT, Integer.toString(DEFAULT_WIDTH)),
1, Integer.MAX_VALUE);
wrapMultiplier = parseInt(cmd.getOptionValue(WRAP_MULTIPLIER_OPT,
Integer.toString(DEFAULT_WRAP_MULTIPLIER)),
1, Integer.MAX_VALUE);
if (cmd.hasOption(NO_REPLICATION_SETUP_OPT)) {
noReplicationSetup = true;
}
if (numNodes % (width * wrapMultiplier) != 0) {
throw new RuntimeException("numNodes must be a multiple of width and wrap multiplier");
}
}
@Override
public int runTestFromCommandLine() throws Exception {
VerifyReplicationLoop tool = new VerifyReplicationLoop();
tool.integrationTestBigLinkedList = this;
return ToolRunner.run(getConf(), tool, null);
}
public static void main(String[] args) throws Exception {
Configuration conf = HBaseConfiguration.create();
IntegrationTestingUtility.setUseDistributedCluster(conf);
int ret = ToolRunner.run(conf, new IntegrationTestReplication(), args);
System.exit(ret);
}
}
| juwi/hbase | hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java | Java | apache-2.0 | 17,081 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.StringAndPos;
import org.apache.calcite.sql.test.AbstractSqlTester;
import org.apache.calcite.sql.test.SqlTestFactory;
import org.apache.calcite.sql.test.SqlTests;
import org.apache.calcite.sql.validate.SqlValidator;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.junit.jupiter.api.Assertions.assertNotNull;
/**
* Tester of {@link SqlValidator} and runtime execution of the input SQL.
*/
class SqlRuntimeTester extends AbstractSqlTester {
SqlRuntimeTester() {
}
@Override public void checkFails(SqlTestFactory factory, StringAndPos sap,
String expectedError, boolean runtime) {
final StringAndPos sap2 =
StringAndPos.of(runtime ? buildQuery2(factory, sap.addCarets())
: buildQuery(sap.addCarets()));
assertExceptionIsThrown(factory, sap2, expectedError, runtime);
}
@Override public void checkAggFails(SqlTestFactory factory,
String expr,
String[] inputValues,
String expectedError,
boolean runtime) {
String query =
SqlTests.generateAggQuery(expr, inputValues);
final StringAndPos sap = StringAndPos.of(query);
assertExceptionIsThrown(factory, sap, expectedError, runtime);
}
@Override public void assertExceptionIsThrown(SqlTestFactory factory,
StringAndPos sap, @Nullable String expectedMsgPattern) {
assertExceptionIsThrown(factory, sap, expectedMsgPattern, false);
}
public void assertExceptionIsThrown(SqlTestFactory factory,
StringAndPos sap, @Nullable String expectedMsgPattern, boolean runtime) {
final SqlNode sqlNode;
try {
sqlNode = parseQuery(factory, sap.sql);
} catch (Throwable e) {
checkParseEx(e, expectedMsgPattern, sap);
return;
}
Throwable thrown = null;
final SqlTests.Stage stage;
final SqlValidator validator = factory.createValidator();
if (runtime) {
stage = SqlTests.Stage.RUNTIME;
SqlNode validated = validator.validate(sqlNode);
assertNotNull(validated);
try {
check(factory, sap.sql, SqlTests.ANY_TYPE_CHECKER,
SqlTests.ANY_PARAMETER_CHECKER, SqlTests.ANY_RESULT_CHECKER);
} catch (Throwable ex) {
// get the real exception in runtime check
thrown = ex;
}
} else {
stage = SqlTests.Stage.VALIDATE;
try {
validator.validate(sqlNode);
} catch (Throwable ex) {
thrown = ex;
}
}
SqlTests.checkEx(thrown, expectedMsgPattern, sap, stage);
}
}
| apache/calcite | testkit/src/main/java/org/apache/calcite/test/SqlRuntimeTester.java | Java | apache-2.0 | 3,405 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.scalar;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.RowType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;
import java.util.Optional;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.type.UnknownType.UNKNOWN;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
import static java.util.Arrays.asList;
public class TestZipWithFunction
extends AbstractTestFunctions
{
@Test
public void testRetainedSizeBounded()
{
assertCachedInstanceHasBoundedRetainedSize("zip_with(ARRAY [25, 26, 27], ARRAY [1, 2, 3], (x, y) -> x + y)");
}
@Test
public void testSameLength()
{
assertFunction("zip_with(ARRAY[], ARRAY[], (x, y) -> (y, x))",
new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
ImmutableList.of());
assertFunction("zip_with(ARRAY[1, 2], ARRAY['a', 'b'], (x, y) -> (y, x))",
new ArrayType(new RowType(ImmutableList.of(createVarcharType(1), INTEGER), Optional.empty())),
ImmutableList.of(ImmutableList.of("a", 1), ImmutableList.of("b", 2)));
assertFunction("zip_with(ARRAY[1, 2], ARRAY[CAST('a' AS VARCHAR), CAST('b' AS VARCHAR)], (x, y) -> (y, x))",
new ArrayType(new RowType(ImmutableList.of(VARCHAR, INTEGER), Optional.empty())),
ImmutableList.of(ImmutableList.of("a", 1), ImmutableList.of("b", 2)));
assertFunction("zip_with(ARRAY[1, 1], ARRAY[1, 2], (x, y) -> x + y)",
new ArrayType(INTEGER),
ImmutableList.of(2, 3));
assertFunction("zip_with(CAST(ARRAY[3, 5] AS ARRAY(BIGINT)), CAST(ARRAY[1, 2] AS ARRAY(BIGINT)), (x, y) -> x * y)",
new ArrayType(BIGINT),
ImmutableList.of(3L, 10L));
assertFunction("zip_with(ARRAY[true, false], ARRAY[false, true], (x, y) -> x OR y)",
new ArrayType(BOOLEAN),
ImmutableList.of(true, true));
assertFunction("zip_with(ARRAY['a', 'b'], ARRAY['c', 'd'], (x, y) -> concat(x, y))",
new ArrayType(VARCHAR),
ImmutableList.of("ac", "bd"));
assertFunction("zip_with(ARRAY[MAP(ARRAY[CAST ('a' AS VARCHAR)], ARRAY[1]), MAP(ARRAY[CAST('b' AS VARCHAR)], ARRAY[2])], ARRAY[MAP(ARRAY['c'], ARRAY[3]), MAP()], (x, y) -> map_concat(x, y))",
new ArrayType(mapType(VARCHAR, INTEGER)),
ImmutableList.of(ImmutableMap.of("a", 1, "c", 3), ImmutableMap.of("b", 2)));
}
@Test
public void testDifferentLength()
{
assertInvalidFunction("zip_with(ARRAY[1], ARRAY['a', 'b'], (x, y) -> (y, x))", "Arrays must have the same length");
assertInvalidFunction("zip_with(ARRAY[NULL, 2], ARRAY['a'], (x, y) -> (y, x))", "Arrays must have the same length");
assertInvalidFunction("zip_with(ARRAY[1, NULL], ARRAY[NULL, 2, 1], (x, y) -> x + y)", "Arrays must have the same length");
}
@Test
public void testWithNull()
{
assertFunction("zip_with(CAST(NULL AS ARRAY(UNKNOWN)), ARRAY[], (x, y) -> (y, x))",
new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
null);
assertFunction("zip_with(ARRAY[NULL], ARRAY[NULL], (x, y) -> (y, x))",
new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
ImmutableList.of(asList(null, null)));
assertFunction("zip_with(ARRAY[NULL], ARRAY[NULL], (x, y) -> x IS NULL AND y IS NULL)",
new ArrayType(BOOLEAN),
ImmutableList.of(true));
assertFunction("zip_with(ARRAY['a', NULL], ARRAY[NULL, 1], (x, y) -> x IS NULL OR y IS NULL)",
new ArrayType(BOOLEAN),
ImmutableList.of(true, true));
assertFunction("zip_with(ARRAY[1, NULL], ARRAY[3, 4], (x, y) -> x + y)",
new ArrayType(INTEGER),
asList(4, null));
assertFunction("zip_with(ARRAY['a', 'b'], ARRAY[1, 3], (x, y) -> NULL)",
new ArrayType(UNKNOWN),
asList(null, null));
}
}
| Teradata/presto | presto-main/src/test/java/com/facebook/presto/operator/scalar/TestZipWithFunction.java | Java | apache-2.0 | 5,166 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.admin.jmx.internal;
import javax.management.ObjectName;
import javax.management.modelmbean.ModelMBean;
import org.apache.geode.admin.internal.SystemMemberCacheImpl;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.admin.GemFireVM;
/**
* MBean representation of {@link org.apache.geode.admin.SystemMemberRegion}.
*
* @since GemFire 3.5
*/
public class SystemMemberRegionJmxImpl
extends org.apache.geode.admin.internal.SystemMemberRegionImpl
implements org.apache.geode.admin.jmx.internal.ManagedResource {
/** The object name of this managed resource */
private ObjectName objectName;
// -------------------------------------------------------------------------
// Constructor(s)
// -------------------------------------------------------------------------
/**
* Constructs an instance of SystemMemberRegionJmxImpl.
*
* @param cache the cache this region belongs to
* @param region internal region to delegate real work to
*/
public SystemMemberRegionJmxImpl(SystemMemberCacheImpl cache, Region region)
throws org.apache.geode.admin.AdminException {
super(cache, region);
initializeMBean(cache);
}
/** Create and register the MBean to manage this resource */
private void initializeMBean(SystemMemberCacheImpl cache)
throws org.apache.geode.admin.AdminException {
GemFireVM vm = cache.getVM();
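    // Example of a resulting name (illustrative values):
    //   GemFire.Cache:path=/my/region,name=myCache,id=1,owner=member1,type=Region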
mbeanName = "GemFire.Cache:" + "path="
+ MBeanUtils.makeCompliantMBeanNameProperty(getFullPath()) + ",name="
+ MBeanUtils.makeCompliantMBeanNameProperty(cache.getName()) + ",id="
+ cache.getId() + ",owner="
+ MBeanUtils.makeCompliantMBeanNameProperty(vm.getId().toString())
+ ",type=Region";
objectName = MBeanUtils.createMBean(this);
}
// -------------------------------------------------------------------------
// ManagedResource implementation
// -------------------------------------------------------------------------
/** The name of the MBean that will manage this resource */
private String mbeanName;
/** The ModelMBean that is configured to manage this resource */
private ModelMBean modelMBean;
@Override
public String getMBeanName() {
return mbeanName;
}
@Override
public ModelMBean getModelMBean() {
return modelMBean;
}
@Override
public void setModelMBean(ModelMBean modelMBean) {
this.modelMBean = modelMBean;
}
@Override
public ObjectName getObjectName() {
return objectName;
}
@Override
public ManagedResourceType getManagedResourceType() {
return ManagedResourceType.SYSTEM_MEMBER_REGION;
}
@Override
public void cleanupResource() {}
/**
* Checks equality of the given object with <code>this</code> based on the type (Class) and the
* MBean Name returned by <code>getMBeanName()</code> methods.
*
* @param obj object to check equality with
   * @return true if the given object is of the same type and its MBean Name is the same as
* <code>this</code> object's MBean Name, false otherwise
*/
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SystemMemberRegionJmxImpl)) {
return false;
}
SystemMemberRegionJmxImpl other = (SystemMemberRegionJmxImpl) obj;
return getMBeanName().equals(other.getMBeanName());
}
/**
* Returns hash code for <code>this</code> object which is based on the MBean Name generated.
*
* @return hash code for <code>this</code> object
*/
@Override
public int hashCode() {
return getMBeanName().hashCode();
}
}
| jdeppe-pivotal/geode | geode-core/src/main/java/org/apache/geode/admin/jmx/internal/SystemMemberRegionJmxImpl.java | Java | apache-2.0 | 4,385 |
/*
* Copyright (c) 2013 Houbrechts IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.houbie.lesscss.engine;
import com.github.houbie.lesscss.LessParseException;
import com.github.houbie.lesscss.resourcereader.ResourceReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.SequenceInputStream;
import java.util.Map;
import static com.github.houbie.lesscss.LessCompiler.CompilationDetails;
/**
* LessCompilationEngine implementation that uses a standard {@link javax.script.ScriptEngine} implementation.
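 *
 * <p>Usage sketch (illustrative; the engine name is an assumption):</p>
 * <pre>{@code
 * ScriptEngineLessCompilationEngine engine = new ScriptEngineLessCompilationEngine("nashorn");
 * engine.initialize(null); // no custom JavaScript
 * }</pre>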
*/
public class ScriptEngineLessCompilationEngine implements LessCompilationEngine {
private static Logger logger = LoggerFactory.getLogger(ScriptEngineLessCompilationEngine.class);
private static final String JS_ALL_MIN_JS = "js/all-min.js";
private static final String LESS_SCRIPT = "js/less-rhino-1.7.0-mod.js";
private static final String MINIFY_SCRIPT = "js/cssmin.js";
private static final String COMPILE_SCRIPT = "js/compile.js";
private static final boolean MINIFIED = true;
private ScriptEngine scriptEngine;
/**
* @param scriptEngineName the name of the underlying ScriptEngine (e.g. "nashorn", "rhino", ...)
*/
public ScriptEngineLessCompilationEngine(String scriptEngineName) {
logger.info("creating new NashornEngine");
ScriptEngineManager factory = new ScriptEngineManager();
scriptEngine = factory.getEngineByName(scriptEngineName);
if (scriptEngine == null) {
throw new RuntimeException("The ScriptEngine " + scriptEngineName + " could not be loaded");
}
}
/**
* @param scriptEngine the underlying ScriptEngine
*/
public ScriptEngineLessCompilationEngine(ScriptEngine scriptEngine) {
logger.info("creating new engine with {}", scriptEngine.getClass());
this.scriptEngine = scriptEngine;
}
@Override
public void initialize(Reader customJavaScriptReader) {
try {
if (customJavaScriptReader != null) {
scriptEngine.eval(customJavaScriptReader);
}
scriptEngine.eval(getLessScriptReader());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private Reader getLessScriptReader() {
ClassLoader cl = getClass().getClassLoader();
InputStream concatenatedScripts;
if (MINIFIED) {
concatenatedScripts = cl.getResourceAsStream(JS_ALL_MIN_JS);
} else {
concatenatedScripts = new SequenceInputStream(cl.getResourceAsStream(LESS_SCRIPT), new SequenceInputStream(cl.getResourceAsStream(MINIFY_SCRIPT), cl.getResourceAsStream(COMPILE_SCRIPT)));
}
return new InputStreamReader(concatenatedScripts);
}
@Override
public CompilationDetails compile(String less, CompilationOptions compilationOptions, ResourceReader resourceReader) {
Map result;
try {
result = (Map) ((Invocable) scriptEngine).invokeFunction("compile", less, compilationOptions, resourceReader);
} catch (Exception e) {
throw new RuntimeException("Exception while compiling less", e);
}
if (result.get("parseException") != null) {
throw new LessParseException((String) result.get("parseException"));
}
return new CompilationDetails((String) result.get("css"), (String) result.get("sourceMapContent"));
}
public ScriptEngine getScriptEngine() {
return scriptEngine;
}
}
| houbie/lesscss | src/main/java/com/github/houbie/lesscss/engine/ScriptEngineLessCompilationEngine.java | Java | apache-2.0 | 4,246 |
package issues.issue130;
public class Impl_0 {
public int a = 0;
protected void printMe(String s) {
System.out.println(s);
}
}
| intrigus/jtransc | jtransc-main/test/issues/issue130/Impl_0.java | Java | apache-2.0 | 134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.export;
import java.io.IOException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.OrdinalMap;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
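/**
 * A {@link SortValue} over a string docValues field. Comparisons use ordinals
 * (mapped to global ordinals across segments via {@link OrdinalMap}), so the
 * actual {@link BytesRef} is only materialized when the value is read.
 */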
class StringValue implements SortValue {
private final SortedDocValues globalDocValues;
private final OrdinalMap ordinalMap;
private final String field;
private final IntComp comp;
  protected LongValues toGlobal = LongValues.IDENTITY; // maps this segment's ordinals to global ordinals; never null
protected SortedDocValues docValues;
public int currentOrd;
protected int lastDocID;
private boolean present;
private BytesRef lastBytes;
private String lastString;
private int lastOrd = -1;
private int leafOrd = -1;
public StringValue(SortedDocValues globalDocValues, String field, IntComp comp) {
this.globalDocValues = globalDocValues;
this.docValues = globalDocValues;
if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
this.ordinalMap = ((MultiDocValues.MultiSortedDocValues) globalDocValues).mapping;
} else {
this.ordinalMap = null;
}
this.field = field;
this.comp = comp;
this.currentOrd = comp.resetValue();
this.present = false;
}
public String getLastString() {
return this.lastString;
}
public void setLastString(String lastString) {
this.lastString = lastString;
}
public StringValue copy() {
StringValue copy = new StringValue(globalDocValues, field, comp);
return copy;
}
public void setCurrentValue(int docId) throws IOException {
// System.out.println(docId +":"+lastDocID);
/*
if (docId < lastDocID) {
throw new AssertionError("docs were sent out-of-order: lastDocID=" + lastDocID + " vs doc=" + docId);
}
lastDocID = docId;
*/
if (docId > docValues.docID()) {
docValues.advance(docId);
}
if (docId == docValues.docID()) {
present = true;
currentOrd = docValues.ordValue();
} else {
present = false;
currentOrd = -1;
}
}
@Override
public boolean isPresent() {
return present;
}
public void setCurrentValue(SortValue sv) {
StringValue v = (StringValue) sv;
this.currentOrd = v.currentOrd;
this.present = v.present;
this.leafOrd = v.leafOrd;
this.lastOrd = v.lastOrd;
this.toGlobal = v.toGlobal;
}
public Object getCurrentValue() throws IOException {
    assert present;
if (currentOrd != lastOrd) {
lastBytes = docValues.lookupOrd(currentOrd);
lastOrd = currentOrd;
lastString = null;
}
return lastBytes;
}
public void toGlobalValue(SortValue previousValue) {
lastOrd = currentOrd;
StringValue sv = (StringValue) previousValue;
if (sv.lastOrd == currentOrd) {
// Take the global ord from the previousValue unless we are a -1 which is the same in both
// global and leaf ordinal
if (this.currentOrd != -1) {
this.currentOrd = sv.currentOrd;
}
} else {
if (this.currentOrd > -1) {
this.currentOrd = (int) toGlobal.get(this.currentOrd);
}
}
}
public String getField() {
return field;
}
public void setNextReader(LeafReaderContext context) throws IOException {
leafOrd = context.ord;
if (ordinalMap != null) {
toGlobal = ordinalMap.getGlobalOrds(context.ord);
}
docValues = DocValues.getSorted(context.reader(), field);
lastDocID = 0;
}
public void reset() {
this.currentOrd = comp.resetValue();
this.present = false;
lastDocID = 0;
}
public int compareTo(SortValue o) {
StringValue sv = (StringValue) o;
return comp.compare(currentOrd, sv.currentOrd);
}
public String toString() {
return Integer.toString(this.currentOrd);
}
}
| apache/solr | solr/core/src/java/org/apache/solr/handler/export/StringValue.java | Java | apache-2.0 | 4,760 |
package at.jku.sea.cloud.rest.pojo.stream.provider;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.annotation.JsonTypeName;
import at.jku.sea.cloud.rest.pojo.PojoCollectionArtifact;
@JsonTypeInfo(use = Id.NAME, property = "__type")
@JsonTypeName(value = "CollectionArtifactProvider")
public class PojoCollectionArtifactProvider extends PojoProvider {
private PojoCollectionArtifact collectionArtifact;
public PojoCollectionArtifactProvider() {
}
public PojoCollectionArtifactProvider(PojoCollectionArtifact collectionArtifact) {
this.collectionArtifact = collectionArtifact;
}
public PojoCollectionArtifact getCollectionArtifact() {
return collectionArtifact;
}
public void setCollectionArtifact(PojoCollectionArtifact collectionArtifact) {
this.collectionArtifact = collectionArtifact;
}
}
| OnurKirkizoglu/master_thesis | at.jku.sea.cloud.rest/src/main/java/at/jku/sea/cloud/rest/pojo/stream/provider/PojoCollectionArtifactProvider.java | Java | apache-2.0 | 932 |
package com.sebastian_daschner.scalable_coffee_shop.beans.boundary;
import javax.inject.Inject;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
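/**
 * REST resource exposing the stored beans.
 *
 * <p>Example request (illustrative; host and port are assumptions):</p>
 * <pre>
 * curl -X POST -H "Content-Type: application/json" \
 *      -d '{"beanOrigin":"colombia","amount":4}' http://localhost:8080/beans
 * </pre>
 */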
@Path("beans")
public class BeansResource {
@Inject
BeanCommandService commandService;
@Inject
BeanQueryService queryService;
@GET
public JsonObject getBeans() {
final JsonObjectBuilder builder = Json.createObjectBuilder();
queryService.getStoredBeans()
.entrySet().forEach(e -> builder.add(e.getKey(), e.getValue()));
return builder.build();
}
@POST
public void storeBeans(JsonObject object) {
final String beanOrigin = object.getString("beanOrigin", null);
final int amount = object.getInt("amount", 0);
if (beanOrigin == null || amount == 0)
throw new BadRequestException();
commandService.storeBeans(beanOrigin, amount);
}
}
| sdaschner/scalable-coffee-shop | beans/src/main/java/com/sebastian_daschner/scalable_coffee_shop/beans/boundary/BeansResource.java | Java | apache-2.0 | 1,063 |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.execution.configurations;
import com.intellij.execution.ExecutionBundle;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.util.NlsContexts.DialogMessage;
import com.intellij.util.ThrowableRunnable;
import javax.swing.*;
import static com.intellij.openapi.util.NlsContexts.DialogTitle;
public class RuntimeConfigurationException extends ConfigurationException {
public RuntimeConfigurationException(@DialogMessage String message, @DialogTitle String title) {
super(message, title);
}
public RuntimeConfigurationException(@DialogMessage String message) {
super(message, ExecutionBundle.message("run.configuration.error.dialog.title"));
}
public RuntimeConfigurationException(@DialogMessage String message, Throwable cause) {
super(message, cause, ExecutionBundle.message("run.configuration.error.dialog.title"));
}
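  /**
   * Usage sketch (illustrative component and check): runs the given check and turns any
   * thrown exception into a {@link ValidationInfo} attached to the component.
   * <pre>{@code
   * ValidationInfo info = RuntimeConfigurationException.validate(myField, () -> checkMyField());
   * }</pre>
   */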
public static <T extends Throwable> ValidationInfo validate(JComponent component, ThrowableRunnable<T> runnable) {
try {
runnable.run();
return new ValidationInfo("", component);
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Throwable t) {
return new ValidationInfo(t.getMessage(), component);
}
}
} | jwren/intellij-community | platform/execution/src/com/intellij/execution/configurations/RuntimeConfigurationException.java | Java | apache-2.0 | 1,497 |
package com.inmobi.messaging;
/*
* #%L
* messaging-client-core
* %%
* Copyright (C) 2012 - 2014 InMobi
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.nio.ByteBuffer;
/**
* Message class holding the data.
*
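 * <p>Usage sketch:</p>
 * <pre>{@code
 * Message m = new Message("payload".getBytes());
 * long size = m.getSize(); // 7
 * }</pre>
 *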
*/
public final class Message implements MessageBase {
private ByteBuffer data;
public Message() {
}
/**
* Create new message with {@link ByteBuffer}
*
* @param data The {@link ByteBuffer}
*/
public Message(ByteBuffer data) {
this.data = data;
}
/**
* Create new message with byte array
*
* @param data The byte array.
*/
public Message(byte[] data) {
this.data = ByteBuffer.wrap(data);
}
/**
* Get the data associated with message.
*
* @return {@link ByteBuffer} holding the data.
*/
public ByteBuffer getData() {
return data;
}
public synchronized void set(ByteBuffer data) {
this.data = data;
}
public synchronized void clear() {
data.clear();
}
public long getSize() {
return data.limit();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((data == null) ? 0 : data.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Message other = (Message) obj;
if (data == null) {
if (other.data != null) {
return false;
}
} else if (!data.equals(other.data)) {
return false;
}
return true;
}
@Override
public Message clone() {
Message m = new Message(data.duplicate());
return m;
}
}
| sreedishps/pintail | messaging-client-core/src/main/java/com/inmobi/messaging/Message.java | Java | apache-2.0 | 2,267 |
package org.jboss.resteasy.client.core;
import org.jboss.resteasy.client.ClientExecutor;
import org.jboss.resteasy.client.ClientRequest;
import org.jboss.resteasy.client.ClientResponse;
import org.jboss.resteasy.client.ProxyConfig;
import org.jboss.resteasy.client.core.extractors.ClientErrorHandler;
import org.jboss.resteasy.client.core.extractors.ClientRequestContext;
import org.jboss.resteasy.client.core.extractors.EntityExtractor;
import org.jboss.resteasy.client.core.extractors.EntityExtractorFactory;
import org.jboss.resteasy.client.core.marshallers.ClientMarshallerFactory;
import org.jboss.resteasy.client.core.marshallers.Marshaller;
import org.jboss.resteasy.client.exception.mapper.ClientExceptionMapper;
import org.jboss.resteasy.resteasy_jaxrs.i18n.Messages;
import org.jboss.resteasy.specimpl.ResteasyUriBuilder;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.util.MediaTypeHelper;
import javax.ws.rs.Path;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.Providers;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
@SuppressWarnings("unchecked")
public class ClientInvoker extends ClientInterceptorRepositoryImpl implements MethodInvoker
{
protected ResteasyProviderFactory providerFactory;
protected String httpMethod;
protected ResteasyUriBuilder uri;
protected Method method;
protected Class declaring;
protected MediaType accepts;
protected Marshaller[] marshallers;
protected ClientExecutor executor;
protected boolean followRedirects;
protected EntityExtractor extractor;
protected EntityExtractorFactory extractorFactory;
protected URI baseUri;
protected Map<String, Object> attributes = new HashMap<String, Object>();
public ClientInvoker(URI baseUri, Class declaring, Method method, ResteasyProviderFactory providerFactory, ClientExecutor executor, EntityExtractorFactory extractorFactory)
{
this(baseUri, declaring, method, new ProxyConfig(null, executor, providerFactory, extractorFactory, null, null, null));
}
public ClientInvoker(URI baseUri, Class declaring, Method method, ProxyConfig config)
{
this.declaring = declaring;
this.method = method;
      this.providerFactory = config.getProviderFactory();
      // assign the provider factory before building marshallers so they don't receive null
      this.marshallers = ClientMarshallerFactory.createMarshallers(declaring, method, providerFactory, config.getServerConsumes());
this.executor = config.getExecutor();
accepts = MediaTypeHelper.getProduces(declaring, method, config.getServerProduces());
this.uri = new ResteasyUriBuilder();
this.baseUri = baseUri;
uri.uri(baseUri);
if (declaring.isAnnotationPresent(Path.class)) uri.path(declaring);
if (method.isAnnotationPresent(Path.class)) uri.path(method);
this.extractorFactory = config.getExtractorFactory();
this.extractor = extractorFactory.createExtractor(method);
}
public Map<String, Object> getAttributes()
{
return attributes;
}
public MediaType getAccepts()
{
return accepts;
}
public Method getMethod()
{
return method;
}
public Class getDeclaring()
{
return declaring;
}
public ResteasyProviderFactory getProviderFactory()
{
return providerFactory;
}
public Object invoke(Object[] args)
{
boolean isProvidersSet = ResteasyProviderFactory.getContextData(Providers.class) != null;
if (!isProvidersSet) ResteasyProviderFactory.pushContext(Providers.class, providerFactory);
try
{
if (uri == null) throw new RuntimeException(Messages.MESSAGES.baseURINotSetForClientProxy());
ClientRequest request = createRequest(args);
BaseClientResponse clientResponse = null;
try
{
clientResponse = (BaseClientResponse) request.httpMethod(httpMethod);
}
catch (Exception e)
{
ClientExceptionMapper<Exception> mapper = providerFactory.getClientExceptionMapper(Exception.class);
if (mapper != null)
{
throw mapper.toException(e);
}
throw new RuntimeException(e);
}
ClientErrorHandler errorHandler = new ClientErrorHandler(providerFactory.getClientErrorInterceptors());
clientResponse.setAttributeExceptionsTo(method.toString());
clientResponse.setAnnotations(method.getAnnotations());
ClientRequestContext clientRequestContext = new ClientRequestContext(request, clientResponse, errorHandler, extractorFactory, baseUri);
return extractor.extractEntity(clientRequestContext);
}
finally
{
if (!isProvidersSet) ResteasyProviderFactory.popContextData(Providers.class);
}
}
protected ClientRequest createRequest(Object[] args)
{
ClientRequest request = new ClientRequest(uri, executor, providerFactory);
request.getAttributes().putAll(attributes);
if (accepts != null) request.header(HttpHeaders.ACCEPT, accepts.toString());
this.copyClientInterceptorsTo(request);
boolean isClientResponseResult = ClientResponse.class.isAssignableFrom(method.getReturnType());
request.followRedirects(!isClientResponseResult || this.followRedirects);
for (int i = 0; i < marshallers.length; i++)
{
marshallers[i].build(request, args[i]);
}
return request;
}
public String getHttpMethod()
{
return httpMethod;
}
public void setHttpMethod(String httpMethod)
{
this.httpMethod = httpMethod;
}
public boolean isFollowRedirects()
{
return followRedirects;
}
public void setFollowRedirects(boolean followRedirects)
{
this.followRedirects = followRedirects;
}
public void followRedirects()
{
setFollowRedirects(true);
}
} | psakar/Resteasy | resteasy-jaxrs/src/main/java/org/jboss/resteasy/client/core/ClientInvoker.java | Java | apache-2.0 | 6,212 |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package cn.com.smartdevices.bracelet.view;
import android.animation.Animator;
// Referenced classes of package cn.com.smartdevices.bracelet.view:
// RoundProgressBar
class s
implements android.animation.Animator.AnimatorListener
{
final RoundProgressBar a;
    s(RoundProgressBar roundprogressbar)
    {
        super(); // the super() call must come first for this to compile
        a = roundprogressbar;
    }
public void onAnimationCancel(Animator animator)
{
}
public void onAnimationEnd(Animator animator)
{
if (RoundProgressBar.a(a) < RoundProgressBar.b(a) && RoundProgressBar.c(a) < RoundProgressBar.b(a))
{
RoundProgressBar.a(a, RoundProgressBar.b(a));
RoundProgressBar.a(a, RoundProgressBar.a(a) - RoundProgressBar.c(a), RoundProgressBar.c(a), RoundProgressBar.a(a));
}
}
public void onAnimationRepeat(Animator animator)
{
}
public void onAnimationStart(Animator animator)
{
}
}
| vishnudevk/MiBandDecompiled | Original Files/source/src/cn/com/smartdevices/bracelet/view/s.java | Java | apache-2.0 | 1,132 |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.design.widget;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.DrawableContainer;
import android.util.Log;
import java.lang.reflect.Method;
/** Caution. Gross hacks ahead. */
class DrawableUtils {
private static final String LOG_TAG = "DrawableUtils";
private static Method sSetConstantStateMethod;
private static boolean sSetConstantStateMethodFetched;
private DrawableUtils() {}
static boolean setContainerConstantState(
DrawableContainer drawable, Drawable.ConstantState constantState) {
// We can use getDeclaredMethod() on v9+
return setContainerConstantStateV9(drawable, constantState);
}
private static boolean setContainerConstantStateV9(
DrawableContainer drawable, Drawable.ConstantState constantState) {
if (!sSetConstantStateMethodFetched) {
try {
sSetConstantStateMethod =
DrawableContainer.class.getDeclaredMethod(
"setConstantState", DrawableContainer.DrawableContainerState.class);
sSetConstantStateMethod.setAccessible(true);
} catch (NoSuchMethodException e) {
Log.e(LOG_TAG, "Could not fetch setConstantState(). Oh well.");
}
sSetConstantStateMethodFetched = true;
}
if (sSetConstantStateMethod != null) {
try {
sSetConstantStateMethod.invoke(drawable, constantState);
return true;
} catch (Exception e) {
Log.e(LOG_TAG, "Could not invoke setConstantState(). Oh well.");
}
}
return false;
}
}
| WeRockStar/iosched | third_party/material-components-android/lib/src/android/support/design/widget/DrawableUtils.java | Java | apache-2.0 | 2,166 |
package com.artemis;
import static org.junit.Assert.assertEquals;
import java.util.NoSuchElementException;
import com.artemis.systems.EntityProcessingSystem;
import com.artemis.utils.IntBag;
import org.junit.Test;
import com.artemis.utils.ImmutableBag;
/**
* Created by obartley on 6/9/14.
*/
public class EntitySystemTest {
@SuppressWarnings("static-method")
@Test(expected = NoSuchElementException.class)
public void test_process_one_inactive() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(0)));
Entity e = w.createEntity();
e.edit().add(new C());
e.disable();
w.process();
}
@SuppressWarnings("static-method")
@Test
public void test_process_one_active() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(1)));
Entity e = w.createEntity();
e.edit().add(new C());
w.process();
}
@Test
public void aspect_exclude_only() {
ExcludingSystem es1 = new ExcludingSystem();
EmptySystem es2 = new EmptySystem();
World w = new World(new WorldConfiguration()
.setSystem(es1)
.setSystem(es2));
Entity e = w.createEntity();
w.process();
assertEquals(1, es1.getActives().size());
assertEquals(1, es2.getActives().size());
}
public static class C extends Component {}
public static class C2 extends Component {}
public static class IteratorTestSystem extends EntitySystem {
public int expectedSize;
@SuppressWarnings("unchecked")
public IteratorTestSystem(int expectedSize) {
super(Aspect.all(C.class));
this.expectedSize = expectedSize;
}
@Override
protected void processSystem() {
assertEquals(expectedSize, subscription.getEntities().size());
getActives().iterator().next();
}
@Override
protected boolean checkProcessing() {
return true;
}
}
public static class ExcludingSystem extends EntityProcessingSystem {
public ExcludingSystem() {
super(Aspect.exclude(C.class));
}
@Override
protected void process(Entity e) {}
}
public static class EmptySystem extends EntityProcessingSystem {
public EmptySystem() {
super(Aspect.all());
}
@Override
protected void process(Entity e) {}
}
}
| antag99/artemis-odb | artemis/src/test/java/com/artemis/EntitySystemTest.java | Java | apache-2.0 | 2,187 |
package com.sqisland.gce2retrofit;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.stream.JsonReader;
import com.squareup.javawriter.JavaWriter;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.text.WordUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static javax.lang.model.element.Modifier.PUBLIC;
public class Generator {
private static final String OPTION_CLASS_MAP = "classmap";
private static final String OPTION_METHODS = "methods";
private static Gson gson = new Gson();
public enum MethodType {
SYNC, ASYNC, REACTIVE
}
public static void main(String... args)
throws IOException, URISyntaxException {
Options options = getOptions();
CommandLine cmd = getCommandLine(options, args);
if (cmd == null) {
return;
}
String[] arguments = cmd.getArgs();
if (arguments.length != 2) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("java -jar gce2retrofit.jar discovery.json output_dir", options);
System.exit(1);
}
String discoveryFile = arguments[0];
String outputDir = arguments[1];
Map<String, String> classMap = cmd.hasOption(OPTION_CLASS_MAP)?
readClassMap(new FileReader(cmd.getOptionValue(OPTION_CLASS_MAP))) : null;
EnumSet<MethodType> methodTypes = getMethods(cmd.getOptionValue(OPTION_METHODS));
generate(new FileReader(discoveryFile), new FileWriterFactory(new File(outputDir)),
classMap, methodTypes);
}
private static Options getOptions() {
Options options = new Options();
options.addOption(
OPTION_CLASS_MAP, true, "Map fields to classes. Format: field_name\\tclass_name");
options.addOption(
OPTION_METHODS, true,
"Methods to generate, either sync, async or reactive. Default is to generate sync & async.");
return options;
}
private static CommandLine getCommandLine(Options options, String... args) {
CommandLineParser parser = new BasicParser();
try {
CommandLine cmd = parser.parse(options, args);
return cmd;
} catch (ParseException e) {
System.out.println("Unexpected exception:" + e.getMessage());
}
return null;
}
public static void generate(
Reader discoveryReader, WriterFactory writerFactory,
Map<String, String> classMap, EnumSet<MethodType> methodTypes)
throws IOException, URISyntaxException {
JsonReader jsonReader = new JsonReader(discoveryReader);
Discovery discovery = gson.fromJson(jsonReader, Discovery.class);
String packageName = StringUtil.getPackageName(discovery.baseUrl);
if (packageName == null || packageName.isEmpty()) {
packageName = StringUtil.getPackageName(discovery.rootUrl);
}
String modelPackageName = packageName + ".model";
for (Entry<String, JsonElement> entry : discovery.schemas.entrySet()) {
generateModel(
writerFactory, modelPackageName, entry.getValue().getAsJsonObject(), classMap);
}
if (discovery.resources != null) {
generateInterfaceFromResources(
writerFactory, packageName, "", discovery.resources, methodTypes);
}
if (discovery.name != null && discovery.methods != null) {
generateInterface(
writerFactory, packageName, discovery.name, discovery.methods, methodTypes);
}
}
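  /**
   * Reads a tab-separated field-to-class mapping, one entry per line, e.g.
   * {@code created\tjava.util.Date} (illustrative values).
   */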
public static Map<String, String> readClassMap(Reader reader) throws IOException {
Map<String, String> classMap = new HashMap<String, String>();
String line;
BufferedReader bufferedReader = new BufferedReader(reader);
while ((line = bufferedReader.readLine()) != null) {
String[] fields = line.split("\t");
if (fields.length == 2) {
classMap.put(fields[0], fields[1]);
}
}
return classMap;
}
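  /**
   * Parses the -methods option value, e.g. {@code "sync,async"} or {@code "reactive"}
   * (illustrative); falls back to sync + async when nothing matches.
   */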
public static EnumSet<MethodType> getMethods(String input) {
EnumSet<MethodType> methodTypes = EnumSet.noneOf(MethodType.class);
if (input != null) {
String[] parts = input.split(",");
for (String part : parts) {
if ("sync".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.SYNC);
}
if ("async".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.ASYNC);
}
if ("reactive".equals(part)) {
methodTypes.add(MethodType.REACTIVE);
}
}
}
if (methodTypes.isEmpty()) {
methodTypes = EnumSet.of(Generator.MethodType.ASYNC, Generator.MethodType.SYNC);
}
return methodTypes;
}
private static void generateModel(
WriterFactory writerFactory, String modelPackageName,
JsonObject schema, Map<String, String> classMap)
throws IOException {
String id = schema.get("id").getAsString();
String path = StringUtil.getPath(modelPackageName, id + ".java");
Writer writer = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(writer);
javaWriter.emitPackage(modelPackageName)
.emitImports("com.google.gson.annotations.SerializedName")
.emitEmptyLine()
.emitImports("java.util.List")
.emitEmptyLine();
String type = schema.get("type").getAsString();
if (type.equals("object")) {
javaWriter.beginType(modelPackageName + "." + id, "class", EnumSet.of(PUBLIC));
generateObject(javaWriter, schema, classMap);
javaWriter.endType();
} else if (type.equals("string")) {
javaWriter.beginType(modelPackageName + "." + id, "enum", EnumSet.of(PUBLIC));
generateEnum(javaWriter, schema);
javaWriter.endType();
}
writer.close();
}
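  // Illustrative input/output for generateModel (an assumption, for clarity):
  // a discovery schema such as
  //   {"id": "Disk", "type": "object",
  //    "properties": {"name": {"type": "string"}}}
  // is emitted as <package>.model/Disk.java roughly of the form:
  //   public class Disk {
  //     public String name;
  //   }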
private static void generateObject(
JavaWriter javaWriter, JsonObject schema, Map<String, String> classMap)
throws IOException {
JsonElement element = schema.get("properties");
if (element == null) {
return;
}
JsonObject properties = element.getAsJsonObject();
for (Entry<String, JsonElement> entry : properties.entrySet()) {
String key = entry.getKey();
String variableName = key;
if (StringUtil.isReservedWord(key)) {
javaWriter.emitAnnotation("SerializedName(\"" + key + "\")");
variableName += "_";
}
PropertyType propertyType = gson.fromJson(
entry.getValue(), PropertyType.class);
String javaType = propertyType.toJavaType();
if (classMap != null && classMap.containsKey(key)) {
javaType = classMap.get(key);
}
javaWriter.emitField(javaType, variableName, EnumSet.of(PUBLIC));
}
}
private static void generateEnum(JavaWriter javaWriter, JsonObject schema) throws IOException {
JsonArray enums = schema.get("enum").getAsJsonArray();
for (int i = 0; i < enums.size(); ++i) {
javaWriter.emitEnumValue(enums.get(i).getAsString());
}
}
private static void generateInterfaceFromResources(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject resources,
EnumSet<MethodType> methodTypes)
throws IOException {
for (Entry<String, JsonElement> entry : resources.entrySet()) {
JsonObject entryValue = entry.getValue().getAsJsonObject();
if (entryValue.has("methods")) {
generateInterface(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("methods").getAsJsonObject(),
methodTypes);
}
if (entryValue.has("resources")) {
generateInterfaceFromResources(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("resources").getAsJsonObject(),
methodTypes);
}
}
}
private static void generateInterface(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject methods,
EnumSet<MethodType> methodTypes)
throws IOException {
String capitalizedName = WordUtils.capitalizeFully(resourceName, '_');
String className = capitalizedName.replaceAll("_", "");
String path = StringUtil.getPath(packageName, className + ".java");
Writer fileWriter = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(fileWriter);
javaWriter.emitPackage(packageName)
.emitImports(packageName + ".model.*")
.emitEmptyLine()
.emitImports(
"retrofit.Callback",
"retrofit.client.Response",
"retrofit.http.GET",
"retrofit.http.POST",
"retrofit.http.PATCH",
"retrofit.http.DELETE",
"retrofit.http.Body",
"retrofit.http.Path",
"retrofit.http.Query");
if (methodTypes.contains(MethodType.REACTIVE)) {
javaWriter.emitImports("rx.Observable");
}
javaWriter.emitEmptyLine();
javaWriter.beginType(
packageName + "." + className, "interface", EnumSet.of(PUBLIC));
for (Entry<String, JsonElement> entry : methods.entrySet()) {
String methodName = entry.getKey();
Method method = gson.fromJson(entry.getValue(), Method.class);
for (MethodType methodType : methodTypes) {
javaWriter.emitAnnotation(method.httpMethod, "\"/" + method.path + "\"");
emitMethodSignature(fileWriter, methodName, method, methodType);
}
}
javaWriter.endType();
fileWriter.close();
}
// TODO: Use JavaWriter to emit method signature
private static void emitMethodSignature(
Writer writer, String methodName, Method method, MethodType methodType) throws IOException {
ArrayList<String> params = new ArrayList<String>();
if (method.request != null) {
params.add("@Body " + method.request.$ref + " " +
(method.request.parameterName != null ? method.request.parameterName : "resource"));
}
for (Entry<String, JsonElement> param : getParams(method)) {
params.add(param2String(param));
}
String returnValue = "void";
if (methodType == MethodType.SYNC && "POST".equals(method.httpMethod)) {
returnValue = "Response";
}
if (method.response != null) {
if (methodType == MethodType.SYNC) {
returnValue = method.response.$ref;
} else if (methodType == MethodType.REACTIVE) {
returnValue = "Observable<" + method.response.$ref + ">";
}
}
if (methodType == MethodType.ASYNC) {
if (method.response == null) {
params.add("Callback<Void> cb");
} else {
params.add("Callback<" + method.response.$ref + "> cb");
}
}
writer.append(" " + returnValue + " " + methodName + (methodType == MethodType.REACTIVE ? "Rx" : "") + "(");
for (int i = 0; i < params.size(); ++i) {
if (i != 0) {
writer.append(", ");
}
writer.append(params.get(i));
}
writer.append(");\n");
}
/**
* Assemble a list of parameters, with the first entries matching the ones
* listed in parameterOrder
*
* @param method The method containing parameters and parameterOrder
* @return Ordered parameters
*/
private static List<Entry<String, JsonElement>> getParams(Method method) {
List<Entry<String, JsonElement>> params
= new ArrayList<Entry<String, JsonElement>>();
if (method.parameters == null) {
return params;
}
// Convert the entry set into a map, and extract the keys not listed in
// parameterOrder
HashMap<String, Entry<String, JsonElement>> map
= new HashMap<String, Entry<String, JsonElement>>();
List<String> remaining = new ArrayList<String>();
for (Entry<String, JsonElement> entry : method.parameters.entrySet()) {
String key = entry.getKey();
map.put(key, entry);
if (method.parameterOrder == null ||
!method.parameterOrder.contains(key)) {
remaining.add(key);
}
}
// Add the keys in parameterOrder
if (method.parameterOrder != null) {
for (String key : method.parameterOrder) {
params.add(map.get(key));
}
}
// Then add the keys not in parameterOrder
for (String key : remaining) {
params.add(map.get(key));
}
return params;
}
private static String param2String(Entry<String, JsonElement> param) {
StringBuffer buf = new StringBuffer();
String paramName = param.getKey();
ParameterType paramType = gson.fromJson(
param.getValue(), ParameterType.class);
if ("path".equals(paramType.location)) {
buf.append("@Path(\"" + paramName + "\") ");
}
if ("query".equals(paramType.location)) {
buf.append("@Query(\"" + paramName + "\") ");
}
String type = paramType.toJavaType();
if (!paramType.required) {
type = StringUtil.primitiveToObject(type);
}
buf.append(type + " " + paramName);
return buf.toString();
}
}
| MaTriXy/gce2retrofit | gce2retrofit/src/main/java/com/sqisland/gce2retrofit/Generator.java | Java | apache-2.0 | 13,413 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.app.catalog.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
import java.sql.Timestamp;
@Entity
@Table(name = "APPLICATION_INTERFACE")
public class ApplicationInterface implements Serializable {
@Id
@Column(name = "INTERFACE_ID")
private String interfaceID;
@Column(name = "APPLICATION_NAME")
private String appName;
@Column(name = "APPLICATION_DESCRIPTION")
private String appDescription;
@Column(name = "CREATION_TIME")
private Timestamp creationTime;
@Column(name = "GATEWAY_ID")
private String gatewayId;
@Column(name = "ARCHIVE_WORKING_DIRECTORY")
private boolean archiveWorkingDirectory;
@Column(name = "HAS_OPTIONAL_FILE_INPUTS")
private boolean hasOptionalFileInputs;
@Column(name = "UPDATE_TIME")
private Timestamp updateTime;
public String getGatewayId() {
return gatewayId;
}
public void setGatewayId(String gatewayId) {
this.gatewayId = gatewayId;
}
public boolean isArchiveWorkingDirectory() {
return archiveWorkingDirectory;
}
public void setArchiveWorkingDirectory(boolean archiveWorkingDirectory) {
this.archiveWorkingDirectory = archiveWorkingDirectory;
}
public Timestamp getCreationTime() {
return creationTime;
}
public void setCreationTime(Timestamp creationTime) {
this.creationTime = creationTime;
}
public Timestamp getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Timestamp updateTime) {
this.updateTime = updateTime;
}
public String getInterfaceID() {
return interfaceID;
}
public void setInterfaceID(String interfaceID) {
this.interfaceID = interfaceID;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getAppDescription() {
return appDescription;
}
public void setAppDescription(String appDescription) {
this.appDescription = appDescription;
}
public boolean isHasOptionalFileInputs() {
return hasOptionalFileInputs;
}
public void setHasOptionalFileInputs(boolean hasOptionalFileInputs) {
this.hasOptionalFileInputs = hasOptionalFileInputs;
}
}
| machristie/airavata | modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/model/ApplicationInterface.java | Java | apache-2.0 | 3,284 |
/**
* Copyright 2014 Nortal AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nortal.petit.orm.statement;
import java.util.List;
import org.springframework.util.CollectionUtils;
/**
* @author Lauri Lättemäe (lauri.lattemae@nortal.com)
* @created 29.04.2013
*/
public abstract class ExecutableStatement<B> extends SimpleStatement<B> {
    /**
     * Returns the statement's SQL with the parameter values filled in. When
     * the statement is bound to multiple beans, one SQL string is rendered
     * per bean, each on its own line.
     *
     * @return the SQL with parameter values
     */
@Override
public String getSqlWithParams() {
prepare();
StringBuffer sb = new StringBuffer();
if (!CollectionUtils.isEmpty(getBeans())) {
for (B bean : getBeans()) {
prepare(bean);
sb.append(super.getSqlWithParams()).append("\n");
}
} else {
sb.append(super.getSqlWithParams()).append("\n");
}
return sb.toString();
}
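    // Illustrative output (an assumption, not from the original source): a
    // statement prepared over two beans renders one line per bean, e.g.
    //   INSERT INTO person (name) VALUES ('Ann')
    //   INSERT INTO person (name) VALUES ('Bob')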
protected abstract List<B> getBeans();
protected abstract void prepare(B bean);
public abstract void exec();
}
| jimmytheneutrino/petit | modules/orm/src/main/java/com/nortal/petit/orm/statement/ExecutableStatement.java | Java | apache-2.0 | 1,555 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.graph.GraphReachability;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Removes dead code from a parse tree. The kinds of dead code that this pass
* removes are:
* - Any code following a return statement, such as the <code>alert</code>
* call in: <code>if (x) { return; alert('unreachable'); }</code>.
* - Statements that have no side effects, such as:
* <code>a.b.MyClass.prototype.propertyName;</code> or <code>true;</code>.
 *   Statements like the first of those two examples sometimes appear
 *   intentionally, so that prototype properties can be annotated using
 *   JSDoc without actually being initialized.
*
*/
class UnreachableCodeElimination extends AbstractPostOrderCallback
implements CompilerPass, ScopedCallback {
private static final Logger logger =
Logger.getLogger(UnreachableCodeElimination.class.getName());
private final AbstractCompiler compiler;
private final boolean removeNoOpStatements;
Deque<ControlFlowGraph<Node>> cfgStack =
new LinkedList<ControlFlowGraph<Node>>();
ControlFlowGraph<Node> curCfg = null;
UnreachableCodeElimination(AbstractCompiler compiler,
boolean removeNoOpStatements) {
this.compiler = compiler;
this.removeNoOpStatements = removeNoOpStatements;
}
@Override
public void enterScope(NodeTraversal t) {
Scope scope = t.getScope();
// Computes the control flow graph.
ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false);
cfa.process(null, scope.getRootNode());
cfgStack.push(curCfg);
curCfg = cfa.getCfg();
new GraphReachability<Node, ControlFlowGraph.Branch>(curCfg)
.compute(curCfg.getEntry().getValue());
}
@Override
public void exitScope(NodeTraversal t) {
curCfg = cfgStack.pop();
}
@Override
public void process(Node externs, Node root) {
NodeTraversal.traverse(compiler, root, this);
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (parent == null) {
return;
}
if (n.getType() == Token.FUNCTION || n.getType() == Token.SCRIPT) {
return;
}
    // Removes TRYs whose CATCH was removed and/or whose FINALLY is empty.
// TODO(dcc): Move the parts of this that don't require a control flow
// graph to PeepholeRemoveDeadCode
if (n.getType() == Token.TRY) {
Node body = n.getFirstChild();
Node catchOrFinallyBlock = body.getNext();
Node finallyBlock = catchOrFinallyBlock.getNext();
if (!catchOrFinallyBlock.hasChildren() &&
(finallyBlock == null || !finallyBlock.hasChildren())) {
n.removeChild(body);
parent.replaceChild(n, body);
compiler.reportCodeChange();
n = body;
}
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) { // Not in CFG.
return;
}
if (gNode.getAnnotation() != GraphReachability.REACHABLE ||
(removeNoOpStatements && !NodeUtil.mayHaveSideEffects(n))) {
removeDeadExprStatementSafely(n);
return;
}
tryRemoveUnconditionalBranching(n);
}
  /**
   * Tries to remove n if it is an unconditional branch node (break, continue
   * or return) and the target of n is the same as the follow of n. That is,
   * removing n leaves the control flow unchanged. If n targets another
   * unconditional branch, this function recursively tries to remove that
   * target branch as well. The reason we cascade the removal is that this
   * pass only runs once. Given code such as
   *
   * break -> break -> break
   *
   * where all 3 breaks are useless, the order of removal matters. When we
   * first look at the first break, we see that it branches to the 2nd break.
   * However, if we remove the last break, the 2nd break becomes useless and
   * finally the first break becomes useless as well.
   *
   * @return The target of this jump. If the target is also a useless jump,
   *     the target of that useless jump, recursively.
   */
@SuppressWarnings("fallthrough")
private Node tryRemoveUnconditionalBranching(Node n) {
    /*
     * For each unconditional branching control flow node, check whether
     * ControlFlowAnalysis.computeFollowNode of that node is the same as the
     * branching target. If it is, the branch node is safe to remove.
     *
     * This is not as clever as MinimizeExitPoints because it doesn't do any
     * if-else conversion, but it handles more complicated switch statements
     * much more nicely.
     */
// If n is null the target is the end of the function, nothing to do.
if (n == null) {
return n;
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) {
return n;
}
    // If the parent is null, this means whatever node was there is now
    // useless and has been removed by other logic in this pass. That node,
    // while no longer in the AST, is still in the CFG because we never
    // update the graph as nodes are removed.
if (n.getParent() == null) {
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1) {
return tryRemoveUnconditionalBranching(
outEdges.get(0).getDestination().getValue());
}
}
switch (n.getType()) {
case Token.BLOCK:
if (n.hasChildren()) {
Node first = n.getFirstChild();
return tryRemoveUnconditionalBranching(first);
} else {
return tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
}
case Token.RETURN:
if (n.hasChildren()) {
break;
}
case Token.BREAK:
case Token.CONTINUE:
// We are looking for a control flow changing statement that always
// branches to the same node. If removing it the control flow still
// branches to that same node. It is safe to remove it.
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1 &&
// If there is a next node, there is no chance this jump is useless.
(n.getNext() == null || n.getNext().getType() == Token.FUNCTION)) {
Preconditions.checkState(outEdges.get(0).getValue() == Branch.UNCOND);
Node fallThrough = tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
Node nextCfgNode = outEdges.get(0).getDestination().getValue();
if (nextCfgNode == fallThrough) {
removeDeadExprStatementSafely(n);
return fallThrough;
}
}
}
return n;
}
private void removeDeadExprStatementSafely(Node n) {
if (n.getType() == Token.EMPTY ||
(n.getType() == Token.BLOCK && !n.hasChildren())) {
      // Not always trivial to remove; let FoldConstants work its magic later.
return;
}
    // Removing an unreachable DO node is messy because it means we still have
    // to execute one iteration. If the DO's body has breaks in the middle, it
    // can get even trickier and code size might actually increase.
switch (n.getType()) {
case Token.DO:
case Token.TRY:
case Token.CATCH:
case Token.FINALLY:
return;
}
NodeUtil.redeclareVarsInsideBranch(n);
compiler.reportCodeChange();
if (logger.isLoggable(Level.FINE)) {
logger.fine("Removing " + n.toString());
}
NodeUtil.removeChild(n.getParent(), n);
}
}
| antz29/closure-compiler | src/com/google/javascript/jscomp/UnreachableCodeElimination.java | Java | apache-2.0 | 8,731 |
package theinternet.pages;
import com.frameworkium.core.ui.annotations.Visible;
import com.frameworkium.core.ui.pages.BasePage;
import com.frameworkium.core.ui.pages.PageFactory;
import io.qameta.allure.Step;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import ru.yandex.qatools.htmlelements.annotations.Name;
import ru.yandex.qatools.htmlelements.element.FileInput;
public class FileUploadPage extends BasePage<FileUploadPage> {
@Visible
@Name("Choose Files button")
@FindBy(css = "input#file-upload")
private FileInput chooseFileInput;
@Visible
@Name("Upload button")
@FindBy(css = "input#file-submit")
private WebElement uploadButton;
@Step("Upload a file by choosing file and then clicking upload")
public FileUploadSuccessPage uploadFile(String filePath) {
chooseFileInput.setFileToUpload(filePath);
uploadButton.click();
return PageFactory.newInstance(FileUploadSuccessPage.class);
}
}
| Frameworkium/frameworkium | src/test/java/theinternet/pages/FileUploadPage.java | Java | apache-2.0 | 1,003 |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import java.util.concurrent.atomic.AtomicReference;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.annotations.Nullable;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.functions.Function;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.functions.ObjectHelper;
import io.reactivex.internal.subscriptions.*;
import io.reactivex.internal.util.AtomicThrowable;
import io.reactivex.plugins.RxJavaPlugins;
/**
* Maps a sequence of values into CompletableSources and awaits their termination.
* @param <T> the value type
*/
public final class FlowableFlatMapCompletable<T> extends AbstractFlowableWithUpstream<T, T> {
final Function<? super T, ? extends CompletableSource> mapper;
final int maxConcurrency;
final boolean delayErrors;
public FlowableFlatMapCompletable(Flowable<T> source,
Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
int maxConcurrency) {
super(source);
this.mapper = mapper;
this.delayErrors = delayErrors;
this.maxConcurrency = maxConcurrency;
}
@Override
protected void subscribeActual(Subscriber<? super T> subscriber) {
source.subscribe(new FlatMapCompletableMainSubscriber<T>(subscriber, mapper, delayErrors, maxConcurrency));
}
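    // Usage sketch (illustration; this operator is normally reached via
    // Flowable.flatMapCompletable rather than constructed directly):
    //   Flowable.range(1, 3)
    //       .flatMapCompletable(i ->
    //           Completable.fromRunnable(() -> System.out.println("task " + i)))
    //       .subscribe(() -> System.out.println("all done"));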
static final class FlatMapCompletableMainSubscriber<T> extends BasicIntQueueSubscription<T>
implements FlowableSubscriber<T> {
private static final long serialVersionUID = 8443155186132538303L;
final Subscriber<? super T> downstream;
final AtomicThrowable errors;
final Function<? super T, ? extends CompletableSource> mapper;
final boolean delayErrors;
final CompositeDisposable set;
final int maxConcurrency;
Subscription upstream;
volatile boolean cancelled;
FlatMapCompletableMainSubscriber(Subscriber<? super T> subscriber,
Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
int maxConcurrency) {
this.downstream = subscriber;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.errors = new AtomicThrowable();
this.set = new CompositeDisposable();
this.maxConcurrency = maxConcurrency;
this.lazySet(1);
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
int m = maxConcurrency;
if (m == Integer.MAX_VALUE) {
s.request(Long.MAX_VALUE);
} else {
s.request(m);
}
}
}
@Override
public void onNext(T value) {
CompletableSource cs;
try {
cs = ObjectHelper.requireNonNull(mapper.apply(value), "The mapper returned a null CompletableSource");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
upstream.cancel();
onError(ex);
return;
}
getAndIncrement();
InnerConsumer inner = new InnerConsumer();
if (!cancelled && set.add(inner)) {
cs.subscribe(inner);
}
}
@Override
public void onError(Throwable e) {
if (errors.addThrowable(e)) {
if (delayErrors) {
if (decrementAndGet() == 0) {
Throwable ex = errors.terminate();
downstream.onError(ex);
} else {
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
}
} else {
cancel();
if (getAndSet(0) > 0) {
Throwable ex = errors.terminate();
downstream.onError(ex);
}
}
} else {
RxJavaPlugins.onError(e);
}
}
@Override
public void onComplete() {
if (decrementAndGet() == 0) {
Throwable ex = errors.terminate();
if (ex != null) {
downstream.onError(ex);
} else {
downstream.onComplete();
}
} else {
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
}
}
@Override
public void cancel() {
cancelled = true;
upstream.cancel();
set.dispose();
}
@Override
public void request(long n) {
// ignored, no values emitted
}
@Nullable
@Override
public T poll() throws Exception {
return null; // always empty
}
@Override
public boolean isEmpty() {
return true; // always empty
}
@Override
public void clear() {
// nothing to clear
}
@Override
public int requestFusion(int mode) {
return mode & ASYNC;
}
void innerComplete(InnerConsumer inner) {
set.delete(inner);
onComplete();
}
void innerError(InnerConsumer inner, Throwable e) {
set.delete(inner);
onError(e);
}
final class InnerConsumer extends AtomicReference<Disposable> implements CompletableObserver, Disposable {
private static final long serialVersionUID = 8606673141535671828L;
@Override
public void onSubscribe(Disposable d) {
DisposableHelper.setOnce(this, d);
}
@Override
public void onComplete() {
innerComplete(this);
}
@Override
public void onError(Throwable e) {
innerError(this, e);
}
@Override
public void dispose() {
DisposableHelper.dispose(this);
}
@Override
public boolean isDisposed() {
return DisposableHelper.isDisposed(get());
}
}
}
}
| NiteshKant/RxJava | src/main/java/io/reactivex/internal/operators/flowable/FlowableFlatMapCompletable.java | Java | apache-2.0 | 7,266 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.core.compiler;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import org.kie.dmn.api.core.DMNType;
import org.kie.dmn.api.core.ast.BusinessKnowledgeModelNode;
import org.kie.dmn.api.core.ast.DMNNode;
import org.kie.dmn.api.core.ast.DecisionNode;
import org.kie.dmn.api.core.ast.DecisionServiceNode;
import org.kie.dmn.api.core.ast.InputDataNode;
import org.kie.dmn.core.api.DMNExpressionEvaluator;
import org.kie.dmn.core.ast.DecisionNodeImpl;
import org.kie.dmn.core.impl.CompositeTypeImpl;
import org.kie.dmn.core.impl.DMNModelImpl;
import org.kie.dmn.core.util.Msg;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Decision;
public class DecisionCompiler implements DRGElementCompiler {
@Override
public boolean accept(DRGElement de) {
return de instanceof Decision;
}
@Override
public void compileNode(DRGElement de, DMNCompilerImpl compiler, DMNModelImpl model) {
Decision decision = (Decision) de;
DecisionNodeImpl dn = new DecisionNodeImpl( decision );
DMNType type = null;
if ( decision.getVariable() == null ) {
DMNCompilerHelper.reportMissingVariable( model, de, decision, Msg.MISSING_VARIABLE_FOR_DECISION );
return;
}
DMNCompilerHelper.checkVariableName( model, decision, decision.getName() );
if ( decision.getVariable() != null && decision.getVariable().getTypeRef() != null ) {
type = compiler.resolveTypeRef(model, decision, decision.getVariable(), decision.getVariable().getTypeRef());
} else {
type = compiler.resolveTypeRef(model, decision, decision, null);
}
dn.setResultType( type );
model.addDecision( dn );
}
@Override
public boolean accept(DMNNode node) {
return node instanceof DecisionNodeImpl;
}
@Override
public void compileEvaluator(DMNNode node, DMNCompilerImpl compiler, DMNCompilerContext ctx, DMNModelImpl model) {
DecisionNodeImpl di = (DecisionNodeImpl) node;
compiler.linkRequirements( model, di );
ctx.enterFrame();
try {
Map<String, DMNType> importedTypes = new HashMap<>();
for( DMNNode dep : di.getDependencies().values() ) {
if( dep instanceof DecisionNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((DecisionNode) dep).getResultType());
} else {
// then the Decision dependency is an imported Decision.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionNode) dep).getResultType());
}
}
} else if( dep instanceof InputDataNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((InputDataNode) dep).getType());
} else {
// then the InputData dependency is an imported InputData.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((InputDataNode) dep).getType());
}
}
} else if( dep instanceof BusinessKnowledgeModelNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
                        // might need to create a DMNType for "functions" and replace the type here with it
ctx.setVariable(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
}
}
} else if (dep instanceof DecisionServiceNode) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
                        // might need to create a DMNType for "functions" and replace the type here with it
ctx.setVariable(dep.getName(), ((DecisionServiceNode) dep).getResultType());
} else {
                        // then the DecisionService dependency is an imported DecisionService.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionServiceNode) dep).getResultType());
}
}
}
}
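            // Illustrative effect of the aliasing above (an assumption): if
            // this model imports namespace "ns2" under alias "ext" and
            // depends on ext's decision "Risk", the frame gains a composite
            // variable "ext" with a field "Risk", so FEEL expressions in
            // this model can reference ext.Risk.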
for (Entry<String, DMNType> importedType : importedTypes.entrySet()) {
ctx.setVariable(importedType.getKey(), importedType.getValue());
}
DMNExpressionEvaluator evaluator = compiler.getEvaluatorCompiler().compileExpression( ctx, model, di, di.getName(), di.getDecision().getExpression() );
di.setEvaluator( evaluator );
} finally {
ctx.exitFrame();
}
}
} | romartin/drools | kie-dmn/kie-dmn-core/src/main/java/org/kie/dmn/core/compiler/DecisionCompiler.java | Java | apache-2.0 | 6,980 |
/*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios;
import com.google.common.collect.ImmutableList;
import org.libimobiledevice.ios.driver.binding.exceptions.SDKException;
import org.libimobiledevice.ios.driver.binding.model.ApplicationInfo;
import org.libimobiledevice.ios.driver.binding.model.DeviceInfo;
import org.libimobiledevice.ios.driver.binding.services.DeviceCallBack;
import org.libimobiledevice.ios.driver.binding.services.DeviceService;
import org.libimobiledevice.ios.driver.binding.services.IOSDevice;
import org.libimobiledevice.ios.driver.binding.services.ImageMountingService;
import org.libimobiledevice.ios.driver.binding.services.InformationService;
import org.libimobiledevice.ios.driver.binding.services.InstallerService;
import org.openqa.selenium.WebDriverException;
import org.uiautomation.ios.application.IPAShellApplication;
import org.uiautomation.ios.utils.DDILocator;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;
public class DeviceStore extends DeviceCallBack {
private static final Logger log = Logger.getLogger(DeviceStore.class.getName());
private final List<RealDevice> reals = new CopyOnWriteArrayList<RealDevice>();
private final List<SimulatorDevice> sims = new CopyOnWriteArrayList<SimulatorDevice>();
private final ApplicationStore apps;
private final Set<String> uuidWhitelist;
public DeviceStore(ApplicationStore apps, Set<String> uuidWhitelist) {
super();
this.apps = apps;
this.uuidWhitelist = uuidWhitelist;
}
/**
* @return immutable copy of the currently available devices.
*/
public List<Device> getDevices() {
List<Device> all = new ArrayList<Device>();
all.addAll(reals);
all.addAll(sims);
return ImmutableList.copyOf(all);
}
public List<RealDevice> getRealDevices() {
return reals;
}
public List<SimulatorDevice> getSimulatorDevices() {
return sims;
}
public void add(SimulatorDevice simulatorDevice) {
sims.add(simulatorDevice);
}
@Override
protected void onDeviceAdded(String uuid) {
if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
log.info("device detected but not whitelisted");
return;
}
RealDevice d = null;
try {
IOSDevice device = DeviceService.get(uuid);
DeviceInfo info = new DeviceInfo(uuid);
d = new RealDevice(info);
log.info("new device detected (" + uuid + ") " + info.getDeviceName());
reals.add(d);
InstallerService s = new InstallerService(device);
String id = "com.apple.mobilesafari";
ApplicationInfo safari = s.getApplication(id);
String v = (String) safari.getProperty("CFBundleVersion");
log.info("device " + info.getDeviceName() + " = safari " + v);
IPAShellApplication ipa = new IPAShellApplication(id, v, safari);
apps.add(ipa);
InformationService i = new InformationService(device);
if (!i.isDevModeEnabled()) {
log.warning(
"The device " + uuid + " is not set to dev mode. It can't be used for testing.");
File ddi = DDILocator.locateDDI(device);
mount(device, ddi);
log.info("DDI mounted.Device now in dev mode.");
}
} catch (SDKException | WebDriverException e) {
if (d != null) {
reals.remove(d);
}
}
}
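  // Flow sketch (an assumption, for illustration): plugging in a whitelisted
  // device fires onDeviceAdded(uuid), which registers a RealDevice, exposes
  // the preinstalled MobileSafari as an IPAShellApplication, and, if the
  // device is not yet in dev mode, locates and mounts the matching DDI
  // image so the device can be used for testing.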
private void mount(IOSDevice device, File ddi) throws SDKException {
ImageMountingService service = null;
try {
service = new ImageMountingService(device);
service.mount(ddi);
} finally {
if (service != null) {
service.free();
}
}
}
@Override
protected void onDeviceRemoved(String uuid) {
if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
log.info("device removed but not whitelisted");
return;
}
for (RealDevice d : reals) {
if (d.getUuid().equals(uuid)) {
log.info("Removing " + uuid + " for the devices pool");
boolean ok = reals.remove(d);
if (!ok) {
log.warning("device " + uuid + " has been unplugged, but was never there ?");
}
}
}
}
}
| darraghgrace/ios-driver | server/src/main/java/org/uiautomation/ios/DeviceStore.java | Java | apache-2.0 | 4,836 |
/* (c) British Telecommunications plc, 2009, All Rights Reserved */
package com.bt.pi.sss;
import com.bt.pi.app.common.entities.User;
public interface UserManager {
boolean userExists(String accessKey);
User getUserByAccessKey(String accessKey);
}
| barnyard/pi-sss | src/main/java/com/bt/pi/sss/UserManager.java | Java | apache-2.0 | 259 |
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.allegro.foggerexample.config;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import pl.allegro.foggerexample.config.application.ApplicationRunConfiguration;
import pl.allegro.foggerexample.config.dagger.Injector;
import pl.allegro.foggerexample.config.dagger.module.RootModule;
public class FoggerExampleApplication extends Application {
private static FoggerExampleApplication instance;
private ApplicationRunConfiguration applicationRunConfiguration;
public FoggerExampleApplication() {
}
public FoggerExampleApplication(final Context context) {
super();
attachBaseContext(context);
setInstance(this);
}
@Override
public void onCreate() {
super.onCreate();
initDaggerOnApplicationCreationStep();
Injector.inject(this);
}
private void initDaggerOnApplicationCreationStep() {
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
applicationRunConfiguration = ApplicationRunConfiguration.create(sharedPreferences);
Object[] modules = new Object[]{new RootModule()};
Injector.init(modules);
Injector.injectStatics();
}
private static void setInstance(FoggerExampleApplication foggerExampleApplication) {
instance = foggerExampleApplication;
}
public FoggerExampleApplication(final Instrumentation instrumentation) {
super();
attachBaseContext(instrumentation.getTargetContext());
}
public static FoggerExampleApplication getInstance() {
return instance;
}
}
| RyanTech/fogger | example/src/main/java/pl/allegro/foggerexample/config/FoggerExampleApplication.java | Java | apache-2.0 | 2,363 |
/*
* Copyright 2015 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.jso.impl;
import java.io.IOException;
/**
*
* @author Alexey Andreev
*/
interface NameEmitter {
void emit(int precedence) throws IOException;
}
| jtulach/teavm | jso/impl/src/main/java/org/teavm/jso/impl/NameEmitter.java | Java | apache-2.0 | 781 |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201502.advancedoperations;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201502.cm.AdvertisingChannelType;
import com.google.api.ads.adwords.axis.v201502.cm.ApiException;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Budget;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetDeliveryMethod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetPeriod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Campaign;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignOperation;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Money;
import com.google.api.ads.adwords.axis.v201502.cm.NetworkSetting;
import com.google.api.ads.adwords.axis.v201502.cm.Operator;
import com.google.api.ads.adwords.axis.v201502.cm.SharedBiddingStrategy;
import com.google.api.ads.adwords.axis.v201502.cm.TargetSpendBiddingScheme;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
import javax.xml.rpc.ServiceException;
/**
* This example adds a Shared Bidding Strategy and uses it to construct a campaign.
*/
public class UseSharedBiddingStrategy {
// Optional: If you'd like to use an existing shared budget, assign a
// shared budget ID here.
private static final Long SHARED_BUDGET_ID = null;
public static void main(String[] args) throws Exception {
Credential oAuth2Credential = new OfflineCredentials.Builder()
.forApi(Api.ADWORDS)
.fromFile()
.build()
.generateCredential();
// Construct an AdWordsSession.
AdWordsSession session = new AdWordsSession.Builder()
.fromFile()
.withOAuth2Credential(oAuth2Credential)
.build();
AdWordsServices adWordsServices = new AdWordsServices();
runExample(adWordsServices, session, SHARED_BUDGET_ID);
}
public static void runExample(AdWordsServices adWordsServices, AdWordsSession session,
Long sharedBudgetId) throws Exception {
SharedBiddingStrategy sharedBiddingStrategy = createBiddingStrategy(adWordsServices, session);
if (sharedBudgetId == null) {
Budget budget = createSharedBudget(adWordsServices, session);
sharedBudgetId = budget.getBudgetId();
}
createCampaignWithBiddingStrategy(adWordsServices, session, sharedBiddingStrategy.getId(),
sharedBudgetId);
}
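  // Sequence sketch (restating the steps implemented below): create a shared
  // TargetSpend ("Maximize Clicks") bidding strategy, reuse or create an
  // explicitly shared budget, then attach both by ID to a new SEARCH
  // campaign.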
/**
* Creates the bidding strategy object.
*
   * @param adWordsServices the AdWordsServices instance used to create service clients
* @param session the AdWordsSession
* @throws RemoteException
* @throws ApiException
* @throws ServiceException
*/
private static SharedBiddingStrategy createBiddingStrategy(AdWordsServices adWordsServices,
AdWordsSession session)
throws ApiException, RemoteException, ServiceException {
// Get the BiddingStrategyService, which loads the required classes.
BiddingStrategyServiceInterface biddingStrategyService =
adWordsServices.get(session, BiddingStrategyServiceInterface.class);
// Create a shared bidding strategy.
SharedBiddingStrategy sharedBiddingStrategy = new SharedBiddingStrategy();
sharedBiddingStrategy.setName("Maximize Clicks" + System.currentTimeMillis());
TargetSpendBiddingScheme biddingScheme = new TargetSpendBiddingScheme();
// Optionally set additional bidding scheme parameters.
biddingScheme.setBidCeiling(new Money(null, 2000000L));
biddingScheme.setSpendTarget(new Money(null, 20000000L));
sharedBiddingStrategy.setBiddingScheme(biddingScheme);
// Create operation.
BiddingStrategyOperation operation = new BiddingStrategyOperation();
operation.setOperand(sharedBiddingStrategy);
operation.setOperator(Operator.ADD);
BiddingStrategyOperation[] operations = new BiddingStrategyOperation[] {operation};
BiddingStrategyReturnValue result = biddingStrategyService.mutate(operations);
SharedBiddingStrategy newBiddingStrategy = result.getValue(0);
System.out.printf(
"Shared bidding strategy with name '%s' and ID %d of type %s was created.\n",
newBiddingStrategy.getName(), newBiddingStrategy.getId(),
newBiddingStrategy.getBiddingScheme().getBiddingSchemeType());
return newBiddingStrategy;
}
/**
* Creates an explicit budget to be used only to create the Campaign.
*
   * @param adWordsServices the AdWordsServices instance used to create service clients
* @param session the AdWordsSession
* @throws ServiceException
* @throws RemoteException
* @throws ApiException
*/
private static Budget createSharedBudget(AdWordsServices adWordsServices,
AdWordsSession session)
throws ServiceException, ApiException, RemoteException {
// Get the BudgetService, which loads the required classes.
BudgetServiceInterface budgetService =
adWordsServices.get(session, BudgetServiceInterface.class);
// Create a shared budget.
Budget budget = new Budget();
budget.setName("Shared Interplanetary Budget #" + System.currentTimeMillis());
budget.setPeriod(BudgetBudgetPeriod.DAILY);
budget.setAmount(new Money(null, 50000000L));
budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD);
budget.setIsExplicitlyShared(true);
BudgetOperation operation = new BudgetOperation();
operation.setOperand(budget);
operation.setOperator(Operator.ADD);
BudgetOperation[] operations = new BudgetOperation[] {operation};
// Make the mutate request.
BudgetReturnValue result = budgetService.mutate(operations);
Budget newBudget = result.getValue(0);
System.out.printf("Budget with name '%s', ID %d was created.\n", newBudget.getName(),
newBudget.getBudgetId());
return newBudget;
}
/**
* Create a Campaign with a Shared Bidding Strategy.
*
   * @param adWordsServices the AdWordsServices instance used to create service clients
* @param session the AdWordsSession
* @param biddingStrategyId the bidding strategy id to use
* @param sharedBudgetId the shared budget id to use
* @throws RemoteException
* @throws ApiException
* @throws ServiceException
*/
private static Campaign createCampaignWithBiddingStrategy(
AdWordsServices adWordsServices, AdWordsSession session, Long biddingStrategyId,
Long sharedBudgetId) throws ApiException, RemoteException, ServiceException {
// Get the CampaignService, which loads the required classes.
CampaignServiceInterface campaignService =
adWordsServices.get(session, CampaignServiceInterface.class);
// Create campaign.
Campaign campaign = new Campaign();
campaign.setName("Interplanetary Cruise #" + System.currentTimeMillis());
// Set the budget.
Budget budget = new Budget();
budget.setBudgetId(sharedBudgetId);
campaign.setBudget(budget);
// Set bidding strategy (required).
BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
biddingStrategyConfiguration.setBiddingStrategyId(biddingStrategyId);
campaign.setBiddingStrategyConfiguration(biddingStrategyConfiguration);
// Set advertising channel type (required).
campaign.setAdvertisingChannelType(AdvertisingChannelType.SEARCH);
// Set network targeting (recommended).
NetworkSetting networkSetting = new NetworkSetting();
networkSetting.setTargetGoogleSearch(true);
networkSetting.setTargetSearchNetwork(true);
networkSetting.setTargetContentNetwork(true);
campaign.setNetworkSetting(networkSetting);
// Create operation.
CampaignOperation operation = new CampaignOperation();
operation.setOperand(campaign);
operation.setOperator(Operator.ADD);
CampaignReturnValue result = campaignService.mutate(new CampaignOperation[] {operation});
Campaign newCampaign = result.getValue(0);
System.out.printf("Campaign with name '%s', ID %d and bidding scheme ID %d was created.\n",
newCampaign.getName(), newCampaign.getId(),
newCampaign.getBiddingStrategyConfiguration().getBiddingStrategyId());
return newCampaign;
}
}
| raja15792/googleads-java-lib | examples/adwords_axis/src/main/java/adwords/axis/v201502/advancedoperations/UseSharedBiddingStrategy.java | Java | apache-2.0 | 9,544 |
/*
* Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usu.sdl.openstorefront.web.init;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
/**
*
* @author dshurtleff
*/
@WebListener
public class ApplicationInit
implements ServletContextListener
{
@Override
public void contextInitialized(ServletContextEvent sce)
{
}
@Override
public void contextDestroyed(ServletContextEvent sce)
{
}
}
| skycow/Open-Storefront | server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/init/ApplicationInit.java | Java | apache-2.0 | 1,105 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.scanner;
import java.util.Collection;
import java.util.HashMap;
import org.drools.compiler.kie.builder.impl.MessageImpl;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.internal.builder.IncrementalResults;
import org.kie.internal.builder.InternalKieBuilder;
import static org.junit.Assert.*;
public class KieModuleIncrementalCompilationTest extends AbstractKieCiTest {
@Test
public void testCheckMetaDataAfterIncrementalDelete() throws Exception {
String drl1 = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
"end\n";
String drl2 = "package org.kie.scanner\n" +
"rule R2_2 when\n" +
" String( )\n" +
"then\n" +
"end\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem()
.write( "src/main/resources/r1.drl", drl1 )
.write( "src/main/resources/r2.drl", drl2 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 2, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
kfs.delete( "src/main/resources/r2.drl" );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/r2.drl" ).build();
assertEquals( 1, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
}
private HashMap<String, Collection<String>> getRuleNames( KieBuilder kieBuilder ) {
KieModuleMetaData kieModuleMetaData = getKieModuleMetaData( kieBuilder );
HashMap<String, Collection<String>> ruleNames = new HashMap<String, Collection<String>>();
for ( String packageName : kieModuleMetaData.getPackages() ) {
ruleNames.put( packageName, kieModuleMetaData.getRuleNamesInPackage( packageName ) );
}
return ruleNames;
}
private KieModuleMetaData getKieModuleMetaData( KieBuilder kieBuilder ) {
return KieModuleMetaData.Factory.newKieModuleMetaData( ( (InternalKieBuilder) kieBuilder ).getKieModuleIgnoringErrors() );
}
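    // Note (an assumption, for illustration): createFileSet(path).build()
    // performs an incremental build of only the named resources against the
    // previous build, and the IncrementalResults report just the messages
    // added or removed by that delta rather than the whole module's messages.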
@Test
public void testIncrementalCompilationFirstBuildHasErrors() throws Exception {
KieServices ks = KieServices.Factory.get();
//Malformed POM - No Version information
ReleaseId releaseId = ks.newReleaseId( "org.kie", "incremental-test-with-invalid pom", "" );
KieFileSystem kfs = createKieFileSystemWithKProject( ks );
kfs.writePomXML( getPom( releaseId ) );
//Valid
String drl1 =
"rule R1 when\n" +
" $s : String()\n" +
"then\n" +
"end\n";
//Invalid
String drl2 =
"rule R2 when\n" +
" $s : Strin( )\n" +
"then\n" +
"end\n";
//Write Rule 1 - No DRL errors, but POM is in error
kfs.write( "src/main/resources/KBase1/r1.drl", drl1 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 1,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
//Add file with error - expect 1 "added" error message
kfs.write( "src/main/resources/KBase1/r2.drl", drl2 );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/KBase1/r2.drl" ).build();
assertEquals( 1, addResults.getAddedMessages().size() );
assertEquals( 0, addResults.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithRuleFunctionRule() throws Exception {
String rule_1 = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
"end\n";
String rule_2 = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
" System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
" return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_1 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 0,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
assertEquals( 0, addResults1.getAddedMessages().size() );
assertEquals( 0, addResults1.getRemovedMessages().size() );
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_2 );
IncrementalResults addResults2 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
assertEquals( 0, addResults2.getAddedMessages().size() );
assertEquals( 0, addResults2.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithRuleThenFunction() throws Exception {
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
" System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
" return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 1,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
assertEquals( 0, addResults1.getAddedMessages().size() );
assertEquals( 1, addResults1.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithFunctionThenRule() throws Exception {
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
" System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
" return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 0,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
assertEquals( 0, addResults.getAddedMessages().size() );
assertEquals( 0, addResults.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithMultipleKieBases() throws Exception {
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
"then\n" +
"end\n";
String invalidRule = "package org.kie.scanner\n" +
"rule R2 when\n" +
" Cheese()\n" + // missing import
"then\n" +
"end\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = createKieFileSystemWithTwoKBases(ks);
kfs.write("src/main/resources/org/kie/scanner/rule.drl",
rule);
KieBuilder kieBuilder = ks.newKieBuilder(kfs).buildAll();
assertEquals(0,
kieBuilder.getResults().getMessages().size());
kfs.write("src/main/resources/org/kie/scanner/invalidRule.drl",
invalidRule);
IncrementalResults addResults = ((InternalKieBuilder) kieBuilder).createFileSet("src/main/resources/org/kie/scanner/invalidRule.drl").build();
assertEquals(2, addResults.getAddedMessages().size());
addResults
.getAddedMessages()
.stream()
.map(m -> (MessageImpl) m )
.forEach(m -> assertNotNull(m.getKieBaseName()));
}
private KieFileSystem createKieFileSystemWithTwoKBases(final KieServices ks) {
final KieModuleModel kproj = ks.newKieModuleModel();
kproj.newKieBaseModel("default").setDefault(true)
.setEqualsBehavior( EqualityBehaviorOption.EQUALITY )
.setEventProcessingMode( EventProcessingOption.STREAM );
kproj.newKieBaseModel("kbase1").setDefault(false)
.setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
.setEventProcessingMode(EventProcessingOption.STREAM);
final KieFileSystem kfs = ks.newKieFileSystem();
kfs.writeKModuleXML(kproj.toXML());
return kfs;
}
}
| reynoldsm88/drools | kie-ci/src/test/java/org/kie/scanner/KieModuleIncrementalCompilationTest.java | Java | apache-2.0 | 10,918 |
/*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender.grpc;
import com.navercorp.pinpoint.common.util.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* @author Woonduk Kang(emeroad)
*/
public class ReconnectExecutor {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private volatile boolean shutdown;
private final ScheduledExecutorService scheduledExecutorService;
private final AtomicLong rejectedCounter = new AtomicLong();
public ReconnectExecutor(ScheduledExecutorService scheduledExecutorService) {
this.scheduledExecutorService = Assert.requireNonNull(scheduledExecutorService, "scheduledExecutorService");
}
private void execute0(Runnable command) {
Assert.requireNonNull(command, "command");
if (shutdown) {
logger.debug("already shutdown");
return;
}
if (command instanceof ReconnectJob) {
ReconnectJob reconnectJob = (ReconnectJob) command;
try {
scheduledExecutorService.schedule(reconnectJob, reconnectJob.nextBackoffNanos(), TimeUnit.NANOSECONDS);
} catch (RejectedExecutionException e) {
final long failCount = rejectedCounter.incrementAndGet();
                logger.info("{} reconnectJob schedule failed, fail count {}", command, failCount);
}
} else {
throw new IllegalArgumentException("unknown command type " + command);
}
}
public void close() {
shutdown = true;
}
public Reconnector newReconnector(Runnable reconnectJob) {
Assert.requireNonNull(reconnectJob, "reconnectJob");
if (logger.isInfoEnabled()) {
logger.info("newReconnector(reconnectJob = [{}])", reconnectJob);
}
final Executor dispatch = new Executor() {
@Override
public void execute(Runnable command) {
ReconnectExecutor.this.execute0(command);
}
};
final ReconnectJob reconnectJobWrap = wrapReconnectJob(reconnectJob);
return new ReconnectAdaptor(dispatch, reconnectJobWrap);
}
private ReconnectJob wrapReconnectJob(Runnable runnable) {
return new ExponentialBackoffReconnectJob(runnable);
}
}
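// Illustrative usage sketch (not part of the original file): wiring a
// ReconnectExecutor to a single-threaded scheduler. Reconnector's own API is
// defined elsewhere in this package, so only setup and shutdown are shown.
class ReconnectExecutorExample {
    public static void main(String[] args) {
        java.util.concurrent.ScheduledExecutorService scheduler =
                java.util.concurrent.Executors.newSingleThreadScheduledExecutor();
        ReconnectExecutor reconnectExecutor = new ReconnectExecutor(scheduler);
        Reconnector reconnector = reconnectExecutor.newReconnector(
                () -> System.out.println("attempting reconnect"));
        // ... hand the reconnector to a stream/channel, then shut down:
        reconnectExecutor.close();
        scheduler.shutdown();
    }
}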
| suraj-raturi/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/sender/grpc/ReconnectExecutor.java | Java | apache-2.0 | 3,111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.internal.csv;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
/** Utility methods for dealing with CSV files */
public class CSVUtils {
private static final String[] EMPTY_STRING_ARRAY = new String[0];
private static final String[][] EMPTY_DOUBLE_STRING_ARRAY = new String[0][0];
/**
* <code>CSVUtils</code> instances should NOT be constructed in standard programming.
*
* <p>This constructor is public to permit tools that require a JavaBean instance to operate.
*/
public CSVUtils() {}
/**
* Converts an array of string values into a single CSV line. All <code>null</code> values are
   * converted to the string <code>"null"</code>; all strings equal to <code>"null"</code> are
   * additionally wrapped in quotes.
*
* @param values the value array
* @return the CSV string, will be an empty string if the length of the value array is 0
*/
public static String printLine(String[] values, CSVStrategy strategy) {
// set up a CSVUtils
StringWriter stringWriter = new StringWriter();
CSVPrinter csvPrinter = new CSVPrinter(stringWriter, strategy);
    // check for null values and "null" as strings, and convert them
    // into the strings "null" and "\"null\"" respectively
for (int i = 0; i < values.length; i++) {
if (values[i] == null) {
values[i] = "null";
} else if (values[i].equals("null")) {
values[i] = "\"null\"";
}
}
// convert to CSV
try {
csvPrinter.println(values);
} catch (IOException e) {
// should not happen with StringWriter
}
// as the resulting string has \r\n at the end, we will trim that away
return stringWriter.toString().trim();
}
// ======================================================
// static parsers
// ======================================================
/**
* Parses the given String according to the default {@link CSVStrategy}.
*
* @param s CSV String to be parsed.
* @return parsed String matrix (which is never null)
* @throws IOException in case of error
*/
public static String[][] parse(String s) throws IOException {
if (s == null) {
throw new IllegalArgumentException("Null argument not allowed.");
}
String[][] result = (new CSVParser(new StringReader(s))).getAllValues();
if (result == null) {
// since CSVStrategy ignores empty lines an empty array is returned
// (i.e. not "result = new String[][] {{""}};")
result = EMPTY_DOUBLE_STRING_ARRAY;
}
return result;
}
/**
* Parses the first line only according to the default {@link CSVStrategy}.
*
* <p>Parsing empty string will be handled as valid records containing zero elements, so the
* following property holds: parseLine("").length == 0.
*
* @param s CSV String to be parsed.
* @return parsed String vector (which is never null)
* @throws IOException in case of error
*/
public static String[] parseLine(String s) throws IOException {
if (s == null) {
throw new IllegalArgumentException("Null argument not allowed.");
}
// uh,jh: make sure that parseLine("").length == 0
if (s.length() == 0) {
return EMPTY_STRING_ARRAY;
}
return (new CSVParser(new StringReader(s))).getLine();
}
}
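// Illustrative usage sketch (not part of the original file): a round trip
// through printLine() and parseLine(). CSVStrategy.DEFAULT_STRATEGY is assumed
// to be the standard comma-separated configuration from the companion class.
class CSVUtilsExample {
    public static void main(String[] args) throws java.io.IOException {
        String line = CSVUtils.printLine(
                new String[] {"a", null, "null"}, CSVStrategy.DEFAULT_STRATEGY);
        // nulls become "null", and the literal string "null" is quoted
        String[] values = CSVUtils.parseLine(line);
        System.out.println(java.util.Arrays.toString(values));
    }
}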
| apache/solr | solr/core/src/java/org/apache/solr/internal/csv/CSVUtils.java | Java | apache-2.0 | 4,128 |
/*
* Autosleep
* Copyright (C) 2016 Orange
* Authors: Benjamin Einaudi benjamin.einaudi@orange.com
* Arnaud Ruffin arnaud.ruffin@orange.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.autosleep.ui.proxy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
@Configuration
public class HttpClientConfiguration {
@Value("${autowakeup.skip.ssl.validation:false}")
private boolean skipSslValidation;
private SSLContext buildSslContext(TrustManager trustManager) {
try {
SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, new TrustManager[]{trustManager}, null);
return sslContext;
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private TrustManager buildTrustAllCerts() {
return new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] certificates, String client) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] certificates, String client) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
};
}
private HostnameVerifier buildVerifyNoHostname() {
return (hostname, session) -> true;
}
@Bean
public RestTemplate restTemplate() {
if (!skipSslValidation) {
return new RestTemplate();
} else {
final HostnameVerifier hostnameVerifier = buildVerifyNoHostname();
final SSLContext sslContext = buildSslContext(buildTrustAllCerts());
return new RestTemplate(new SimpleClientHttpRequestFactory() {
@Override
protected void prepareConnection(HttpURLConnection connection, String httpMethod) throws IOException {
if (connection instanceof HttpsURLConnection) {
HttpsURLConnection secureConnection = (HttpsURLConnection) connection;
secureConnection.setHostnameVerifier(hostnameVerifier);
secureConnection.setSSLSocketFactory(sslContext.getSocketFactory());
}
super.prepareConnection(connection, httpMethod);
}
});
}
}
}
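// Illustrative note (not part of the original file): the permissive client above
// is only built when the property bound to skipSslValidation is enabled, e.g. in
// application.properties:
//
//   autowakeup.skip.ssl.validation=true
//
// With the default of false, a plain RestTemplate with normal certificate and
// hostname validation is returned instead.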
| pradyutsarma/autosleep | spring-apps/autowakeup-proxy/src/main/java/org/cloudfoundry/autosleep/ui/proxy/HttpClientConfiguration.java | Java | apache-2.0 | 3,689 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.event.observer.transactional;
import static javax.ejb.TransactionManagementType.BEAN;
import static javax.enterprise.event.TransactionPhase.AFTER_COMPLETION;
import static javax.enterprise.event.TransactionPhase.AFTER_FAILURE;
import static javax.enterprise.event.TransactionPhase.AFTER_SUCCESS;
import static javax.enterprise.event.TransactionPhase.BEFORE_COMPLETION;
import static javax.enterprise.event.TransactionPhase.IN_PROGRESS;
import java.io.Serializable;
import javax.annotation.Priority;
import javax.ejb.Stateful;
import javax.ejb.TransactionManagement;
import javax.enterprise.context.SessionScoped;
import javax.enterprise.event.Observes;
@Stateful
@TransactionManagement(BEAN)
@Tame
@SessionScoped
@SuppressWarnings("serial")
public class Pomeranian implements PomeranianInterface, Serializable {
@Override
public void observeInProgress(@Observes(during = IN_PROGRESS) Bark event) {
Actions.add(IN_PROGRESS);
}
@Override
public void observeAfterCompletion(@Observes(during = AFTER_COMPLETION) Bark someEvent) {
Actions.add(AFTER_COMPLETION);
}
@Override
public void observeAfterSuccess(@Observes(during = AFTER_SUCCESS) Bark event) {
Actions.add(AFTER_SUCCESS);
}
@Override
public void observeAfterSuccessWithHighPriority(@Priority(1) @Observes(during = AFTER_SUCCESS) Bark event) {
Actions.add(AFTER_SUCCESS + "1");
}
@Override
public void observeAfterSuccessWithLowPriority(@Priority(100) @Observes(during = AFTER_SUCCESS) Bark event) {
Actions.add(AFTER_SUCCESS + "100");
}
@Override
public void observeAfterFailure(@Observes(during = AFTER_FAILURE) Bark event) {
Actions.add(AFTER_FAILURE);
}
@Override
public void observeBeforeCompletion(@Observes(during = BEFORE_COMPLETION) Bark event) {
Actions.add(BEFORE_COMPLETION);
}
@Override
public void observeAndFail(@Observes(during=BEFORE_COMPLETION) @Gnarly Bark event) throws FooException {
Actions.add(BEFORE_COMPLETION);
throw new FooException();
}
}
| antoinesd/weld-core | tests-arquillian/src/test/java/org/jboss/weld/tests/event/observer/transactional/Pomeranian.java | Java | apache-2.0 | 2,906 |
/**
* Copyright (c) Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0.
*
* THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
* ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
* MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
package com.interopbridges.scx.webservices;
/**
*
* <p>
* Concrete representation of an Endpoint to match what is described in the web
* service's WSDL.
* </p>
*
* <p>
*
* <pre>
* <service name="CalculatorService">
* <port name="CalculatorPort" binding="tns:CalculatorPortBinding">
* <soap:address location="http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService" />
* </port>
* </service>
* </pre>
*
* </p>
*
* <p>
* Typically this might look like:
* <ol>
* <li><b>http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService</b></li>
 * <li><b>http://scxom-ws7-02:8080/axis2/services/DinnerFinderService
 * DinnerFinderServiceHttpSoap11Endpoint/</b></li>
 * </ol>
* </p>
*
* @author Christopher Crammond
*/
public class Endpoint implements EndpointMBean {
/**
 * Key describing the (interopbridges) JMX type of MBean
*/
private String _jmxType = "endpoint";
/**
* String representing the full URL of the endpoint address. This should
* match the soap:address's location attribute from the WSDL. <br>
*
*/
private String _url;
/**
* Empty Constructor. It is considered to be a best practice to create this
* default constructor rather than relying on the compiler to auto-generate
* it.
*/
public Endpoint() {
this._url = "";
}
/**
* Preferred Constructor
*
* @param url
* String representing the full URL of the endpoint address.
*/
public Endpoint(String url) {
this._url = url;
}
/*
* (non-Javadoc)
*
* @see com.interopbridges.scx.webservices.EndpointMBean#getUrl()
*/
public String getUrl() {
return this._url;
}
/*
* (non-Javadoc)
*
* @see com.interopbridges.scx.webservices.IMBean#getJmxType()
*/
public String getJmxType() {
return this._jmxType;
}
}
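// Illustrative usage sketch (not part of the original file): MBean registration
// is environment-specific, so only construction and the two getters are shown.
class EndpointExample {
    public static void main(String[] args) {
        Endpoint endpoint = new Endpoint(
                "http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService");
        System.out.println(endpoint.getJmxType() + ": " + endpoint.getUrl());
    }
}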
| Microsoft/BeanSpy | test/code/JEE/Common/src/com/interopbridges/scx/webservices/Endpoint.java | Java | apache-2.0 | 2,617 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.snapshot;
import java.io.Serializable;
import org.apache.geode.internal.cache.snapshot.SnapshotFileMapper;
/**
* Provides a way to configure the behavior of snapshot operations. The default options are:
* <dl>
* <dt>filter</dt>
* <dd>null</dd>
* </dl>
*
* @param <K> the cache entry key type
* @param <V> the cache entry value type
*
* @since GemFire 7.0
*/
public interface SnapshotOptions<K, V> extends Serializable {
/**
* Defines the available snapshot file formats.
*
* @since GemFire 7.0
*/
enum SnapshotFormat {
/** an optimized binary format specific to GemFire */
GEMFIRE
}
/**
* Sets a filter to apply to snapshot entries. Entries that are accepted by the filter will be
* included in import and export operations.
*
* @param filter the filter to apply, or null to remove the filter
* @return the snapshot options
*/
SnapshotOptions<K, V> setFilter(SnapshotFilter<K, V> filter);
/**
* Returns the filter to be applied to snapshot entries. Entries that are accepted by the filter
* will be included in import and export operations.
*
* @return the filter, or null if the filter is not set
*/
SnapshotFilter<K, V> getFilter();
/**
* Sets whether to invoke callbacks when loading a snapshot. The default is false.
*
   * @param invokeCallbacks whether callbacks should be invoked when loading a snapshot
*
* @return the snapshot options
*/
SnapshotOptions<K, V> invokeCallbacks(boolean invokeCallbacks);
/**
* Returns whether loading a snapshot causes callbacks to be invoked
*
* @return whether loading a snapshot causes callbacks to be invoked
*/
boolean shouldInvokeCallbacks();
/**
* Returns true if the snapshot operation will proceed in parallel.
*
* @return true if the parallel mode has been enabled
*
* @since Geode 1.3
*/
boolean isParallelMode();
/**
* Enables parallel mode for snapshot export, which will cause each member of a partitioned region
* to save its local data set (ignoring redundant copies) to a separate snapshot file.
*
* <p>
* Parallelizing snapshot operations may yield significant performance improvements for large data
* sets. This is particularly true when each member is writing to separate physical disks.
* <p>
* This flag is ignored for replicated regions.
*
* @param parallel true if the snapshot operations will be performed in parallel
* @return the snapshot options
*
* @see SnapshotFileMapper
*
* @since Geode 1.3
*/
SnapshotOptions<K, V> setParallelMode(boolean parallel);
}
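// Illustrative usage sketch (not part of the original file): a SnapshotOptions
// instance is normally obtained from a snapshot service (assumed here to expose
// a createOptions() factory), and SnapshotFilter is assumed to be a
// single-method interface usable as a lambda; the fluent calls below use only
// methods declared above.
//
//   SnapshotOptions<String, Order> options = snapshotService.createOptions();
//   options.setFilter(entry -> entry.getValue() != null)
//          .invokeCallbacks(false)
//          .setParallelMode(true);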
| charliemblack/geode | geode-core/src/main/java/org/apache/geode/cache/snapshot/SnapshotOptions.java | Java | apache-2.0 | 3,388 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.util.bean;
import java.io.Serializable;
import javax.enterprise.inject.spi.InjectionPoint;
import org.jboss.weld.injection.ForwardingInjectionPoint;
import org.jboss.weld.serialization.InjectionPointHolder;
public class SerializableForwardingInjectionPoint extends ForwardingInjectionPoint implements Serializable {
private static final long serialVersionUID = 7803445899943317029L;
private final InjectionPointHolder ip;
public SerializableForwardingInjectionPoint(String contextId, InjectionPoint ip) {
this.ip = new InjectionPointHolder(contextId, ip);
}
@Override
protected InjectionPoint delegate() {
return ip.get();
}
}
| antoinesd/weld-core | impl/src/main/java/org/jboss/weld/util/bean/SerializableForwardingInjectionPoint.java | Java | apache-2.0 | 1,478 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.flume.appender;
import java.util.Properties;
import org.apache.flume.Event;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.core.appender.ManagerFactory;
/**
* Manager for FlumeAvroAppenders.
*/
public class FlumeAvroManager extends AbstractFlumeManager {
private static final int MAX_RECONNECTS = 3;
private static final int MINIMUM_TIMEOUT = 1000;
private static AvroManagerFactory factory = new AvroManagerFactory();
private final Agent[] agents;
private final int batchSize;
private final int retries;
private final int connectTimeout;
private final int requestTimeout;
private final int current = 0;
private RpcClient rpcClient = null;
/**
* Constructor
* @param name The unique name of this manager.
     * @param shortName The short name of this manager (not used by this implementation).
     * @param agents An array of Agents.
* @param batchSize The number of events to include in a batch.
* @param retries The number of times to retry connecting before giving up.
* @param connectTimeout The connection timeout in ms.
* @param requestTimeout The request timeout in ms.
*
*/
protected FlumeAvroManager(final String name, final String shortName, final Agent[] agents, final int batchSize,
final int retries, final int connectTimeout, final int requestTimeout) {
super(name);
this.agents = agents;
this.batchSize = batchSize;
this.retries = retries;
this.connectTimeout = connectTimeout;
this.requestTimeout = requestTimeout;
this.rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
/**
* Returns a FlumeAvroManager.
* @param name The name of the manager.
* @param agents The agents to use.
* @param batchSize The number of events to include in a batch.
* @param retries The number of times to retry connecting before giving up.
* @param connectTimeout The connection timeout in ms.
* @param requestTimeout The request timeout in ms.
* @return A FlumeAvroManager.
*/
public static FlumeAvroManager getManager(final String name, final Agent[] agents, int batchSize,
final int retries, final int connectTimeout, final int requestTimeout) {
if (agents == null || agents.length == 0) {
throw new IllegalArgumentException("At least one agent is required");
}
if (batchSize <= 0) {
batchSize = 1;
}
final StringBuilder sb = new StringBuilder("FlumeAvro[");
boolean first = true;
for (final Agent agent : agents) {
if (!first) {
sb.append(",");
}
sb.append(agent.getHost()).append(":").append(agent.getPort());
first = false;
}
sb.append("]");
return getManager(sb.toString(), factory,
new FactoryData(name, agents, batchSize, retries, connectTimeout, requestTimeout));
}
/**
* Returns the agents.
* @return The agent array.
*/
public Agent[] getAgents() {
return agents;
}
/**
* Returns the index of the current agent.
* @return The index for the current agent.
*/
public int getCurrent() {
return current;
}
public int getRetries() {
return retries;
}
public int getConnectTimeout() {
return connectTimeout;
}
public int getRequestTimeout() {
return requestTimeout;
}
public int getBatchSize() {
return batchSize;
}
public synchronized void send(final BatchEvent events) {
if (rpcClient == null) {
rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
if (rpcClient != null) {
try {
LOGGER.trace("Sending batch of {} events", events.getEvents().size());
rpcClient.appendBatch(events.getEvents());
} catch (final Exception ex) {
rpcClient.close();
rpcClient = null;
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg, ex);
throw new AppenderLoggingException("No Flume agents are available");
}
} else {
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg);
throw new AppenderLoggingException("No Flume agents are available");
}
}
@Override
public synchronized void send(final Event event) {
if (rpcClient == null) {
rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
if (rpcClient != null) {
try {
rpcClient.append(event);
} catch (final Exception ex) {
rpcClient.close();
rpcClient = null;
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg, ex);
throw new AppenderLoggingException("No Flume agents are available");
}
} else {
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg);
throw new AppenderLoggingException("No Flume agents are available");
}
}
/**
     * Creates the RpcClient. There is a very good chance that this will always use the first agent
     * even if it isn't available.
     * @param agents The list of agents to choose from.
     * @param retries The number of times to retry connecting before giving up.
     * @param connectTimeout The connection timeout in ms.
     * @param requestTimeout The request timeout in ms.
     * @return The RpcClient, or null if it could not be created.
*/
private RpcClient connect(final Agent[] agents, int retries, final int connectTimeout, final int requestTimeout) {
try {
final Properties props = new Properties();
props.put("client.type", agents.length > 1 ? "default_failover" : "default");
int count = 1;
final StringBuilder sb = new StringBuilder();
for (final Agent agent : agents) {
if (sb.length() > 0) {
sb.append(" ");
}
final String hostName = "host" + count++;
props.put("hosts." + hostName, agent.getHost() + ":" + agent.getPort());
sb.append(hostName);
}
props.put("hosts", sb.toString());
if (batchSize > 0) {
props.put("batch-size", Integer.toString(batchSize));
}
if (retries > 1) {
if (retries > MAX_RECONNECTS) {
retries = MAX_RECONNECTS;
}
props.put("max-attempts", Integer.toString(retries * agents.length));
}
if (requestTimeout >= MINIMUM_TIMEOUT) {
props.put("request-timeout", Integer.toString(requestTimeout));
}
if (connectTimeout >= MINIMUM_TIMEOUT) {
props.put("connect-timeout", Integer.toString(connectTimeout));
}
return RpcClientFactory.getInstance(props);
} catch (final Exception ex) {
LOGGER.error("Unable to create Flume RPCClient: {}", ex.getMessage());
return null;
}
}
@Override
protected void releaseSub() {
if (rpcClient != null) {
try {
rpcClient.close();
} catch (final Exception ex) {
LOGGER.error("Attempt to close RPC client failed", ex);
}
}
rpcClient = null;
}
/**
* Factory data.
*/
private static class FactoryData {
private final String name;
private final Agent[] agents;
private final int batchSize;
private final int retries;
        private final int connectTimeout;
private final int requestTimeout;
/**
* Constructor.
* @param name The name of the Appender.
* @param agents The agents.
* @param batchSize The number of events to include in a batch.
*/
public FactoryData(final String name, final Agent[] agents, final int batchSize, final int retries,
final int connectTimeout, final int requestTimeout) {
this.name = name;
this.agents = agents;
this.batchSize = batchSize;
this.retries = retries;
            this.connectTimeout = connectTimeout;
this.requestTimeout = requestTimeout;
}
}
/**
* Avro Manager Factory.
*/
private static class AvroManagerFactory implements ManagerFactory<FlumeAvroManager, FactoryData> {
/**
* Create the FlumeAvroManager.
* @param name The name of the entity to manage.
* @param data The data required to create the entity.
* @return The FlumeAvroManager.
*/
@Override
public FlumeAvroManager createManager(final String name, final FactoryData data) {
try {
return new FlumeAvroManager(name, data.name, data.agents, data.batchSize, data.retries,
                        data.connectTimeout, data.requestTimeout);
} catch (final Exception ex) {
LOGGER.error("Could not create FlumeAvroManager", ex);
}
return null;
}
}
}
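// Illustrative note (not part of the original file): for two agents
// a.example.com:5050 and b.example.com:5050 with batchSize=100 and retries=2,
// connect() above assembles RpcClientFactory properties equivalent to:
//
//   client.type   = default_failover
//   hosts         = host1 host2
//   hosts.host1   = a.example.com:5050
//   hosts.host2   = b.example.com:5050
//   batch-size    = 100
//   max-attempts  = 4   (retries * number of agents)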
| ClarenceAu/log4j2 | log4j-flume-ng/src/main/java/org/apache/logging/log4j/flume/appender/FlumeAvroManager.java | Java | apache-2.0 | 10,591 |
package net.stickycode.configuration.value;
import net.stickycode.configuration.ConfigurationValue;
public class SystemValue
implements ConfigurationValue {
private String value;
public SystemValue(String value) {
this.value = value;
}
@Override
public String get() {
return value;
}
@Override
public boolean hasPrecedence(ConfigurationValue v) {
if (ApplicationValue.class.isAssignableFrom(v.getClass()))
return false;
if (SystemValue.class.isAssignableFrom(v.getClass()))
return false;
return true;
}
@Override
public String toString() {
return getClass().getSimpleName() + "{" + value + "}";
}
}
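// Illustrative sketch (not part of the original file): exercising the
// precedence rule with only this class. Another SystemValue never wins, so
// hasPrecedence() returns false here; any value type other than
// ApplicationValue or SystemValue would yield true.
class SystemValueExample {
    public static void main(String[] args) {
        SystemValue value = new SystemValue("from-system");
        System.out.println(value.hasPrecedence(new SystemValue("other"))); // false
    }
}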
| walterDurin/stickycode | net.stickycode.configuration/sticky-configuration/src/main/java/net/stickycode/configuration/value/SystemValue.java | Java | apache-2.0 | 672 |
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.ebay.marketplace.search.v1.services;
import com.leansoft.nano.annotation.*;
/**
*
* Reserved for future use.
*
*/
@RootElement(name = "findItemsForFavoriteSearchResponse", namespace = "http://www.ebay.com/marketplace/search/v1/services")
public class FindItemsForFavoriteSearchResponse extends BaseFindingServiceResponse {
@Element
private CategoryHistogramContainer categoryHistogramContainer;
@Element
private AspectHistogramContainer aspectHistogramContainer;
@Element
private ConditionHistogramContainer conditionHistogramContainer;
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.CategoryHistogramContainer
*/
public CategoryHistogramContainer getCategoryHistogramContainer() {
return this.categoryHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.CategoryHistogramContainer
*/
public void setCategoryHistogramContainer(CategoryHistogramContainer categoryHistogramContainer) {
this.categoryHistogramContainer = categoryHistogramContainer;
}
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.AspectHistogramContainer
*/
public AspectHistogramContainer getAspectHistogramContainer() {
return this.aspectHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.AspectHistogramContainer
*/
public void setAspectHistogramContainer(AspectHistogramContainer aspectHistogramContainer) {
this.aspectHistogramContainer = aspectHistogramContainer;
}
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.ConditionHistogramContainer
*/
public ConditionHistogramContainer getConditionHistogramContainer() {
return this.conditionHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.ConditionHistogramContainer
*/
public void setConditionHistogramContainer(ConditionHistogramContainer conditionHistogramContainer) {
this.conditionHistogramContainer = conditionHistogramContainer;
}
} | bulldog2011/nano-rest | sample/EBaySearch/src/com/ebay/marketplace/search/v1/services/FindItemsForFavoriteSearchResponse.java | Java | apache-2.0 | 2,540 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.benchmark.impl.statistic.bestscore;
import java.util.List;
import org.optaplanner.benchmark.config.statistic.ProblemStatisticType;
import org.optaplanner.benchmark.impl.result.SubSingleBenchmarkResult;
import org.optaplanner.benchmark.impl.statistic.ProblemBasedSubSingleStatistic;
import org.optaplanner.core.api.domain.solution.Solution;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.api.solver.event.BestSolutionChangedEvent;
import org.optaplanner.core.api.solver.event.SolverEventListener;
import org.optaplanner.core.impl.score.definition.ScoreDefinition;
public class BestScoreSubSingleStatistic extends ProblemBasedSubSingleStatistic<BestScoreStatisticPoint> {
private final BestScoreSubSingleStatisticListener listener;
public BestScoreSubSingleStatistic(SubSingleBenchmarkResult subSingleBenchmarkResult) {
super(subSingleBenchmarkResult, ProblemStatisticType.BEST_SCORE);
listener = new BestScoreSubSingleStatisticListener();
}
// ************************************************************************
// Lifecycle methods
// ************************************************************************
public void open(Solver solver) {
solver.addEventListener(listener);
}
public void close(Solver solver) {
solver.removeEventListener(listener);
}
private class BestScoreSubSingleStatisticListener implements SolverEventListener<Solution> {
public void bestSolutionChanged(BestSolutionChangedEvent<Solution> event) {
pointList.add(new BestScoreStatisticPoint(
event.getTimeMillisSpent(), event.getNewBestSolution().getScore()));
}
}
// ************************************************************************
// CSV methods
// ************************************************************************
@Override
protected String getCsvHeader() {
return BestScoreStatisticPoint.buildCsvLine("timeMillisSpent", "score");
}
@Override
protected BestScoreStatisticPoint createPointFromCsvLine(ScoreDefinition scoreDefinition,
List<String> csvLine) {
return new BestScoreStatisticPoint(Long.valueOf(csvLine.get(0)),
scoreDefinition.parseScore(csvLine.get(1)));
}
}
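// Illustrative note (not part of the original file): with the header above, the
// persisted CSV might look like
//
//   timeMillisSpent,score
//   3500,0hard/-21soft
//
// and createPointFromCsvLine() feeds the second column back through the
// ScoreDefinition to rebuild each BestScoreStatisticPoint.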
| eshen1991/optaplanner | optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/statistic/bestscore/BestScoreSubSingleStatistic.java | Java | apache-2.0 | 2,938 |
/**
* Copyright (C) 2011-2012 Typesafe Inc. <http://typesafe.com>
*/
package com.typesafe.config.impl;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigObject;
import com.typesafe.config.ConfigOrigin;
import com.typesafe.config.ConfigRenderOptions;
import com.typesafe.config.ConfigValue;
final class SimpleConfigObject extends AbstractConfigObject implements Serializable {
private static final long serialVersionUID = 2L;
// this map should never be modified - assume immutable
final private Map<String, AbstractConfigValue> value;
final private boolean resolved;
final private boolean ignoresFallbacks;
SimpleConfigObject(ConfigOrigin origin,
Map<String, AbstractConfigValue> value, ResolveStatus status,
boolean ignoresFallbacks) {
super(origin);
if (value == null)
throw new ConfigException.BugOrBroken(
"creating config object with null map");
this.value = value;
this.resolved = status == ResolveStatus.RESOLVED;
this.ignoresFallbacks = ignoresFallbacks;
// Kind of an expensive debug check. Comment out?
if (status != ResolveStatus.fromValues(value.values()))
throw new ConfigException.BugOrBroken("Wrong resolved status on " + this);
}
SimpleConfigObject(ConfigOrigin origin,
Map<String, AbstractConfigValue> value) {
this(origin, value, ResolveStatus.fromValues(value.values()), false /* ignoresFallbacks */);
}
@Override
public SimpleConfigObject withOnlyKey(String key) {
return withOnlyPath(Path.newKey(key));
}
@Override
public SimpleConfigObject withoutKey(String key) {
return withoutPath(Path.newKey(key));
}
// gets the object with only the path if the path
// exists, otherwise null if it doesn't. this ensures
// that if we have { a : { b : 42 } } and do
// withOnlyPath("a.b.c") that we don't keep an empty
// "a" object.
@Override
protected SimpleConfigObject withOnlyPathOrNull(Path path) {
String key = path.first();
Path next = path.remainder();
AbstractConfigValue v = value.get(key);
if (next != null) {
if (v != null && (v instanceof AbstractConfigObject)) {
v = ((AbstractConfigObject) v).withOnlyPathOrNull(next);
} else {
// if the path has more elements but we don't have an object,
// then the rest of the path does not exist.
v = null;
}
}
if (v == null) {
return null;
} else {
return new SimpleConfigObject(origin(), Collections.singletonMap(key, v),
v.resolveStatus(), ignoresFallbacks);
}
}
@Override
SimpleConfigObject withOnlyPath(Path path) {
SimpleConfigObject o = withOnlyPathOrNull(path);
if (o == null) {
return new SimpleConfigObject(origin(),
Collections.<String, AbstractConfigValue> emptyMap(), ResolveStatus.RESOLVED,
ignoresFallbacks);
} else {
return o;
}
}
@Override
SimpleConfigObject withoutPath(Path path) {
String key = path.first();
Path next = path.remainder();
AbstractConfigValue v = value.get(key);
if (v != null && next != null && v instanceof AbstractConfigObject) {
v = ((AbstractConfigObject) v).withoutPath(next);
Map<String, AbstractConfigValue> updated = new HashMap<String, AbstractConfigValue>(
value);
updated.put(key, v);
return new SimpleConfigObject(origin(), updated, ResolveStatus.fromValues(updated
.values()), ignoresFallbacks);
} else if (next != null || v == null) {
// can't descend, nothing to remove
return this;
} else {
Map<String, AbstractConfigValue> smaller = new HashMap<String, AbstractConfigValue>(
value.size() - 1);
for (Map.Entry<String, AbstractConfigValue> old : value.entrySet()) {
if (!old.getKey().equals(key))
smaller.put(old.getKey(), old.getValue());
}
return new SimpleConfigObject(origin(), smaller, ResolveStatus.fromValues(smaller
.values()), ignoresFallbacks);
}
}
@Override
public SimpleConfigObject withValue(String key, ConfigValue v) {
if (v == null)
throw new ConfigException.BugOrBroken(
"Trying to store null ConfigValue in a ConfigObject");
Map<String, AbstractConfigValue> newMap;
if (value.isEmpty()) {
newMap = Collections.singletonMap(key, (AbstractConfigValue) v);
} else {
newMap = new HashMap<String, AbstractConfigValue>(value);
newMap.put(key, (AbstractConfigValue) v);
}
return new SimpleConfigObject(origin(), newMap, ResolveStatus.fromValues(newMap.values()),
ignoresFallbacks);
}
@Override
SimpleConfigObject withValue(Path path, ConfigValue v) {
String key = path.first();
Path next = path.remainder();
if (next == null) {
return withValue(key, v);
} else {
AbstractConfigValue child = value.get(key);
if (child != null && child instanceof AbstractConfigObject) {
// if we have an object, add to it
return withValue(key, ((AbstractConfigObject) child).withValue(next, v));
} else {
// as soon as we have a non-object, replace it entirely
SimpleConfig subtree = ((AbstractConfigValue) v).atPath(
SimpleConfigOrigin.newSimple("withValue(" + next.render() + ")"), next);
return withValue(key, subtree.root());
}
}
}
@Override
protected AbstractConfigValue attemptPeekWithPartialResolve(String key) {
return value.get(key);
}
private SimpleConfigObject newCopy(ResolveStatus newStatus, ConfigOrigin newOrigin,
boolean newIgnoresFallbacks) {
return new SimpleConfigObject(newOrigin, value, newStatus, newIgnoresFallbacks);
}
@Override
protected SimpleConfigObject newCopy(ResolveStatus newStatus, ConfigOrigin newOrigin) {
return newCopy(newStatus, newOrigin, ignoresFallbacks);
}
@Override
protected SimpleConfigObject withFallbacksIgnored() {
if (ignoresFallbacks)
return this;
else
return newCopy(resolveStatus(), origin(), true /* ignoresFallbacks */);
}
@Override
ResolveStatus resolveStatus() {
return ResolveStatus.fromBoolean(resolved);
}
@Override
public SimpleConfigObject replaceChild(AbstractConfigValue child, AbstractConfigValue replacement) {
HashMap<String, AbstractConfigValue> newChildren = new HashMap<String, AbstractConfigValue>(value);
for (Map.Entry<String, AbstractConfigValue> old : newChildren.entrySet()) {
if (old.getValue() == child) {
if (replacement != null)
old.setValue(replacement);
else
newChildren.remove(old.getKey());
return new SimpleConfigObject(origin(), newChildren, ResolveStatus.fromValues(newChildren.values()),
ignoresFallbacks);
}
}
throw new ConfigException.BugOrBroken("SimpleConfigObject.replaceChild did not find " + child + " in " + this);
}
@Override
public boolean hasDescendant(AbstractConfigValue descendant) {
for (AbstractConfigValue child : value.values()) {
if (child == descendant)
return true;
}
// now do the expensive search
for (AbstractConfigValue child : value.values()) {
if (child instanceof Container && ((Container) child).hasDescendant(descendant))
return true;
}
return false;
}
@Override
protected boolean ignoresFallbacks() {
return ignoresFallbacks;
}
@Override
public Map<String, Object> unwrapped() {
Map<String, Object> m = new HashMap<String, Object>();
for (Map.Entry<String, AbstractConfigValue> e : value.entrySet()) {
m.put(e.getKey(), e.getValue().unwrapped());
}
return m;
}
@Override
protected SimpleConfigObject mergedWithObject(AbstractConfigObject abstractFallback) {
requireNotIgnoringFallbacks();
if (!(abstractFallback instanceof SimpleConfigObject)) {
throw new ConfigException.BugOrBroken(
"should not be reached (merging non-SimpleConfigObject)");
}
SimpleConfigObject fallback = (SimpleConfigObject) abstractFallback;
boolean changed = false;
boolean allResolved = true;
Map<String, AbstractConfigValue> merged = new HashMap<String, AbstractConfigValue>();
Set<String> allKeys = new HashSet<String>();
allKeys.addAll(this.keySet());
allKeys.addAll(fallback.keySet());
for (String key : allKeys) {
AbstractConfigValue first = this.value.get(key);
AbstractConfigValue second = fallback.value.get(key);
AbstractConfigValue kept;
if (first == null)
kept = second;
else if (second == null)
kept = first;
else
kept = first.withFallback(second);
merged.put(key, kept);
if (first != kept)
changed = true;
if (kept.resolveStatus() == ResolveStatus.UNRESOLVED)
allResolved = false;
}
ResolveStatus newResolveStatus = ResolveStatus.fromBoolean(allResolved);
boolean newIgnoresFallbacks = fallback.ignoresFallbacks();
if (changed)
return new SimpleConfigObject(mergeOrigins(this, fallback), merged, newResolveStatus,
newIgnoresFallbacks);
else if (newResolveStatus != resolveStatus() || newIgnoresFallbacks != ignoresFallbacks())
return newCopy(newResolveStatus, origin(), newIgnoresFallbacks);
else
return this;
}
private SimpleConfigObject modify(NoExceptionsModifier modifier) {
try {
return modifyMayThrow(modifier);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new ConfigException.BugOrBroken("unexpected checked exception", e);
}
}
private SimpleConfigObject modifyMayThrow(Modifier modifier) throws Exception {
Map<String, AbstractConfigValue> changes = null;
for (String k : keySet()) {
AbstractConfigValue v = value.get(k);
// "modified" may be null, which means remove the child;
// to do that we put null in the "changes" map.
AbstractConfigValue modified = modifier.modifyChildMayThrow(k, v);
if (modified != v) {
if (changes == null)
changes = new HashMap<String, AbstractConfigValue>();
changes.put(k, modified);
}
}
if (changes == null) {
return this;
} else {
Map<String, AbstractConfigValue> modified = new HashMap<String, AbstractConfigValue>();
boolean sawUnresolved = false;
for (String k : keySet()) {
if (changes.containsKey(k)) {
AbstractConfigValue newValue = changes.get(k);
if (newValue != null) {
modified.put(k, newValue);
if (newValue.resolveStatus() == ResolveStatus.UNRESOLVED)
sawUnresolved = true;
} else {
// remove this child; don't put it in the new map.
}
} else {
AbstractConfigValue newValue = value.get(k);
modified.put(k, newValue);
if (newValue.resolveStatus() == ResolveStatus.UNRESOLVED)
sawUnresolved = true;
}
}
return new SimpleConfigObject(origin(), modified,
sawUnresolved ? ResolveStatus.UNRESOLVED : ResolveStatus.RESOLVED,
ignoresFallbacks());
}
}
private static final class ResolveModifier implements Modifier {
final Path originalRestrict;
ResolveContext context;
final ResolveSource source;
ResolveModifier(ResolveContext context, ResolveSource source) {
this.context = context;
this.source = source;
originalRestrict = context.restrictToChild();
}
@Override
public AbstractConfigValue modifyChildMayThrow(String key, AbstractConfigValue v) throws NotPossibleToResolve {
if (context.isRestrictedToChild()) {
if (key.equals(context.restrictToChild().first())) {
Path remainder = context.restrictToChild().remainder();
if (remainder != null) {
ResolveResult<? extends AbstractConfigValue> result = context.restrict(remainder).resolve(v,
source);
context = result.context.unrestricted().restrict(originalRestrict);
return result.value;
} else {
// we don't want to resolve the leaf child.
return v;
}
} else {
// not in the restrictToChild path
return v;
}
} else {
// no restrictToChild, resolve everything
ResolveResult<? extends AbstractConfigValue> result = context.unrestricted().resolve(v, source);
context = result.context.unrestricted().restrict(originalRestrict);
return result.value;
}
}
}
@Override
ResolveResult<? extends AbstractConfigObject> resolveSubstitutions(ResolveContext context, ResolveSource source)
throws NotPossibleToResolve {
if (resolveStatus() == ResolveStatus.RESOLVED)
return ResolveResult.make(context, this);
final ResolveSource sourceWithParent = source.pushParent(this);
try {
ResolveModifier modifier = new ResolveModifier(context, sourceWithParent);
AbstractConfigValue value = modifyMayThrow(modifier);
return ResolveResult.make(modifier.context, value).asObjectResult();
} catch (NotPossibleToResolve e) {
throw e;
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new ConfigException.BugOrBroken("unexpected checked exception", e);
}
}
@Override
SimpleConfigObject relativized(final Path prefix) {
return modify(new NoExceptionsModifier() {
@Override
public AbstractConfigValue modifyChild(String key, AbstractConfigValue v) {
return v.relativized(prefix);
}
});
}
// this is only Serializable to chill out a findbugs warning
static final private class RenderComparator implements java.util.Comparator<String>, Serializable {
private static final long serialVersionUID = 1L;
private static boolean isAllDigits(String s) {
int length = s.length();
// empty string doesn't count as a number
// string longer than "max number of digits in a long" cannot be parsed as a long
if (length == 0)
return false;
for (int i = 0; i < length; ++i) {
char c = s.charAt(i);
if (!Character.isDigit(c))
return false;
}
return true;
}
// This is supposed to sort numbers before strings,
// and sort the numbers numerically. The point is
// to make objects which are really list-like
// (numeric indices) appear in order.
@Override
public int compare(String a, String b) {
boolean aDigits = isAllDigits(a);
boolean bDigits = isAllDigits(b);
if (aDigits && bDigits) {
return new BigInteger(a).compareTo(new BigInteger(b));
} else if (aDigits) {
return -1;
} else if (bDigits) {
return 1;
} else {
return a.compareTo(b);
}
}
}
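    // Example of the ordering produced by RenderComparator (illustrative, not
    // part of the original file): the keys {"10", "9", "a", "b2"} render as
    // 9, 10, a, b2 - numeric keys first, compared numerically, then the
    // remaining keys in plain string order ("b2" is not all digits).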
@Override
protected void render(StringBuilder sb, int indent, boolean atRoot, ConfigRenderOptions options) {
if (isEmpty()) {
sb.append("{}");
} else {
boolean outerBraces = options.getJson() || !atRoot;
int innerIndent;
if (outerBraces) {
innerIndent = indent + 1;
sb.append("{");
if (options.getFormatted())
sb.append('\n');
} else {
innerIndent = indent;
}
int separatorCount = 0;
String[] keys = keySet().toArray(new String[size()]);
Arrays.sort(keys, new RenderComparator());
for (String k : keys) {
AbstractConfigValue v;
v = value.get(k);
if (options.getOriginComments()) {
String[] lines = v.origin().description().split("\n");
for (String l : lines) {
indent(sb, indent + 1, options);
sb.append('#');
if (!l.isEmpty())
sb.append(' ');
sb.append(l);
sb.append("\n");
}
}
if (options.getComments()) {
for (String comment : v.origin().comments()) {
indent(sb, innerIndent, options);
sb.append("#");
if (!comment.startsWith(" "))
sb.append(' ');
sb.append(comment);
sb.append("\n");
}
}
indent(sb, innerIndent, options);
v.render(sb, innerIndent, false /* atRoot */, k, options);
if (options.getFormatted()) {
if (options.getJson()) {
sb.append(",");
separatorCount = 2;
} else {
separatorCount = 1;
}
sb.append('\n');
} else {
sb.append(",");
separatorCount = 1;
}
}
// chop last commas/newlines
sb.setLength(sb.length() - separatorCount);
if (outerBraces) {
if (options.getFormatted()) {
sb.append('\n'); // put a newline back
                    indent(sb, indent, options); // already inside the outerBraces branch
}
sb.append("}");
}
}
if (atRoot && options.getFormatted())
sb.append('\n');
}
@Override
public AbstractConfigValue get(Object key) {
return value.get(key);
}
private static boolean mapEquals(Map<String, ConfigValue> a, Map<String, ConfigValue> b) {
if (a == b)
return true;
Set<String> aKeys = a.keySet();
Set<String> bKeys = b.keySet();
if (!aKeys.equals(bKeys))
return false;
for (String key : aKeys) {
if (!a.get(key).equals(b.get(key)))
return false;
}
return true;
}
private static int mapHash(Map<String, ConfigValue> m) {
// the keys have to be sorted, otherwise we could be equal
// to another map but have a different hashcode.
List<String> keys = new ArrayList<String>();
keys.addAll(m.keySet());
Collections.sort(keys);
int valuesHash = 0;
for (String k : keys) {
valuesHash += m.get(k).hashCode();
}
return 41 * (41 + keys.hashCode()) + valuesHash;
}
@Override
protected boolean canEqual(Object other) {
return other instanceof ConfigObject;
}
@Override
public boolean equals(Object other) {
// note that "origin" is deliberately NOT part of equality.
// neither are other "extras" like ignoresFallbacks or resolve status.
if (other instanceof ConfigObject) {
// optimization to avoid unwrapped() for two ConfigObject,
// which is what AbstractConfigValue does.
return canEqual(other) && mapEquals(this, ((ConfigObject) other));
} else {
return false;
}
}
@Override
public int hashCode() {
// note that "origin" is deliberately NOT part of equality
// neither are other "extras" like ignoresFallbacks or resolve status.
return mapHash(this);
}
@Override
public boolean containsKey(Object key) {
return value.containsKey(key);
}
@Override
public Set<String> keySet() {
return value.keySet();
}
@Override
public boolean containsValue(Object v) {
return value.containsValue(v);
}
@Override
public Set<Map.Entry<String, ConfigValue>> entrySet() {
// total bloat just to work around lack of type variance
HashSet<java.util.Map.Entry<String, ConfigValue>> entries = new HashSet<Map.Entry<String, ConfigValue>>();
for (Map.Entry<String, AbstractConfigValue> e : value.entrySet()) {
entries.add(new AbstractMap.SimpleImmutableEntry<String, ConfigValue>(
e.getKey(), e
.getValue()));
}
return entries;
}
@Override
public boolean isEmpty() {
return value.isEmpty();
}
@Override
public int size() {
return value.size();
}
@Override
public Collection<ConfigValue> values() {
return new HashSet<ConfigValue>(value.values());
}
final private static String EMPTY_NAME = "empty config";
final private static SimpleConfigObject emptyInstance = empty(SimpleConfigOrigin
.newSimple(EMPTY_NAME));
final static SimpleConfigObject empty() {
return emptyInstance;
}
final static SimpleConfigObject empty(ConfigOrigin origin) {
if (origin == null)
return empty();
else
return new SimpleConfigObject(origin,
Collections.<String, AbstractConfigValue> emptyMap());
}
final static SimpleConfigObject emptyMissing(ConfigOrigin baseOrigin) {
return new SimpleConfigObject(SimpleConfigOrigin.newSimple(
baseOrigin.description() + " (not found)"),
Collections.<String, AbstractConfigValue> emptyMap());
}
// serialization all goes through SerializedConfigValue
private Object writeReplace() throws ObjectStreamException {
return new SerializedConfigValue(this);
}
}
| typesafehub/config | config/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java | Java | apache-2.0 | 24,048 |
/*
* Copyright 2015 AppDynamics, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.appdynamicspilot.rest;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
public class ShoppingCart implements java.io.Serializable {
    // static so the non-serializable Logger is not dragged into serialization
    private static final Logger log = Logger.getLogger(ShoppingCart.class);
private List<ShoppingCartItem> items;
public ShoppingCart() {
items = new ArrayList<ShoppingCartItem>();
}
public void addItem(ShoppingCartItem item) {
items.add(item);
}
public void removeItem(ShoppingCartItem item) {
items.remove(item);
}
public List<ShoppingCartItem> getAllItems() {
return items;
}
public double getCartTotal() {
double total = 0.0;
for (ShoppingCartItem item : items) {
total += item.getPrice();
}
return total;
}
public void clear() {
items.clear();
}
}
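// Illustrative usage sketch (not part of the original file). ShoppingCartItem's
// construction is not shown in this file, so the item below is hypothetical:
//
//   ShoppingCart cart = new ShoppingCart();
//   cart.addItem(item);                 // some ShoppingCartItem whose getPrice() is 9.99
//   double total = cart.getCartTotal(); // 9.99
//   cart.clear();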
| udayinfy/ECommerce-Java | ECommerce-Web/src/main/java/com/appdynamicspilot/rest/ShoppingCart.java | Java | apache-2.0 | 1,458 |
package org.carlspring.strongbox.validation;
import javax.inject.Inject;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.carlspring.strongbox.authorization.service.AuthorizationConfigService;
import org.springframework.util.StringUtils;
/**
* @author Pablo Tirado
*/
public class UniqueRoleNameValidator
implements ConstraintValidator<UniqueRoleName, String>
{
@Inject
private AuthorizationConfigService authorizationConfigService;
@Override
public void initialize(UniqueRoleName constraint)
{
// empty by design
}
@Override
public boolean isValid(String roleName,
ConstraintValidatorContext context)
{
        return StringUtils.isEmpty(roleName)
               || authorizationConfigService.get().getRoles().stream().noneMatch(r -> r.getName().equals(roleName));
}
}
| sbespalov/strongbox | strongbox-web-core/src/main/java/org/carlspring/strongbox/validation/UniqueRoleNameValidator.java | Java | apache-2.0 | 922 |
/*
* Copyright 2016 Netbrasoft
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package br.com.netbrasoft.gnuob.api.category;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToCountCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToFindCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToFindCategoryById;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToMergeCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToPersistCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToRefreshCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToRemoveCategory;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.CAN_NOT_INITIALIZE_THE_DEFAULT_WSDL_FROM_0;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.CATEGORY_WEB_SERVICE_REPOSITORY_NAME;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.UNCHECKED_VALUE;
import static java.lang.System.getProperty;
import static org.slf4j.LoggerFactory.getLogger;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import org.javasimon.aop.Monitored;
import org.slf4j.Logger;
import org.springframework.stereotype.Repository;
import br.com.netbrasoft.gnuob.api.Category;
import br.com.netbrasoft.gnuob.api.CategoryWebServiceImpl;
import br.com.netbrasoft.gnuob.api.CategoryWebServiceImplService;
import br.com.netbrasoft.gnuob.api.MetaData;
import br.com.netbrasoft.gnuob.api.OrderBy;
import br.com.netbrasoft.gnuob.api.Paging;
import br.com.netbrasoft.gnuob.api.generic.IGenericTypeWebServiceRepository;
@Monitored
@Repository(CATEGORY_WEB_SERVICE_REPOSITORY_NAME)
public class CategoryWebServiceRepository<C extends Category> implements IGenericTypeWebServiceRepository<C> {
private static final Logger LOGGER = getLogger(CategoryWebServiceRepository.class);
private static final URL WSDL_LOCATION;
static {
URL url = null;
try {
url = new URL(getProperty(GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL,
HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL));
} catch (final MalformedURLException e) {
LOGGER.info(CAN_NOT_INITIALIZE_THE_DEFAULT_WSDL_FROM_0, getProperty(GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL,
HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL));
}
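    // If the configured URL is malformed, WSDL_LOCATION stays null and service
    // construction fails later in getCategoryWebServiceImpl(); only an info-level
    // message is logged here.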
WSDL_LOCATION = url;
}
private transient CategoryWebServiceImpl categoryWebServiceImpl = null;
private CategoryWebServiceImpl getCategoryWebServiceImpl() {
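    // Lazily created and not synchronized: concurrent first calls may build the
    // port twice, with the last assignment winning.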
if (categoryWebServiceImpl == null) {
categoryWebServiceImpl = new CategoryWebServiceImplService(WSDL_LOCATION).getCategoryWebServiceImplPort();
}
return categoryWebServiceImpl;
}
@Override
public long count(final MetaData credentials, final C categoryExample) {
return getCategoryWebServiceImpl().countCategory(wrapToCountCategory(categoryExample), credentials).getReturn();
}
@Override
@SuppressWarnings(UNCHECKED_VALUE)
public List<C> find(final MetaData credentials, final C categoryExample, final Paging paging,
final OrderBy orderingProperty) {
return (List<C>) getCategoryWebServiceImpl()
.findCategory(wrapToFindCategory(categoryExample, paging, orderingProperty), credentials).getReturn();
}
@Override
@SuppressWarnings(UNCHECKED_VALUE)
public C find(final MetaData credentials, final C categoryExample) {
return (C) getCategoryWebServiceImpl().findCategoryById(wrapToFindCategoryById(categoryExample), credentials)
.getReturn();
}
@Override
@SuppressWarnings(UNCHECKED_VALUE)
public C persist(final MetaData credentials, final C category) {
return (C) getCategoryWebServiceImpl().persistCategory(wrapToPersistCategory(category), credentials).getReturn();
}
@Override
@SuppressWarnings(UNCHECKED_VALUE)
public C merge(final MetaData credentials, final C category) {
return (C) getCategoryWebServiceImpl().mergeCategory(wrapToMergeCategory(category), credentials).getReturn();
}
@Override
@SuppressWarnings(UNCHECKED_VALUE)
public C refresh(final MetaData credentials, final C category) {
return (C) getCategoryWebServiceImpl().refreshCategory(wrapToRefreshCategory(category), credentials).getReturn();
}
@Override
public void remove(final MetaData credentials, final C category) {
getCategoryWebServiceImpl().removeCategory(wrapToRemoveCategory(category), credentials);
}
}
| Netbrasoft/gnuob-api | src/main/java/br/com/netbrasoft/gnuob/api/category/CategoryWebServiceRepository.java | Java | apache-2.0 | 5,406 |
package org.wso2.carbon.apimgt.rest.api.publisher.v1.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.ArrayList;
import java.util.List;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.AlertTypeDTO;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
import java.util.Objects;
import javax.xml.bind.annotation.*;
import org.wso2.carbon.apimgt.rest.api.util.annotations.Scope;
public class AlertTypesListDTO {
private Integer count = null;
private List<AlertTypeDTO> alerts = new ArrayList<>();
/**
* The number of alerts
**/
public AlertTypesListDTO count(Integer count) {
this.count = count;
return this;
}
@ApiModelProperty(example = "3", value = "The number of alerts")
@JsonProperty("count")
public Integer getCount() {
return count;
}
public void setCount(Integer count) {
this.count = count;
}
/**
**/
public AlertTypesListDTO alerts(List<AlertTypeDTO> alerts) {
this.alerts = alerts;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("alerts")
public List<AlertTypeDTO> getAlerts() {
return alerts;
}
public void setAlerts(List<AlertTypeDTO> alerts) {
this.alerts = alerts;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AlertTypesListDTO alertTypesList = (AlertTypesListDTO) o;
return Objects.equals(count, alertTypesList.count) &&
Objects.equals(alerts, alertTypesList.alerts);
}
@Override
public int hashCode() {
return Objects.hash(count, alerts);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class AlertTypesListDTO {\n");
sb.append(" count: ").append(toIndentedString(count)).append("\n");
sb.append(" alerts: ").append(toIndentedString(alerts)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
| nuwand/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/dto/AlertTypesListDTO.java | Java | apache-2.0 | 2,369 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.inventory;
import org.hawkular.agent.monitor.inventory.dmr.DMRResource;
import org.hawkular.agent.monitor.inventory.dmr.DMRResourceType;
import org.hawkular.dmrclient.Address;
import org.jboss.dmr.ModelNode;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.BreadthFirstIterator;
import org.jgrapht.traverse.DepthFirstIterator;
import org.junit.Assert;
import org.junit.Test;
public class ResourceManagerTest {
@Test
public void testEmptyResourceManager() {
ResourceManager<DMRResource> rm = new ResourceManager<>();
Assert.assertNull(rm.getResource(new ID("foo")));
Assert.assertTrue(rm.getAllResources().isEmpty());
Assert.assertTrue(rm.getRootResources().isEmpty());
Assert.assertFalse(rm.getBreadthFirstIterator().hasNext());
Assert.assertFalse(rm.getDepthFirstIterator().hasNext());
}
@Test
public void testResourceManager() {
DMRResourceType type = new DMRResourceType(new ID("resType"), new Name("resTypeName"));
ResourceManager<DMRResource> rm = new ResourceManager<>();
DMRResource root1 = new DMRResource(new ID("root1"), new Name("root1Name"), null, type, null, new Address(),
new ModelNode());
DMRResource root2 = new DMRResource(new ID("root2"), new Name("root2Name"), null, type, null, new Address(),
new ModelNode());
DMRResource child1 = new DMRResource(new ID("child1"), new Name("child1Name"), null, type, root1,
new Address(), new ModelNode());
DMRResource child2 = new DMRResource(new ID("child2"), new Name("child2Name"), null, type, root1,
new Address(), new ModelNode());
DMRResource grandChild1 = new DMRResource(new ID("grand1"), new Name("grand1Name"), null, type, child1,
new Address(), new ModelNode());
// add root1
rm.addResource(root1);
Assert.assertEquals(1, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(root1));
Assert.assertEquals(root1, rm.getResource(root1.getID()));
DepthFirstIterator<DMRResource, DefaultEdge> dIter = rm.getDepthFirstIterator();
Assert.assertEquals(root1, dIter.next());
Assert.assertFalse(dIter.hasNext());
BreadthFirstIterator<DMRResource, DefaultEdge> bIter = rm.getBreadthFirstIterator();
Assert.assertEquals(root1, bIter.next());
Assert.assertFalse(bIter.hasNext());
Assert.assertEquals(1, rm.getRootResources().size());
Assert.assertTrue(rm.getRootResources().contains(root1));
// add child1
rm.addResource(child1);
Assert.assertEquals(2, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(child1));
Assert.assertEquals(child1, rm.getResource(child1.getID()));
// add grandChild1
rm.addResource(grandChild1);
Assert.assertEquals(3, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(grandChild1));
Assert.assertEquals(grandChild1, rm.getResource(grandChild1.getID()));
// add root2
rm.addResource(root2);
Assert.assertEquals(4, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(root2));
Assert.assertEquals(root2, rm.getResource(root2.getID()));
Assert.assertEquals(2, rm.getRootResources().size());
Assert.assertTrue(rm.getRootResources().contains(root2));
// add child2
rm.addResource(child2);
Assert.assertEquals(5, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(child2));
Assert.assertEquals(child2, rm.getResource(child2.getID()));
//
// the tree now looks like:
//
// root1 root2
// / \
// child1 child2
// |
// grandchild1
//
Assert.assertEquals(2, rm.getChildren(root1).size());
Assert.assertTrue(rm.getChildren(root1).contains(child1));
Assert.assertTrue(rm.getChildren(root1).contains(child2));
Assert.assertEquals(1, rm.getChildren(child1).size());
Assert.assertTrue(rm.getChildren(child1).contains(grandChild1));
Assert.assertEquals(0, rm.getChildren(grandChild1).size());
Assert.assertEquals(0, rm.getChildren(root2).size());
        Assert.assertNull(rm.getParent(root1));
        Assert.assertNull(rm.getParent(root2));
        Assert.assertEquals(root1, rm.getParent(child1));
        Assert.assertEquals(root1, rm.getParent(child2));
        Assert.assertEquals(child1, rm.getParent(grandChild1));
        /*
         * The ordering assertions below are commented out because they do not hold
         * reliably: JGraphT's traversal iterators make no guarantee about the visit
         * order of sibling vertices (nor of disconnected roots), so the exact
         * sequences asserted here are implementation-dependent.
         *
// iterate depth first which should be:
// root1 -> child1 -> grandchild1 -> child2 -> root2
dIter = rm.getDepthFirstIterator();
Assert.assertEquals(root1, dIter.next());
Assert.assertEquals(child1, dIter.next());
Assert.assertEquals(grandChild1, dIter.next());
Assert.assertEquals(child2, dIter.next());
Assert.assertEquals(root2, dIter.next());
Assert.assertFalse(dIter.hasNext());
// iterate breadth first which should be (assuming roots are done in order)
// root1 -> child1 -> child2 -> grandchild1 -> root2
bIter = rm.getBreadthFirstIterator();
Assert.assertEquals(root1, bIter.next());
Assert.assertEquals(child1, bIter.next());
Assert.assertEquals(child2, bIter.next());
Assert.assertEquals(grandChild1, bIter.next());
Assert.assertEquals(root2, bIter.next());
Assert.assertFalse(bIter.hasNext());
         *
         * Re-enable these assertions once ResourceManager exposes a traversal with
         * a documented, deterministic order.
         */
}
}
| pavolloffay/hawkular-agent | hawkular-wildfly-monitor/src/test/java/org/hawkular/agent/monitor/inventory/ResourceManagerTest.java | Java | apache-2.0 | 6,531 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.docker.headers;
import java.util.Map;
import com.github.dockerjava.api.command.ListContainersCmd;
import org.apache.camel.component.docker.DockerConstants;
import org.apache.camel.component.docker.DockerOperation;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
/**
* Validates List Containers Request headers are applied properly
*/
public class ListContainersCmdHeaderTest extends BaseDockerHeaderTest<ListContainersCmd> {
@Mock
private ListContainersCmd mockObject;
@Test
public void listContainerHeaderTest() {
boolean showSize = true;
boolean showAll = false;
int limit = 2;
String since = "id1";
String before = "id2";
Map<String, Object> headers = getDefaultParameters();
headers.put(DockerConstants.DOCKER_LIMIT, limit);
headers.put(DockerConstants.DOCKER_SHOW_ALL, showAll);
headers.put(DockerConstants.DOCKER_SHOW_SIZE, showSize);
headers.put(DockerConstants.DOCKER_SINCE, since);
headers.put(DockerConstants.DOCKER_BEFORE, before);
template.sendBodyAndHeaders("direct:in", "", headers);
Mockito.verify(dockerClient, Mockito.times(1)).listContainersCmd();
Mockito.verify(mockObject, Mockito.times(1)).withShowAll(Matchers.eq(showAll));
Mockito.verify(mockObject, Mockito.times(1)).withShowSize(Matchers.eq(showSize));
Mockito.verify(mockObject, Mockito.times(1)).withLimit(Matchers.eq(limit));
Mockito.verify(mockObject, Mockito.times(1)).withSince(Matchers.eq(since));
Mockito.verify(mockObject, Mockito.times(1)).withBefore(Matchers.eq(before));
}
@Override
protected void setupMocks() {
Mockito.when(dockerClient.listContainersCmd()).thenReturn(mockObject);
}
@Override
protected DockerOperation getOperation() {
return DockerOperation.LIST_CONTAINERS;
}
}
| koscejev/camel | components/camel-docker/src/test/java/org/apache/camel/component/docker/headers/ListContainersCmdHeaderTest.java | Java | apache-2.0 | 2,786 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.elasticjob.lite.spring.boot.reg.snapshot;
import static org.junit.Assert.assertNotNull;
import org.apache.shardingsphere.elasticjob.lite.internal.snapshot.SnapshotService;
import org.apache.shardingsphere.elasticjob.lite.spring.boot.job.fixture.EmbedTestingServer;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;
@SpringBootTest
@SpringBootApplication
@ActiveProfiles("snapshot")
public class ElasticJobSnapshotServiceConfigurationTest extends AbstractJUnit4SpringContextTests {
@BeforeClass
public static void init() {
EmbedTestingServer.start();
}
@Test
public void assertSnapshotServiceConfiguration() {
assertNotNull(applicationContext);
assertNotNull(applicationContext.getBean(SnapshotService.class));
}
}
| elasticjob/elastic-job | elasticjob-lite/elasticjob-lite-spring/elasticjob-lite-spring-boot-starter/src/test/java/org/apache/shardingsphere/elasticjob/lite/spring/boot/reg/snapshot/ElasticJobSnapshotServiceConfigurationTest.java | Java | apache-2.0 | 1,874 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.TypeConversionUtil;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
public class IterableComponentTypeMacro implements Macro {
public String getName() {
return "iterableComponentType";
}
public String getDescription() {
return CodeInsightBundle.message("macro.iterable.component.type");
}
public String getDefaultValue() {
return "a";
}
public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
if (params.length != 1) return null;
final Result result = params[0].calculateResult(context);
if (result == null) return null;
Project project = context.getProject();
PsiDocumentManager.getInstance(project).commitAllDocuments();
PsiExpression expr = MacroUtil.resultToPsiExpression(result, context);
if (expr == null) return null;
PsiType type = expr.getType();
if (type instanceof PsiArrayType) {
return new PsiTypeResult(((PsiArrayType)type).getComponentType(), project);
}
if (type instanceof PsiClassType) {
PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics();
PsiClass aClass = resolveResult.getElement();
if (aClass != null) {
PsiClass iterableClass = JavaPsiFacade.getInstance(project).findClass("java.lang.Iterable", aClass.getResolveScope());
if (iterableClass != null) {
PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(iterableClass, aClass, resolveResult.getSubstitutor());
if (substitutor != null) {
PsiType parameterType = substitutor.substitute(iterableClass.getTypeParameters()[0]);
if (parameterType instanceof PsiCapturedWildcardType) {
parameterType = ((PsiCapturedWildcardType)parameterType).getWildcard();
}
if (parameterType != null) {
            if (parameterType instanceof PsiWildcardType) {
              if (((PsiWildcardType)parameterType).isExtends()) {
                return new PsiTypeResult(((PsiWildcardType)parameterType).getBound(), project);
              }
              return null;
            }
}
return new PsiTypeResult(parameterType, project);
}
}
}
}
}
return null;
}
public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
return calculateResult(params, context);
}
public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) {
return LookupElement.EMPTY_ARRAY;
}
}
| joewalnes/idea-community | java/java-impl/src/com/intellij/codeInsight/template/macro/IterableComponentTypeMacro.java | Java | apache-2.0 | 3,485 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.AppTopics;
import com.intellij.CommonBundle;
import com.intellij.codeStyle.CodeStyleFacade;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.TransactionGuardImpl;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.editor.impl.TrailingSpacesStripper;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileTypes.BinaryFileTypeDecompilers;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.project.*;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;
import com.intellij.pom.core.impl.PomModelImpl;
import com.intellij.psi.ExternalChangeAction;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SingleRootFileViewProvider;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.nio.charset.Charset;
import java.util.*;
import java.util.List;
public class FileDocumentManagerImpl extends FileDocumentManager implements VirtualFileListener, VetoableProjectManagerListener, SafeWriteRequestor {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl");
public static final Key<Document> HARD_REF_TO_DOCUMENT_KEY = Key.create("HARD_REF_TO_DOCUMENT_KEY");
private static final Key<String> LINE_SEPARATOR_KEY = Key.create("LINE_SEPARATOR_KEY");
private static final Key<VirtualFile> FILE_KEY = Key.create("FILE_KEY");
private static final Key<Boolean> MUST_RECOMPUTE_FILE_TYPE = Key.create("Must recompute file type");
private final Set<Document> myUnsavedDocuments = ContainerUtil.newConcurrentSet();
private final MessageBus myBus;
private static final Object lock = new Object();
private final FileDocumentManagerListener myMultiCaster;
private final TrailingSpacesStripper myTrailingSpacesStripper = new TrailingSpacesStripper();
private boolean myOnClose;
private volatile MemoryDiskConflictResolver myConflictResolver = new MemoryDiskConflictResolver();
private final PrioritizedDocumentListener myPhysicalDocumentChangeTracker = new PrioritizedDocumentListener() {
@Override
public int getPriority() {
return Integer.MIN_VALUE;
}
@Override
public void documentChanged(DocumentEvent e) {
final Document document = e.getDocument();
if (!ApplicationManager.getApplication().hasWriteAction(ExternalChangeAction.ExternalDocumentChange.class)) {
myUnsavedDocuments.add(document);
}
final Runnable currentCommand = CommandProcessor.getInstance().getCurrentCommand();
Project project = currentCommand == null ? null : CommandProcessor.getInstance().getCurrentCommandProject();
if (project == null)
project = ProjectUtil.guessProjectForFile(getFile(document));
String lineSeparator = CodeStyleFacade.getInstance(project).getLineSeparator();
document.putUserData(LINE_SEPARATOR_KEY, lineSeparator);
// avoid documents piling up during batch processing
if (areTooManyDocumentsInTheQueue(myUnsavedDocuments)) {
saveAllDocumentsLater();
}
}
};
public FileDocumentManagerImpl(@NotNull VirtualFileManager virtualFileManager, @NotNull ProjectManager projectManager) {
virtualFileManager.addVirtualFileListener(this);
projectManager.addProjectManagerListener(this);
myBus = ApplicationManager.getApplication().getMessageBus();
myBus.connect().subscribe(ProjectManager.TOPIC, this);
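    // Fan-out via a dynamic proxy: every FileDocumentManagerListener call on
    // myMultiCaster is forwarded to the message bus topic, all registered listener
    // extensions, and the trailing-spaces stripper (see multiCast()).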
InvocationHandler handler = (proxy, method, args) -> {
multiCast(method, args);
return null;
};
final ClassLoader loader = FileDocumentManagerListener.class.getClassLoader();
myMultiCaster = (FileDocumentManagerListener)Proxy.newProxyInstance(loader, new Class[]{FileDocumentManagerListener.class}, handler);
}
private static void unwrapAndRethrow(Exception e) {
Throwable unwrapped = e;
if (e instanceof InvocationTargetException) {
unwrapped = e.getCause() == null ? e : e.getCause();
}
if (unwrapped instanceof Error) throw (Error)unwrapped;
if (unwrapped instanceof RuntimeException) throw (RuntimeException)unwrapped;
LOG.error(unwrapped);
}
@SuppressWarnings("OverlyBroadCatchBlock")
private void multiCast(@NotNull Method method, Object[] args) {
try {
method.invoke(myBus.syncPublisher(AppTopics.FILE_DOCUMENT_SYNC), args);
}
catch (ClassCastException e) {
LOG.error("Arguments: "+ Arrays.toString(args), e);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
// Allows pre-save document modification
for (FileDocumentManagerListener listener : getListeners()) {
try {
method.invoke(listener, args);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
}
// stripping trailing spaces
try {
method.invoke(myTrailingSpacesStripper, args);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
}
@Override
@Nullable
public Document getDocument(@NotNull final VirtualFile file) {
ApplicationManager.getApplication().assertReadAccessAllowed();
DocumentEx document = (DocumentEx)getCachedDocument(file);
if (document == null) {
if (!file.isValid() || file.isDirectory() || isBinaryWithoutDecompiler(file)) return null;
boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
if (file.getFileType().isBinary() && tooLarge) return null;
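      // oversized files are loaded as a truncated preview and marked read-only below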
final CharSequence text = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
synchronized (lock) {
document = (DocumentEx)getCachedDocument(file);
if (document != null) return document; // Double checking
document = (DocumentEx)createDocument(text, file);
document.setModificationStamp(file.getModificationStamp());
final FileType fileType = file.getFileType();
document.setReadOnly(tooLarge || !file.isWritable() || fileType.isBinary());
if (!(file instanceof LightVirtualFile || file.getFileSystem() instanceof NonPhysicalFileSystem)) {
document.addDocumentListener(myPhysicalDocumentChangeTracker);
}
if (file instanceof LightVirtualFile) {
registerDocument(document, file);
}
else {
document.putUserData(FILE_KEY, file);
cacheDocument(file, document);
}
}
myMultiCaster.fileContentLoaded(file, document);
}
return document;
}
public static boolean areTooManyDocumentsInTheQueue(Collection<Document> documents) {
if (documents.size() > 100) return true;
int totalSize = 0;
for (Document document : documents) {
totalSize += document.getTextLength();
if (totalSize > FileUtilRt.LARGE_FOR_CONTENT_LOADING) return true;
}
return false;
}
private static Document createDocument(final CharSequence text, VirtualFile file) {
boolean acceptSlashR = file instanceof LightVirtualFile && StringUtil.indexOf(text, '\r') >= 0;
boolean freeThreaded = Boolean.TRUE.equals(file.getUserData(SingleRootFileViewProvider.FREE_THREADED));
return ((EditorFactoryImpl)EditorFactory.getInstance()).createDocument(text, acceptSlashR, freeThreaded);
}
@Override
@Nullable
public Document getCachedDocument(@NotNull VirtualFile file) {
Document hard = file.getUserData(HARD_REF_TO_DOCUMENT_KEY);
return hard != null ? hard : getDocumentFromCache(file);
}
public static void registerDocument(@NotNull final Document document, @NotNull VirtualFile virtualFile) {
synchronized (lock) {
document.putUserData(FILE_KEY, virtualFile);
virtualFile.putUserData(HARD_REF_TO_DOCUMENT_KEY, document);
}
}
@Override
@Nullable
public VirtualFile getFile(@NotNull Document document) {
return document.getUserData(FILE_KEY);
}
@TestOnly
public void dropAllUnsavedDocuments() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
throw new RuntimeException("This method is only for test mode!");
}
ApplicationManager.getApplication().assertWriteAccessAllowed();
if (!myUnsavedDocuments.isEmpty()) {
myUnsavedDocuments.clear();
fireUnsavedDocumentsDropped();
}
}
private void saveAllDocumentsLater() {
// later because some document might have been blocked by PSI right now
ApplicationManager.getApplication().invokeLater(() -> {
if (ApplicationManager.getApplication().isDisposed()) {
return;
}
final Document[] unsavedDocuments = getUnsavedDocuments();
for (Document document : unsavedDocuments) {
VirtualFile file = getFile(document);
if (file == null) continue;
Project project = ProjectUtil.guessProjectForFile(file);
if (project == null) continue;
if (PsiDocumentManager.getInstance(project).isDocumentBlockedByPsi(document)) continue;
saveDocument(document);
}
});
}
@Override
public void saveAllDocuments() {
saveAllDocuments(true);
}
/**
* @param isExplicit caused by user directly (Save action) or indirectly (e.g. Compile)
*/
public void saveAllDocuments(boolean isExplicit) {
ApplicationManager.getApplication().assertIsDispatchThread();
((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();
myMultiCaster.beforeAllDocumentsSaving();
if (myUnsavedDocuments.isEmpty()) return;
final Map<Document, IOException> failedToSave = new HashMap<>();
final Set<Document> vetoed = new HashSet<>();
while (true) {
int count = 0;
for (Document document : myUnsavedDocuments) {
if (failedToSave.containsKey(document)) continue;
if (vetoed.contains(document)) continue;
try {
doSaveDocument(document, isExplicit);
}
catch (IOException e) {
//noinspection ThrowableResultOfMethodCallIgnored
failedToSave.put(document, e);
}
catch (SaveVetoException e) {
vetoed.add(document);
}
count++;
}
if (count == 0) break;
}
if (!failedToSave.isEmpty()) {
handleErrorsOnSave(failedToSave);
}
}
@Override
public void saveDocument(@NotNull final Document document) {
saveDocument(document, true);
}
public void saveDocument(@NotNull final Document document, final boolean explicit) {
ApplicationManager.getApplication().assertIsDispatchThread();
((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();
if (!myUnsavedDocuments.contains(document)) return;
try {
doSaveDocument(document, explicit);
}
catch (IOException e) {
handleErrorsOnSave(Collections.singletonMap(document, e));
}
catch (SaveVetoException ignored) {
}
}
@Override
public void saveDocumentAsIs(@NotNull Document document) {
VirtualFile file = getFile(document);
boolean spaceStrippingEnabled = true;
if (file != null) {
spaceStrippingEnabled = TrailingSpacesStripper.isEnabled(file);
TrailingSpacesStripper.setEnabled(file, false);
}
try {
saveDocument(document);
}
finally {
if (file != null) {
TrailingSpacesStripper.setEnabled(file, spaceStrippingEnabled);
}
}
}
private static class SaveVetoException extends Exception {}
private void doSaveDocument(@NotNull final Document document, boolean isExplicit) throws IOException, SaveVetoException {
VirtualFile file = getFile(document);
if (file == null || file instanceof LightVirtualFile || file.isValid() && !isFileModified(file)) {
removeFromUnsaved(document);
return;
}
if (file.isValid() && needsRefresh(file)) {
file.refresh(false, false);
if (!myUnsavedDocuments.contains(document)) return;
}
if (!maySaveDocument(file, document, isExplicit)) {
throw new SaveVetoException();
}
WriteAction.run(() -> doSaveDocumentInWriteAction(document, file));
}
private boolean maySaveDocument(VirtualFile file, Document document, boolean isExplicit) {
return !myConflictResolver.hasConflict(file) &&
Arrays.stream(Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)).allMatch(vetoer -> vetoer.maySaveDocument(document, isExplicit));
}
private void doSaveDocumentInWriteAction(@NotNull final Document document, @NotNull final VirtualFile file) throws IOException {
if (!file.isValid()) {
removeFromUnsaved(document);
return;
}
if (!file.equals(getFile(document))) {
registerDocument(document, file);
}
if (!isSaveNeeded(document, file)) {
if (document instanceof DocumentEx) {
((DocumentEx)document).setModificationStamp(file.getModificationStamp());
}
removeFromUnsaved(document);
updateModifiedProperty(file);
return;
}
PomModelImpl.guardPsiModificationsIn(() -> {
myMultiCaster.beforeDocumentSaving(document);
LOG.assertTrue(file.isValid());
String text = document.getText();
String lineSeparator = getLineSeparator(document, file);
if (!lineSeparator.equals("\n")) {
text = StringUtil.convertLineSeparators(text, lineSeparator);
}
Project project = ProjectLocator.getInstance().guessProjectForFile(file);
LoadTextUtil.write(project, file, this, text, document.getModificationStamp());
myUnsavedDocuments.remove(document);
LOG.assertTrue(!myUnsavedDocuments.contains(document));
myTrailingSpacesStripper.clearLineModificationFlags(document);
});
}
private static void updateModifiedProperty(@NotNull VirtualFile file) {
for (Project project : ProjectManager.getInstance().getOpenProjects()) {
FileEditorManager fileEditorManager = FileEditorManager.getInstance(project);
for (FileEditor editor : fileEditorManager.getAllEditors(file)) {
if (editor instanceof TextEditorImpl) {
((TextEditorImpl)editor).updateModifiedProperty();
}
}
}
}
private void removeFromUnsaved(@NotNull Document document) {
myUnsavedDocuments.remove(document);
fireUnsavedDocumentsDropped();
LOG.assertTrue(!myUnsavedDocuments.contains(document));
}
private static boolean isSaveNeeded(@NotNull Document document, @NotNull VirtualFile file) throws IOException {
if (file.getFileType().isBinary() || document.getTextLength() > 1000 * 1000) { // don't compare if the file is too big
return true;
}
byte[] bytes = file.contentsToByteArray();
CharSequence loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, file, false, false);
return !Comparing.equal(document.getCharsSequence(), loaded);
}
private static boolean needsRefresh(final VirtualFile file) {
final VirtualFileSystem fs = file.getFileSystem();
return fs instanceof NewVirtualFileSystem && file.getTimeStamp() != ((NewVirtualFileSystem)fs).getTimeStamp(file);
}
@NotNull
public static String getLineSeparator(@NotNull Document document, @NotNull VirtualFile file) {
String lineSeparator = LoadTextUtil.getDetectedLineSeparator(file);
if (lineSeparator == null) {
lineSeparator = document.getUserData(LINE_SEPARATOR_KEY);
assert lineSeparator != null : document;
}
return lineSeparator;
}
@Override
@NotNull
public String getLineSeparator(@Nullable VirtualFile file, @Nullable Project project) {
String lineSeparator = file == null ? null : LoadTextUtil.getDetectedLineSeparator(file);
if (lineSeparator == null) {
CodeStyleFacade settingsManager = project == null
? CodeStyleFacade.getInstance()
: CodeStyleFacade.getInstance(project);
lineSeparator = settingsManager.getLineSeparator();
}
return lineSeparator;
}
@Override
public boolean requestWriting(@NotNull Document document, Project project) {
final VirtualFile file = getInstance().getFile(document);
if (project != null && file != null && file.isValid()) {
return !file.getFileType().isBinary() && ReadonlyStatusHandler.ensureFilesWritable(project, file);
}
if (document.isWritable()) {
return true;
}
document.fireReadOnlyModificationAttempt();
return false;
}
@Override
public void reloadFiles(@NotNull final VirtualFile... files) {
for (VirtualFile file : files) {
if (file.exists()) {
final Document doc = getCachedDocument(file);
if (doc != null) {
reloadFromDisk(doc);
}
}
}
}
@Override
@NotNull
public Document[] getUnsavedDocuments() {
if (myUnsavedDocuments.isEmpty()) {
return Document.EMPTY_ARRAY;
}
List<Document> list = new ArrayList<>(myUnsavedDocuments);
return list.toArray(new Document[list.size()]);
}
@Override
public boolean isDocumentUnsaved(@NotNull Document document) {
return myUnsavedDocuments.contains(document);
}
@Override
public boolean isFileModified(@NotNull VirtualFile file) {
final Document doc = getCachedDocument(file);
return doc != null && isDocumentUnsaved(doc) && doc.getModificationStamp() != file.getModificationStamp();
}
@Override
public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
final VirtualFile file = event.getFile();
if (VirtualFile.PROP_WRITABLE.equals(event.getPropertyName())) {
final Document document = getCachedDocument(file);
if (document != null) {
ApplicationManager.getApplication().runWriteAction((ExternalChangeAction)() -> document.setReadOnly(!file.isWritable()));
}
}
else if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
Document document = getCachedDocument(file);
if (document != null) {
        // the rename may have changed the file's type; if it is now binary with no
        // decompiler, it can no longer be backed by a text document
if (isBinaryWithoutDecompiler(file)) {
unbindFileFromDocument(file, document);
}
}
}
}
private void unbindFileFromDocument(@NotNull VirtualFile file, @NotNull Document document) {
removeDocumentFromCache(file);
file.putUserData(HARD_REF_TO_DOCUMENT_KEY, null);
document.putUserData(FILE_KEY, null);
}
private static boolean isBinaryWithDecompiler(@NotNull VirtualFile file) {
final FileType ft = file.getFileType();
return ft.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(ft) != null;
}
private static boolean isBinaryWithoutDecompiler(@NotNull VirtualFile file) {
final FileType fileType = file.getFileType();
return fileType.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(fileType) == null;
}
@Override
public void contentsChanged(@NotNull VirtualFileEvent event) {
if (event.isFromSave()) return;
final VirtualFile file = event.getFile();
final Document document = getCachedDocument(file);
if (document == null) {
myMultiCaster.fileWithNoDocumentChanged(file);
return;
}
if (isBinaryWithDecompiler(file)) {
myMultiCaster.fileWithNoDocumentChanged(file); // This will generate PSI event at FileManagerImpl
}
if (document.getModificationStamp() == event.getOldModificationStamp() || !isDocumentUnsaved(document)) {
reloadFromDisk(document);
}
}
@Override
public void reloadFromDisk(@NotNull final Document document) {
ApplicationManager.getApplication().assertIsDispatchThread();
final VirtualFile file = getFile(document);
assert file != null;
if (!fireBeforeFileContentReload(file, document)) {
return;
}
final Project project = ProjectLocator.getInstance().guessProjectForFile(file);
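    // one-element array so the anonymous write action below can update the flag;
    // locals captured by inner classes must be effectively final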
boolean[] isReloadable = {isReloadable(file, document, project)};
if (isReloadable[0]) {
CommandProcessor.getInstance().executeCommand(project, () -> ApplicationManager.getApplication().runWriteAction(
new ExternalChangeAction.ExternalDocumentChange(document, project) {
@Override
public void run() {
if (!isBinaryWithoutDecompiler(file)) {
LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
file.setBOM(null); // reset BOM in case we had one and the external change stripped it away
file.setCharset(null, null, false);
boolean wasWritable = document.isWritable();
document.setReadOnly(false);
boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
CharSequence reloaded = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
isReloadable[0] = isReloadable(file, document, project);
if (isReloadable[0]) {
DocumentEx documentEx = (DocumentEx)document;
documentEx.replaceText(reloaded, file.getModificationStamp());
}
document.setReadOnly(!wasWritable);
}
}
}
), UIBundle.message("file.cache.conflict.action"), null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
}
if (isReloadable[0]) {
myMultiCaster.fileContentReloaded(file, document);
}
else {
unbindFileFromDocument(file, document);
myMultiCaster.fileWithNoDocumentChanged(file);
}
myUnsavedDocuments.remove(document);
}
private static boolean isReloadable(@NotNull VirtualFile file, @NotNull Document document, @Nullable Project project) {
PsiFile cachedPsiFile = project == null ? null : PsiDocumentManager.getInstance(project).getCachedPsiFile(document);
return !(FileUtilRt.isTooLarge(file.getLength()) && file.getFileType().isBinary()) &&
(cachedPsiFile == null || cachedPsiFile instanceof PsiFileImpl || isBinaryWithDecompiler(file));
}
@TestOnly
void setAskReloadFromDisk(@NotNull Disposable disposable, @NotNull MemoryDiskConflictResolver newProcessor) {
final MemoryDiskConflictResolver old = myConflictResolver;
myConflictResolver = newProcessor;
Disposer.register(disposable, () -> myConflictResolver = old);
}
@Override
public void fileDeleted(@NotNull VirtualFileEvent event) {
Document doc = getCachedDocument(event.getFile());
if (doc != null) {
myTrailingSpacesStripper.documentDeleted(doc);
}
}
@Override
public void beforeContentsChange(@NotNull VirtualFileEvent event) {
VirtualFile virtualFile = event.getFile();
    // evaluate getFileType() second so the cheap length check can short-circuit
    // and content-based type detection is not triggered needlessly
if (virtualFile.getLength() == 0 && virtualFile.getFileType() == UnknownFileType.INSTANCE) {
virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, Boolean.TRUE);
}
myConflictResolver.beforeContentChange(event);
}
public static boolean recomputeFileTypeIfNecessary(@NotNull VirtualFile virtualFile) {
if (virtualFile.getUserData(MUST_RECOMPUTE_FILE_TYPE) != null) {
virtualFile.getFileType();
virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, null);
return true;
}
return false;
}
@Override
public boolean canClose(@NotNull Project project) {
if (!myUnsavedDocuments.isEmpty()) {
myOnClose = true;
try {
saveAllDocuments();
}
finally {
myOnClose = false;
}
}
return myUnsavedDocuments.isEmpty();
}
private void fireUnsavedDocumentsDropped() {
myMultiCaster.unsavedDocumentsDropped();
}
private boolean fireBeforeFileContentReload(final VirtualFile file, @NotNull Document document) {
for (FileDocumentSynchronizationVetoer vetoer : Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)) {
try {
if (!vetoer.mayReloadFileContent(file, document)) {
return false;
}
}
catch (Exception e) {
LOG.error(e);
}
}
myMultiCaster.beforeFileContentReload(file, document);
return true;
}
@NotNull
private static FileDocumentManagerListener[] getListeners() {
return FileDocumentManagerListener.EP_NAME.getExtensions();
}
private static int getPreviewCharCount(@NotNull VirtualFile file) {
Charset charset = EncodingManager.getInstance().getEncoding(file, false);
float bytesPerChar = charset == null ? 2 : charset.newEncoder().averageBytesPerChar();
return (int)(FileUtilRt.LARGE_FILE_PREVIEW_SIZE / bytesPerChar);
}
private void handleErrorsOnSave(@NotNull Map<Document, IOException> failures) {
if (ApplicationManager.getApplication().isUnitTestMode()) {
IOException ioException = ContainerUtil.getFirstItem(failures.values());
if (ioException != null) {
throw new RuntimeException(ioException);
}
return;
}
for (IOException exception : failures.values()) {
LOG.warn(exception);
}
final String text = StringUtil.join(failures.values(), Throwable::getMessage, "\n");
final DialogWrapper dialog = new DialogWrapper(null) {
{
init();
setTitle(UIBundle.message("cannot.save.files.dialog.title"));
}
@Override
protected void createDefaultActions() {
super.createDefaultActions();
myOKAction.putValue(Action.NAME, UIBundle
.message(myOnClose ? "cannot.save.files.dialog.ignore.changes" : "cannot.save.files.dialog.revert.changes"));
myOKAction.putValue(DEFAULT_ACTION, null);
if (!myOnClose) {
myCancelAction.putValue(Action.NAME, CommonBundle.getCloseButtonText());
}
}
@Override
protected JComponent createCenterPanel() {
final JPanel panel = new JPanel(new BorderLayout(0, 5));
panel.add(new JLabel(UIBundle.message("cannot.save.files.dialog.message")), BorderLayout.NORTH);
final JTextPane area = new JTextPane();
area.setText(text);
area.setEditable(false);
area.setMinimumSize(new Dimension(area.getMinimumSize().width, 50));
panel.add(new JBScrollPane(area, ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER),
BorderLayout.CENTER);
return panel;
}
};
if (dialog.showAndGet()) {
for (Document document : failures.keySet()) {
reloadFromDisk(document);
}
}
}
private final Map<VirtualFile, Document> myDocumentCache = ContainerUtil.createConcurrentWeakValueMap();
// used in Upsource
protected void cacheDocument(@NotNull VirtualFile file, @NotNull Document document) {
myDocumentCache.put(file, document);
}
// used in Upsource
protected void removeDocumentFromCache(@NotNull VirtualFile file) {
myDocumentCache.remove(file);
}
// used in Upsource
protected Document getDocumentFromCache(@NotNull VirtualFile file) {
return myDocumentCache.get(file);
}
}
| semonte/intellij-community | platform/platform-impl/src/com/intellij/openapi/fileEditor/impl/FileDocumentManagerImpl.java | Java | apache-2.0 | 29,248 |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cluster.impl;
import com.hazelcast.config.Config;
import com.hazelcast.config.InterfacesConfig;
import com.hazelcast.config.NetworkConfig;
import com.hazelcast.config.TcpIpConfig;
import com.hazelcast.instance.Node;
import com.hazelcast.internal.cluster.impl.AbstractJoiner;
import com.hazelcast.internal.cluster.impl.ClusterServiceImpl;
import com.hazelcast.internal.cluster.impl.SplitBrainJoinMessage;
import com.hazelcast.internal.cluster.impl.operations.JoinMastershipClaimOp;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Connection;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.util.AddressUtil;
import com.hazelcast.util.AddressUtil.AddressMatcher;
import com.hazelcast.util.AddressUtil.InvalidAddressException;
import com.hazelcast.util.Clock;
import com.hazelcast.util.EmptyStatement;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.util.AddressUtil.AddressHolder;
public class TcpIpJoiner extends AbstractJoiner {
private static final long JOIN_RETRY_WAIT_TIME = 1000L;
private static final int LOOK_FOR_MASTER_MAX_TRY_COUNT = 20;
private final int maxPortTryCount;
private volatile boolean claimingMaster;
public TcpIpJoiner(Node node) {
super(node);
int tryCount = node.getProperties().getInteger(GroupProperty.TCP_JOIN_PORT_TRY_COUNT);
if (tryCount <= 0) {
throw new IllegalArgumentException(String.format("%s should be greater than zero! Current value: %d",
GroupProperty.TCP_JOIN_PORT_TRY_COUNT, tryCount));
}
maxPortTryCount = tryCount;
}
public boolean isClaimingMaster() {
return claimingMaster;
}
protected int getConnTimeoutSeconds() {
return config.getNetworkConfig().getJoin().getTcpIpConfig().getConnectionTimeoutSeconds();
}
@Override
public void doJoin() {
final Address targetAddress = getTargetAddress();
if (targetAddress != null) {
long maxJoinMergeTargetMillis = node.getProperties().getMillis(GroupProperty.MAX_JOIN_MERGE_TARGET_SECONDS);
joinViaTargetMember(targetAddress, maxJoinMergeTargetMillis);
if (!clusterService.isJoined()) {
joinViaPossibleMembers();
}
} else if (config.getNetworkConfig().getJoin().getTcpIpConfig().getRequiredMember() != null) {
Address requiredMember = getRequiredMemberAddress();
long maxJoinMillis = getMaxJoinMillis();
joinViaTargetMember(requiredMember, maxJoinMillis);
} else {
joinViaPossibleMembers();
}
}
private void joinViaTargetMember(Address targetAddress, long maxJoinMillis) {
try {
if (targetAddress == null) {
throw new IllegalArgumentException("Invalid target address -> NULL");
}
if (logger.isFineEnabled()) {
logger.fine("Joining over target member " + targetAddress);
}
if (targetAddress.equals(node.getThisAddress()) || isLocalAddress(targetAddress)) {
clusterJoinManager.setThisMemberAsMaster();
return;
}
long joinStartTime = Clock.currentTimeMillis();
Connection connection;
while (shouldRetry() && (Clock.currentTimeMillis() - joinStartTime < maxJoinMillis)) {
connection = node.connectionManager.getOrConnect(targetAddress);
if (connection == null) {
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
continue;
}
if (logger.isFineEnabled()) {
logger.fine("Sending joinRequest " + targetAddress);
}
clusterJoinManager.sendJoinRequest(targetAddress, true);
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
} catch (final Exception e) {
logger.warning(e);
}
}
private void joinViaPossibleMembers() {
try {
blacklistedAddresses.clear();
Collection<Address> possibleAddresses = getPossibleAddresses();
boolean foundConnection = tryInitialConnection(possibleAddresses);
if (!foundConnection) {
logger.fine("This node will assume master role since no possible member where connected to.");
clusterJoinManager.setThisMemberAsMaster();
return;
}
long maxJoinMillis = getMaxJoinMillis();
long startTime = Clock.currentTimeMillis();
while (shouldRetry() && (Clock.currentTimeMillis() - startTime < maxJoinMillis)) {
tryToJoinPossibleAddresses(possibleAddresses);
if (clusterService.isJoined()) {
return;
}
if (isAllBlacklisted(possibleAddresses)) {
logger.fine(
"This node will assume master role since none of the possible members accepted join request.");
clusterJoinManager.setThisMemberAsMaster();
return;
}
boolean masterCandidate = isThisNodeMasterCandidate(possibleAddresses);
if (masterCandidate) {
boolean consensus = claimMastership(possibleAddresses);
if (consensus) {
if (logger.isFineEnabled()) {
Set<Address> votingEndpoints = new HashSet<Address>(possibleAddresses);
votingEndpoints.removeAll(blacklistedAddresses.keySet());
logger.fine("Setting myself as master after consensus!"
+ " Voting endpoints: " + votingEndpoints);
}
clusterJoinManager.setThisMemberAsMaster();
claimingMaster = false;
return;
}
} else {
if (logger.isFineEnabled()) {
logger.fine("Cannot claim myself as master! Will try to connect a possible master...");
}
}
claimingMaster = false;
lookForMaster(possibleAddresses);
}
} catch (Throwable t) {
logger.severe(t);
}
}
@SuppressWarnings("checkstyle:npathcomplexity")
private boolean claimMastership(Collection<Address> possibleAddresses) {
if (logger.isFineEnabled()) {
Set<Address> votingEndpoints = new HashSet<Address>(possibleAddresses);
votingEndpoints.removeAll(blacklistedAddresses.keySet());
logger.fine("Claiming myself as master node! Asking to endpoints: " + votingEndpoints);
}
claimingMaster = true;
Collection<Future<Boolean>> responses = new LinkedList<Future<Boolean>>();
for (Address address : possibleAddresses) {
if (isBlacklisted(address)) {
continue;
}
if (node.getConnectionManager().getConnection(address) != null) {
Future<Boolean> future = node.nodeEngine.getOperationService()
.createInvocationBuilder(ClusterServiceImpl.SERVICE_NAME,
new JoinMastershipClaimOp(), address).setTryCount(1).invoke();
responses.add(future);
}
}
final long maxWait = TimeUnit.SECONDS.toMillis(10);
long waitTime = 0L;
boolean consensus = true;
for (Future<Boolean> response : responses) {
long t = Clock.currentTimeMillis();
try {
consensus = response.get(1, TimeUnit.SECONDS);
} catch (Exception e) {
logger.finest(e);
consensus = false;
} finally {
waitTime += (Clock.currentTimeMillis() - t);
}
if (!consensus) {
break;
}
if (waitTime > maxWait) {
consensus = false;
break;
}
}
return consensus;
}
private boolean isThisNodeMasterCandidate(Collection<Address> possibleAddresses) {
int thisHashCode = node.getThisAddress().hashCode();
for (Address address : possibleAddresses) {
if (isBlacklisted(address)) {
continue;
}
if (node.connectionManager.getConnection(address) != null) {
if (thisHashCode > address.hashCode()) {
return false;
}
}
}
return true;
}
private void tryToJoinPossibleAddresses(Collection<Address> possibleAddresses) throws InterruptedException {
long connectionTimeoutMillis = TimeUnit.SECONDS.toMillis(getConnTimeoutSeconds());
long start = Clock.currentTimeMillis();
while (!clusterService.isJoined() && Clock.currentTimeMillis() - start < connectionTimeoutMillis) {
Address masterAddress = clusterService.getMasterAddress();
if (isAllBlacklisted(possibleAddresses) && masterAddress == null) {
return;
}
if (masterAddress != null) {
if (logger.isFineEnabled()) {
logger.fine("Sending join request to " + masterAddress);
}
clusterJoinManager.sendJoinRequest(masterAddress, true);
} else {
sendMasterQuestion(possibleAddresses);
}
if (!clusterService.isJoined()) {
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
}
}
private boolean tryInitialConnection(Collection<Address> possibleAddresses) throws InterruptedException {
long connectionTimeoutMillis = TimeUnit.SECONDS.toMillis(getConnTimeoutSeconds());
long start = Clock.currentTimeMillis();
while (Clock.currentTimeMillis() - start < connectionTimeoutMillis) {
if (isAllBlacklisted(possibleAddresses)) {
return false;
}
if (logger.isFineEnabled()) {
logger.fine("Will send master question to each address in: " + possibleAddresses);
}
if (sendMasterQuestion(possibleAddresses)) {
return true;
}
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
return false;
}
private boolean isAllBlacklisted(Collection<Address> possibleAddresses) {
return blacklistedAddresses.keySet().containsAll(possibleAddresses);
}
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})
private void lookForMaster(Collection<Address> possibleAddresses) throws InterruptedException {
int tryCount = 0;
while (clusterService.getMasterAddress() == null && tryCount++ < LOOK_FOR_MASTER_MAX_TRY_COUNT) {
sendMasterQuestion(possibleAddresses);
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
if (isAllBlacklisted(possibleAddresses)) {
break;
}
}
if (clusterService.isJoined()) {
return;
}
if (isAllBlacklisted(possibleAddresses) && clusterService.getMasterAddress() == null) {
if (logger.isFineEnabled()) {
logger.fine("Setting myself as master! No possible addresses remaining to connect...");
}
clusterJoinManager.setThisMemberAsMaster();
return;
}
long maxMasterJoinTime = getMaxJoinTimeToMasterNode();
long start = Clock.currentTimeMillis();
while (shouldRetry() && Clock.currentTimeMillis() - start < maxMasterJoinTime) {
Address master = clusterService.getMasterAddress();
if (master != null) {
if (logger.isFineEnabled()) {
logger.fine("Joining to master " + master);
}
clusterJoinManager.sendJoinRequest(master, true);
} else {
break;
}
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
if (!clusterService.isJoined()) {
Address master = clusterService.getMasterAddress();
if (master != null) {
logger.warning("Couldn't join to the master: " + master);
} else {
if (logger.isFineEnabled()) {
logger.fine("Couldn't find a master! But there was connections available: " + possibleAddresses);
}
}
}
}
private boolean sendMasterQuestion(Collection<Address> possibleAddresses) {
if (logger.isFineEnabled()) {
logger.fine("NOT sending master question to blacklisted endpoints: " + blacklistedAddresses);
}
boolean sent = false;
for (Address address : possibleAddresses) {
if (isBlacklisted(address)) {
continue;
}
if (logger.isFineEnabled()) {
logger.fine("Sending master question to " + address);
}
if (clusterJoinManager.sendMasterQuestion(address)) {
sent = true;
}
}
return sent;
}
private Address getRequiredMemberAddress() {
TcpIpConfig tcpIpConfig = config.getNetworkConfig().getJoin().getTcpIpConfig();
String host = tcpIpConfig.getRequiredMember();
try {
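            // Resolve the required member: use it directly when it is an IP
            // literal, otherwise pick the resolved address that matches a
            // configured interface (if interface matching is enabled).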
AddressHolder addressHolder = AddressUtil.getAddressHolder(host, config.getNetworkConfig().getPort());
if (AddressUtil.isIpAddress(addressHolder.getAddress())) {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
InterfacesConfig interfaces = config.getNetworkConfig().getInterfaces();
if (interfaces.isEnabled()) {
InetAddress[] inetAddresses = InetAddress.getAllByName(addressHolder.getAddress());
if (inetAddresses.length > 1) {
for (InetAddress inetAddress : inetAddresses) {
if (AddressUtil.matchAnyInterface(inetAddress.getHostAddress(), interfaces.getInterfaces())) {
return new Address(inetAddress, addressHolder.getPort());
}
}
} else if (AddressUtil.matchAnyInterface(inetAddresses[0].getHostAddress(), interfaces.getInterfaces())) {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
} else {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
} catch (final Exception e) {
logger.warning(e);
}
return null;
}
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})
protected Collection<Address> getPossibleAddresses() {
final Collection<String> possibleMembers = getMembers();
final Set<Address> possibleAddresses = new HashSet<Address>();
final NetworkConfig networkConfig = config.getNetworkConfig();
for (String possibleMember : possibleMembers) {
AddressHolder addressHolder = AddressUtil.getAddressHolder(possibleMember);
try {
boolean portIsDefined = addressHolder.getPort() != -1 || !networkConfig.isPortAutoIncrement();
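                // With port auto-increment enabled and no explicit port, probe
                // maxPortTryCount consecutive ports starting at the configured one.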
int count = portIsDefined ? 1 : maxPortTryCount;
int port = addressHolder.getPort() != -1 ? addressHolder.getPort() : networkConfig.getPort();
AddressMatcher addressMatcher = null;
try {
addressMatcher = AddressUtil.getAddressMatcher(addressHolder.getAddress());
} catch (InvalidAddressException ignore) {
EmptyStatement.ignore(ignore);
}
if (addressMatcher != null) {
final Collection<String> matchedAddresses;
if (addressMatcher.isIPv4()) {
matchedAddresses = AddressUtil.getMatchingIpv4Addresses(addressMatcher);
} else {
// for IPv6 we are not doing wildcard matching
matchedAddresses = Collections.singleton(addressHolder.getAddress());
}
for (String matchedAddress : matchedAddresses) {
addPossibleAddresses(possibleAddresses, null, InetAddress.getByName(matchedAddress), port, count);
}
} else {
final String host = addressHolder.getAddress();
final InterfacesConfig interfaces = networkConfig.getInterfaces();
if (interfaces.isEnabled()) {
final InetAddress[] inetAddresses = InetAddress.getAllByName(host);
for (InetAddress inetAddress : inetAddresses) {
if (AddressUtil.matchAnyInterface(inetAddress.getHostAddress(),
interfaces.getInterfaces())) {
addPossibleAddresses(possibleAddresses, host, inetAddress, port, count);
}
}
} else {
addPossibleAddresses(possibleAddresses, host, null, port, count);
}
}
} catch (UnknownHostException e) {
logger.warning("Cannot resolve hostname '" + addressHolder.getAddress()
+ "'. Please make sure host is valid and reachable.");
if (logger.isFineEnabled()) {
logger.fine("Error during resolving possible target!", e);
}
}
}
possibleAddresses.remove(node.getThisAddress());
return possibleAddresses;
}
private void addPossibleAddresses(final Set<Address> possibleAddresses,
final String host, final InetAddress inetAddress,
final int port, final int count) throws UnknownHostException {
for (int i = 0; i < count; i++) {
int currentPort = port + i;
Address address;
if (host != null && inetAddress != null) {
address = new Address(host, inetAddress, currentPort);
} else if (host != null) {
address = new Address(host, currentPort);
} else {
address = new Address(inetAddress, currentPort);
}
if (!isLocalAddress(address)) {
possibleAddresses.add(address);
}
}
}
private boolean isLocalAddress(final Address address) throws UnknownHostException {
final Address thisAddress = node.getThisAddress();
final boolean local = thisAddress.getInetSocketAddress().equals(address.getInetSocketAddress());
if (logger.isFineEnabled()) {
logger.fine(address + " is local? " + local);
}
return local;
}
protected Collection<String> getMembers() {
return getConfigurationMembers(config);
}
public static Collection<String> getConfigurationMembers(Config config) {
final TcpIpConfig tcpIpConfig = config.getNetworkConfig().getJoin().getTcpIpConfig();
final Collection<String> configMembers = tcpIpConfig.getMembers();
final Set<String> possibleMembers = new HashSet<String>();
for (String member : configMembers) {
            // split members defined in tcp-ip configuration by comma (,), semicolon (;) or space ( ).
String[] members = member.split("[,; ]");
Collections.addAll(possibleMembers, members);
}
return possibleMembers;
}
@Override
public void searchForOtherClusters() {
final Collection<Address> possibleAddresses;
try {
possibleAddresses = getPossibleAddresses();
} catch (Throwable e) {
logger.severe(e);
return;
}
possibleAddresses.remove(node.getThisAddress());
possibleAddresses.removeAll(node.getClusterService().getMemberAddresses());
if (possibleAddresses.isEmpty()) {
return;
}
for (Address address : possibleAddresses) {
SplitBrainJoinMessage response = sendSplitBrainJoinMessage(address);
if (shouldMerge(response)) {
logger.warning(node.getThisAddress() + " is merging [tcp/ip] to " + address);
setTargetAddress(address);
startClusterMerge(address);
return;
}
}
}
@Override
public String getType() {
return "tcp-ip";
}
}
| tombujok/hazelcast | hazelcast/src/main/java/com/hazelcast/cluster/impl/TcpIpJoiner.java | Java | apache-2.0 | 22,055 |
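A note on the candidate rule above: isThisNodeMasterCandidate lets a node claim mastership only when its Address hash code is no larger than that of every connected, non-blacklisted peer, so the smallest hash wins deterministically. A minimal stand-alone sketch of that tie-break, using plain host:port strings in place of Hazelcast Address objects (an assumption made purely for illustration):
import java.util.Arrays;
import java.util.Collection;
public class MasterCandidateDemo {
    // Mirrors TcpIpJoiner.isThisNodeMasterCandidate: this node is a candidate
    // only if no reachable peer hashes lower than it does.
    static boolean isCandidate(String self, Collection<String> peers) {
        int thisHashCode = self.hashCode();
        for (String peer : peers) {
            if (thisHashCode > peer.hashCode()) {
                return false;
            }
        }
        return true;
    }
    public static void main(String[] args) {
        Collection<String> peers = Arrays.asList("10.0.0.2:5701", "10.0.0.3:5701");
        System.out.println(isCandidate("10.0.0.1:5701", peers));
    }
}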
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.perf.commands;
import com.thoughtworks.go.config.Agent;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.AgentService;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Optional;
import java.util.UUID;
public class RegisterAgentCommand extends AgentPerformanceCommand {
public RegisterAgentCommand(AgentService agentService) {
this.agentService = agentService;
}
@Override
Optional<String> execute() {
return registerAgent();
}
private Optional<String> registerAgent() {
InetAddress localHost = getInetAddress();
Agent agent = new Agent("Perf-Test-Agent-" + UUID.randomUUID(), localHost.getHostName(), localHost.getHostAddress(), UUID.randomUUID().toString());
AgentRuntimeInfo agentRuntimeInfo = AgentRuntimeInfo.fromServer(agent, false, "location", 233232L, "osx");
agentService.requestRegistration(agentRuntimeInfo);
return Optional.ofNullable(agent.getUuid());
}
private InetAddress getInetAddress() {
InetAddress localHost;
try {
localHost = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
return localHost;
}
}
| marques-work/gocd | server/src/test-shared/java/com/thoughtworks/go/server/perf/commands/RegisterAgentCommand.java | Java | apache-2.0 | 1,951 |
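A hypothetical way to drive the command above; the AgentService wiring is GoCD-specific, and obtainAgentService() is a made-up helper standing in for however the surrounding harness provides one:
// Sketch only: obtainAgentService() is a hypothetical helper, not a GoCD API.
AgentService agentService = obtainAgentService();
Optional<String> agentUuid = new RegisterAgentCommand(agentService).execute();
agentUuid.ifPresent(uuid -> System.out.println("Registered agent " + uuid));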
/*
* (C) Johannes Kepler University Linz, Austria, 2005-2013
* Institute for Systems Engineering and Automation (SEA)
*
* The software may only be used for academic purposes (teaching, scientific
* research). Any redistribution or commercialization of the software program
* and documentation (or any part thereof) requires prior written permission of
* the JKU. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* This software program and documentation are copyrighted by Johannes Kepler
* University Linz, Austria (the JKU). The software program and documentation
* are supplied AS IS, without any accompanying services from the JKU. The JKU
* does not warrant that the operation of the program will be uninterrupted or
* error-free. The end-user understands that the program was developed for
* research purposes and is advised not to rely exclusively on the program for
* any reason.
*
* IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
* SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE
* AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THE AUTHOR
* SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
* THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE AUTHOR HAS
* NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS,
* OR MODIFICATIONS.
*/
/*
* ArtifactIsNotACollectionException.java created on 13.03.2013
*
* (c) alexander noehrer
*/
package at.jku.sea.cloud.exceptions;
/**
* @author alexander noehrer
*/
public class ArtifactIsNotACollectionException extends RuntimeException {
private static final long serialVersionUID = 1L;
public ArtifactIsNotACollectionException(final long version, final long id) {
super("artifact (id=" + id + ", version=" + version + ") is not a collection");
}
}
| OnurKirkizoglu/master_thesis | at.jku.sea.cloud/src/main/java/at/jku/sea/cloud/exceptions/ArtifactIsNotACollectionException.java | Java | apache-2.0 | 2,111 |
/*
* Copyright 2010-2012 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ning.metrics.collector.filtering;
import com.ning.metrics.collector.endpoint.ParsedRequest;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
public class TestPatternSetFilter
{
@Test(groups = "fast")
public void testNullValue() throws Exception
{
final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor(null), createPatternSet("pattern1", "pattern2"));
Assert.assertEquals(filter.passesFilter(null, null), false);
}
@Test(groups = "fast")
public void testEmptySetPatternEventRESTRequestFilter() throws Exception
{
final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor("test-host"), Collections.<Pattern>emptySet());
Assert.assertEquals(filter.passesFilter(null, null), false);
}
@Test(groups = "fast")
public void testSinglePatternEventRESTRequestFilter() throws Exception
{
final Filter<ParsedRequest> filterShouldMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host"));
Assert.assertEquals(filterShouldMatch.passesFilter(null, null), true);
final Filter<ParsedRequest> filterDoesNotMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen"));
Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), false);
}
@Test(groups = "fast")
public void testMultiplePatternEventRESTRequestFilter() throws Exception
{
final Filter<ParsedRequest> trueFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host", "nothing"));
Assert.assertTrue(trueFilter.passesFilter(null, null));
final Filter<ParsedRequest> falseFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen", "nothing"));
Assert.assertFalse(falseFilter.passesFilter(null, null));
}
@Test(groups = "fast")
public void testSinglePatternEventInclusionFilter() throws Exception
{
final Filter<ParsedRequest> filterShouldMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("test-host"));
Assert.assertEquals(filterShouldMatch.passesFilter(null, null), false);
final Filter<ParsedRequest> filterDoesNotMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("mugen"));
Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), true);
}
private Set<Pattern> createPatternSet(final String... patterns)
{
final Set<Pattern> patternSet = new HashSet<Pattern>();
for (final String str : patterns) {
patternSet.add(Pattern.compile(str));
}
return patternSet;
}
private FieldExtractor createFieldExtractor(final String value)
{
return new FieldExtractor()
{
@Override
public String getField(final String eventName, final ParsedRequest annotation)
{
return value;
}
};
}
} | ning/collector | src/test/java/com/ning/metrics/collector/filtering/TestPatternSetFilter.java | Java | apache-2.0 | 3,816 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.geode.test.junit.rules.GfshShellConnectionRule;
import org.apache.geode.test.junit.categories.DistributedTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
@Category(DistributedTest.class)
public class ExportLogsStatsOverHttpDUnitTest extends ExportLogsStatsDUnitTest {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Override
public void connectIfNeeded() throws Exception {
if (!connector.isConnected())
connector.connect(httpPort, GfshShellConnectionRule.PortType.http);
}
@Test
public void testExportWithDir() throws Exception {
connectIfNeeded();
File dir = temporaryFolder.newFolder();
// export the logs
connector.executeCommand("export logs --dir=" + dir.getAbsolutePath());
// verify that the message contains a path to the user.dir
String message = connector.getGfshOutput();
assertThat(message).contains("Logs exported to: ");
assertThat(message).contains(dir.getAbsolutePath());
String zipPath = getZipPathFromCommandResult(message);
Set<String> actualZipEntries =
new ZipFile(zipPath).stream().map(ZipEntry::getName).collect(Collectors.toSet());
assertThat(actualZipEntries).isEqualTo(expectedZipEntries);
// also verify that the zip file on locator is deleted
assertThat(Arrays.stream(locator.getWorkingDir().listFiles())
.filter(file -> file.getName().endsWith(".zip")).collect(Collectors.toSet())).isEmpty();
}
protected String getZipPathFromCommandResult(String message) {
return message.replaceAll("Logs exported to: ", "").trim();
}
}
| pivotal-amurmann/geode | geode-web/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsStatsOverHttpDUnitTest.java | Java | apache-2.0 | 2,773 |
/*
* Copyright 2014 Click Travel Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.clicktravel.infrastructure.persistence.aws.dynamodb;
import static com.clicktravel.common.random.Randoms.randomId;
import static com.clicktravel.common.random.Randoms.randomString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.whenNew;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.document.spec.DeleteItemSpec;
import com.amazonaws.services.dynamodbv2.document.spec.GetItemSpec;
import com.amazonaws.services.dynamodbv2.document.spec.PutItemSpec;
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;
import com.clicktravel.cheddar.infrastructure.persistence.database.ItemId;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.DatabaseSchemaHolder;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.ItemConfiguration;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.NonExistentItemException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.OptimisticLockException;
import com.clicktravel.common.random.Randoms;
@RunWith(PowerMockRunner.class)
@PrepareForTest({ DynamoDocumentStoreTemplate.class })
public class DynamoDocumentStoreTemplateTest {
private DatabaseSchemaHolder mockDatabaseSchemaHolder;
private String schemaName;
private String tableName;
private AmazonDynamoDB mockAmazonDynamoDbClient;
private DynamoDB mockDynamoDBClient;
@Before
public void setup() throws Exception {
schemaName = randomString(10);
tableName = randomString(10);
mockDatabaseSchemaHolder = mock(DatabaseSchemaHolder.class);
when(mockDatabaseSchemaHolder.schemaName()).thenReturn(schemaName);
mockAmazonDynamoDbClient = mock(AmazonDynamoDB.class);
mockDynamoDBClient = mock(DynamoDB.class);
whenNew(DynamoDB.class).withParameterTypes(AmazonDynamoDB.class).withArguments(eq(mockAmazonDynamoDbClient))
.thenReturn(mockDynamoDBClient);
}
@SuppressWarnings("deprecation")
@Test
public void shouldCreate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.create(stubItem);
// Then
final ArgumentCaptor<PutItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(PutItemSpec.class);
verify(mockTable).putItem(getItemRequestCaptor.capture());
final PutItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(itemId.value(), spec.getItem().get("id"));
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotCreate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
doThrow(RuntimeException.class).when(mockTable).putItem(any(PutItemSpec.class));
RuntimeException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.create(stubItem);
} catch (final RuntimeException runtimeException) {
thrownException = runtimeException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldRead_withItemIdAndItemClass() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final Item mockTableItem = mock(Item.class);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(mockTableItem);
final StubItem stubItem = generateRandomStubItem(itemId);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
// Then
final ArgumentCaptor<GetItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(GetItemSpec.class);
verify(mockTable).getItem(getItemRequestCaptor.capture());
final GetItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(1, spec.getKeyComponents().size());
assertEquals(itemId.value(), spec.getKeyComponents().iterator().next().getValue());
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotRead_withNonExistentItemExceptionNoItem() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(null);
NonExistentItemException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
} catch (final NonExistentItemException nonExistentItemException) {
thrownException = nonExistentItemException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotRead_withNonExistentItemExceptionNoContent() throws Exception {
// Given
final ItemId itemId = new ItemId(randomId());
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final Item mockTableItem = mock(Item.class);
when(mockTable.getItem(any(GetItemSpec.class))).thenReturn(mockTableItem);
when(mockTableItem.toJSON()).thenReturn("");
NonExistentItemException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.read(itemId, StubItem.class);
} catch (final NonExistentItemException nonExistentItemException) {
thrownException = nonExistentItemException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldUpdate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
// When
final StubItem returnedItem = dynamoDocumentStoreTemplate.update(stubItem);
// Then
final ArgumentCaptor<PutItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(PutItemSpec.class);
verify(mockTable).putItem(getItemRequestCaptor.capture());
final PutItemSpec spec = getItemRequestCaptor.getValue();
assertEquals(itemId.value(), spec.getItem().get("id"));
assertEquals(itemId.value(), returnedItem.getId());
assertEquals(stubItem.getStringProperty(), returnedItem.getStringProperty());
assertEquals(stubItem.getStringProperty2(), returnedItem.getStringProperty2());
assertEquals(stubItem.getStringSetProperty(), returnedItem.getStringSetProperty());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotUpdate_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
final Item mockTableItem = mock(Item.class);
when(mockTableItem.toJSON()).thenReturn(dynamoDocumentStoreTemplate.itemToString(stubItem));
doThrow(ConditionalCheckFailedException.class).when(mockTable).putItem(any(PutItemSpec.class));
OptimisticLockException thrownException = null;
// When
try {
dynamoDocumentStoreTemplate.update(stubItem);
} catch (final OptimisticLockException optimisticLockException) {
thrownException = optimisticLockException;
}
// Then
assertNotNull(thrownException);
}
@SuppressWarnings("deprecation")
@Test
public void shouldDelete_withItem() {
// Given
final ItemId itemId = new ItemId(randomId());
final StubItem stubItem = generateRandomStubItem(itemId);
final ItemConfiguration itemConfiguration = new ItemConfiguration(StubItem.class, tableName);
final Collection<ItemConfiguration> itemConfigurations = Arrays.asList(itemConfiguration);
when(mockDatabaseSchemaHolder.itemConfigurations()).thenReturn(itemConfigurations);
final Table mockTable = mock(Table.class);
when(mockDynamoDBClient.getTable(any(String.class))).thenReturn(mockTable);
final DynamoDocumentStoreTemplate dynamoDocumentStoreTemplate = new DynamoDocumentStoreTemplate(
mockDatabaseSchemaHolder);
dynamoDocumentStoreTemplate.initialize(mockAmazonDynamoDbClient);
// When
dynamoDocumentStoreTemplate.delete(stubItem);
// Then
final ArgumentCaptor<DeleteItemSpec> getItemRequestCaptor = ArgumentCaptor.forClass(DeleteItemSpec.class);
verify(mockTable).deleteItem(getItemRequestCaptor.capture());
}
private StubItem generateRandomStubItem(final ItemId itemId) {
final StubItem item = new StubItem();
item.setBooleanProperty(Randoms.randomBoolean());
item.setId(itemId.value());
item.setStringProperty(Randoms.randomString());
item.setStringProperty2(Randoms.randomString());
item.setVersion(Randoms.randomLong());
final Set<String> stringSet = new HashSet<String>();
for (int i = 0; i < Randoms.randomInt(20); i++) {
stringSet.add(Randoms.randomString());
}
item.setStringSetProperty(stringSet);
return item;
}
}
| clicktravel-martindimitrov/Cheddar | cheddar/cheddar-integration-aws/src/test/java/com/clicktravel/infrastructure/persistence/aws/dynamodb/DynamoDocumentStoreTemplateTest.java | Java | apache-2.0 | 16,505 |
/*
* RemoveRelationKnowhow.java
* Created on 2013/06/28
*
* Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tubame.knowhow.plugin.ui.view.remove;
import tubame.common.util.CmnStringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tubame.knowhow.plugin.logic.KnowhowManagement;
import tubame.knowhow.plugin.model.view.CategoryViewType;
import tubame.knowhow.plugin.model.view.KnowhowDetailType;
import tubame.knowhow.plugin.model.view.KnowhowViewType;
import tubame.knowhow.plugin.model.view.PortabilityKnowhowListViewOperation;
import tubame.knowhow.plugin.ui.editor.multi.MaintenanceKnowhowMultiPageEditor;
import tubame.knowhow.plugin.ui.editor.multi.docbook.KnowhowDetailEditor;
import tubame.knowhow.util.PluginUtil;
/**
 * Removes the related items of a know-how entry that is being deleted.<br/>
 * Deletes the know-how detail data whose key matches the deleted entry's
 * detail reference key, and removes the entry's own key from the reference
 * list held by its parent category.<br/>
*/
public class RemoveRelationKnowhow implements RemoveRelationItemStrategy {
/** Logger */
private static final Logger LOGGER = LoggerFactory
.getLogger(RemoveRelationKnowhow.class);
/** Know-how entry view item */
private KnowhowViewType knowhowViewType;
/** Deleted items */
private PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation;
/**
* Constructor.<br/>
*
* @param portabilityKnowhowListViewOperation
* Deleted items
*/
public RemoveRelationKnowhow(
PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation) {
this.portabilityKnowhowListViewOperation = portabilityKnowhowListViewOperation;
this.knowhowViewType = (KnowhowViewType) portabilityKnowhowListViewOperation
.getKnowhowViewType();
}
/**
* {@inheritDoc}
*/
@Override
public void removeRelationItem() {
RemoveRelationKnowhow.LOGGER.debug(CmnStringUtil.EMPTY);
removeKnowhowDetail();
removeEntryViewItem();
}
/**
     * Deletes the reference key pointing to this item from the parent category
     * registered in the entry view.<br/>
*
*/
private void removeEntryViewItem() {
CategoryViewType categoryViewType = (CategoryViewType) portabilityKnowhowListViewOperation
.getParent().getKnowhowViewType();
String removeTargetKey = null;
for (String knowhowRefKey : categoryViewType.getKnowhowRefKeies()) {
if (knowhowViewType.getRegisterKey().equals(knowhowRefKey)) {
removeTargetKey = knowhowRefKey;
}
}
if (removeTargetKey != null) {
categoryViewType.getKnowhowRefKeies().remove(removeTargetKey);
}
}
/**
     * Deletes the know-how detail entry whose id matches this item's detail
     * reference key from the know-how detail data list.<br/>
*
*/
private void removeKnowhowDetail() {
KnowhowDetailType removeTargetItem = null;
for (KnowhowDetailType knowhowDetailType : KnowhowManagement
.getKnowhowDetailTypes()) {
if (knowhowDetailType.getKnowhowDetailId().equals(
knowhowViewType.getKnowhowDetailRefKey())) {
removeTargetItem = knowhowDetailType;
}
}
if (removeTargetItem != null) {
KnowhowManagement.getKnowhowDetailTypes().remove(removeTargetItem);
clearKnowhoweDetaileditor(removeTargetItem);
}
}
/**
* Initialization of know-how detail page editor.<br/>
*
* @param removeTargetItem
* Deleted items
*/
private void clearKnowhoweDetaileditor(KnowhowDetailType removeTargetItem) {
MaintenanceKnowhowMultiPageEditor knowhowMultiPageEditor = PluginUtil
.getKnowhowEditor();
KnowhowDetailEditor detailEditor = knowhowMultiPageEditor
.getKnowhowDetailEditor();
if (detailEditor.getKnowhowDetailType() != null) {
if (removeTargetItem.getKnowhowDetailId().equals(
detailEditor.getKnowhowDetailType().getKnowhowDetailId())) {
knowhowMultiPageEditor.clearKnowhowDetail();
}
}
}
}
| azkaoru/migration-tool | src/tubame.knowhow/src/tubame/knowhow/plugin/ui/view/remove/RemoveRelationKnowhow.java | Java | apache-2.0 | 5,041 |
/*
* Copyright (C) 2013 Leszek Mzyk
* Modifications Copyright (C) 2015 eccyan <g00.eccyan@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.eccyan.widget;
import android.content.Context;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
/**
 * A ViewPager subclass enabling infinite scrolling of the viewPager elements
*
* When used for paginating views (in opposite to fragments), no code changes
* should be needed only change xml's from <android.support.v4.view.ViewPager>
* to <com.imbryk.viewPager.LoopViewPager>
*
* If "blinking" can be seen when paginating to first or last view, simply call
 * setBoundaryCaching( true ), or change DEFAULT_BOUNDARY_CASHING to true
*
* When using a FragmentPagerAdapter or FragmentStatePagerAdapter,
* additional changes in the adapter must be done.
* The adapter must be prepared to create 2 extra items e.g.:
*
* The original adapter creates 4 items: [0,1,2,3]
* The modified adapter will have to create 6 items [0,1,2,3,4,5]
* with mapping realPosition=(position-1)%count
* [0->3, 1->0, 2->1, 3->2, 4->3, 5->0]
*/
public class SpinningViewPager extends ViewPager {
private static final boolean DEFAULT_BOUNDARY_CASHING = false;
OnPageChangeListener mOuterPageChangeListener;
private LoopPagerAdapterWrapper mAdapter;
private boolean mBoundaryCaching = DEFAULT_BOUNDARY_CASHING;
/**
* helper function which may be used when implementing FragmentPagerAdapter
*
* @param position
* @param count
* @return (position-1)%count
*/
public static int toRealPosition( int position, int count ){
position = position-1;
if( position < 0 ){
position += count;
}else{
position = position%count;
}
return position;
}
/**
* If set to true, the boundary views (i.e. first and last) will never be destroyed
* This may help to prevent "blinking" of some views
*
* @param flag
*/
public void setBoundaryCaching(boolean flag) {
mBoundaryCaching = flag;
if (mAdapter != null) {
mAdapter.setBoundaryCaching(flag);
}
}
@Override
public void setAdapter(PagerAdapter adapter) {
mAdapter = new LoopPagerAdapterWrapper(adapter);
mAdapter.setBoundaryCaching(mBoundaryCaching);
super.setAdapter(mAdapter);
}
@Override
public PagerAdapter getAdapter() {
return mAdapter != null ? mAdapter.getRealAdapter() : mAdapter;
}
@Override
public int getCurrentItem() {
return mAdapter != null ? mAdapter.toRealPosition(super.getCurrentItem()) : 0;
}
public void setCurrentItem(int item, boolean smoothScroll) {
int realItem = mAdapter.toInnerPosition(item);
super.setCurrentItem(realItem, smoothScroll);
}
@Override
public void setCurrentItem(int item) {
if (getCurrentItem() != item) {
setCurrentItem(item, true);
}
}
@Override
public void setOnPageChangeListener(OnPageChangeListener listener) {
mOuterPageChangeListener = listener;
    }
public SpinningViewPager(Context context) {
super(context);
init();
}
public SpinningViewPager(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
super.setOnPageChangeListener(onPageChangeListener);
}
private OnPageChangeListener onPageChangeListener = new OnPageChangeListener() {
private float mPreviousOffset = -1;
private float mPreviousPosition = -1;
@Override
public void onPageSelected(int position) {
int realPosition = mAdapter.toRealPosition(position);
if (mPreviousPosition != realPosition) {
mPreviousPosition = realPosition;
if (mOuterPageChangeListener != null) {
mOuterPageChangeListener.onPageSelected(realPosition);
}
}
}
@Override
public void onPageScrolled(int position, float positionOffset,
int positionOffsetPixels) {
int realPosition = position;
if (mAdapter != null) {
realPosition = mAdapter.toRealPosition(position);
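                // At the synthetic first/last page with no scroll in progress,
                // snap without animation to the equivalent real page.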
if (positionOffset == 0
&& mPreviousOffset == 0
&& (position == 0 || position == mAdapter.getCount() - 1)) {
setCurrentItem(realPosition, false);
}
}
mPreviousOffset = positionOffset;
if (mOuterPageChangeListener != null) {
mOuterPageChangeListener.onPageScrolled(realPosition,
positionOffset, positionOffsetPixels);
}
}
@Override
public void onPageScrollStateChanged(int state) {
if (mAdapter != null) {
int position = SpinningViewPager.super.getCurrentItem();
int realPosition = mAdapter.toRealPosition(position);
if (state == ViewPager.SCROLL_STATE_IDLE
&& (position == 0 || position == mAdapter.getCount() - 1)) {
setCurrentItem(realPosition, false);
}
}
if (mOuterPageChangeListener != null) {
mOuterPageChangeListener.onPageScrollStateChanged(state);
}
}
};
}
| eccyan/SpinningTabStrip | spinning/src/main/java/com/eccyan/widget/SpinningViewPager.java | Java | apache-2.0 | 6,099 |
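The class javadoc above documents the wrapper mapping realPosition = (position-1) % count. The table it gives can be verified with a dependency-free copy of the toRealPosition helper:
public class LoopMappingDemo {
    // Copy of SpinningViewPager.toRealPosition, duplicated here so the demo
    // runs without the Android support library on the classpath.
    static int toRealPosition(int position, int count) {
        position = position - 1;
        if (position < 0) {
            position += count;
        } else {
            position = position % count;
        }
        return position;
    }
    public static void main(String[] args) {
        int count = 4; // the real adapter has 4 items; the wrapper exposes 6
        for (int position = 0; position < count + 2; position++) {
            System.out.println(position + " -> " + toRealPosition(position, count));
        }
        // Prints: 0 -> 3, 1 -> 0, 2 -> 1, 3 -> 2, 4 -> 3, 5 -> 0
    }
}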
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.util.core.io;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.apache.etch.util.FlexBuffer;
import org.apache.etch.util.core.Who;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/** Test UdpConnection. */
public class TestUdpConnection
{
/** @throws Exception */
@Before @Ignore
public void init() throws Exception
{
aph = new MyPacketHandler();
ac = new UdpConnection( "udp://localhost:4011" );
ac.setSession( aph );
ac.start();
ac.waitUp( 4000 );
System.out.println( "ac up" );
bph = new MyPacketHandler();
bc = new UdpConnection( "udp://localhost:4010" );
bc.setSession( bph );
bc.start();
bc.waitUp( 4000 );
System.out.println( "bc up" );
}
/** @throws Exception */
@After @Ignore
public void fini() throws Exception
{
ac.close( false );
bc.close( false );
}
private MyPacketHandler aph;
private UdpConnection ac;
private MyPacketHandler bph;
private UdpConnection bc;
/** @throws Exception */
@Test @Ignore
public void blah() throws Exception
{
assertEquals( What.UP, aph.what );
assertEquals( What.UP, bph.what );
FlexBuffer buf = new FlexBuffer();
buf.put( 1 );
buf.put( 2 );
buf.put( 3 );
buf.put( 4 );
buf.put( 5 );
buf.setIndex( 0 );
ac.transportPacket( null, buf );
Thread.sleep( 500 );
assertEquals( What.PACKET, bph.what );
assertNotNull( bph.xsender );
assertNotSame( buf, bph.xbuf );
assertEquals( 0, bph.xbuf.index() );
assertEquals( 5, bph.xbuf.length() );
assertEquals( 1, bph.xbuf.get() );
assertEquals( 2, bph.xbuf.get() );
assertEquals( 3, bph.xbuf.get() );
assertEquals( 4, bph.xbuf.get() );
assertEquals( 5, bph.xbuf.get() );
}
/** */
public enum What
{
/** */ UP,
/** */ PACKET,
/** */ DOWN
}
/**
* receive packets from the udp connection
*/
public static class MyPacketHandler implements SessionPacket
{
/** */
public What what;
/** */
public Who xsender;
/** */
public FlexBuffer xbuf;
public void sessionPacket( Who sender, FlexBuffer buf ) throws Exception
{
assertEquals( What.UP, what );
what = What.PACKET;
xsender = sender;
xbuf = buf;
}
public void sessionControl( Object control, Object value )
{
// ignore.
}
public void sessionNotify( Object event )
{
if (event.equals( Session.UP ))
{
assertNull( what );
what = What.UP;
return;
}
if (event.equals( Session.DOWN ))
{
assertTrue( what == What.UP || what == What.PACKET );
what = What.DOWN;
return;
}
}
public Object sessionQuery( Object query )
{
// ignore.
return null;
}
}
}
| OBIGOGIT/etch | util/src/test/java/org/apache/etch/util/core/io/TestUdpConnection.java | Java | apache-2.0 | 3,671 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.sql.generator.implementation.grammar.builders.query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.polygene.library.sql.generator.grammar.builders.query.OrderByBuilder;
import org.apache.polygene.library.sql.generator.grammar.query.OrderByClause;
import org.apache.polygene.library.sql.generator.grammar.query.SortSpecification;
import org.apache.polygene.library.sql.generator.implementation.grammar.common.SQLBuilderBase;
import org.apache.polygene.library.sql.generator.implementation.grammar.query.OrderByClauseImpl;
import org.apache.polygene.library.sql.generator.implementation.transformation.spi.SQLProcessorAggregator;
/**
* @author Stanislav Muhametsin
*/
public class OrderByBuilderImpl extends SQLBuilderBase
implements OrderByBuilder
{
private final List<SortSpecification> _sortSpecs;
public OrderByBuilderImpl( SQLProcessorAggregator processor )
{
super( processor );
this._sortSpecs = new ArrayList<SortSpecification>();
}
public OrderByBuilder addSortSpecs( SortSpecification... specs )
{
for( SortSpecification spec : specs )
{
Objects.requireNonNull( spec, "specification" );
}
this._sortSpecs.addAll( Arrays.asList( specs ) );
return this;
}
public List<SortSpecification> getSortSpecs()
{
return Collections.unmodifiableList( this._sortSpecs );
}
public OrderByClause createExpression()
{
return new OrderByClauseImpl( this.getProcessor(), this._sortSpecs );
}
}
| apache/zest-qi4j | libraries/sql-generator/src/main/java/org/apache/polygene/library/sql/generator/implementation/grammar/builders/query/OrderByBuilderImpl.java | Java | apache-2.0 | 2,500 |
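Typical usage of the builder above, assuming the surrounding SQL generator module supplies a SQLProcessorAggregator and SortSpecification instances (processor, spec1 and spec2 are assumptions here; their construction is vendor-specific):
// Sketch only: processor, spec1 and spec2 come from the surrounding module.
OrderByBuilder builder = new OrderByBuilderImpl(processor);
OrderByClause orderBy = builder.addSortSpecs(spec1, spec2).createExpression();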
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.shell.event.ParseResult;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.internal.cli.shell.GfshExecutionStrategy;
import org.apache.geode.management.internal.cli.shell.OperationInvoker;
/**
 * Immutable representation of the outcome of parsing a given shell line. Extends
* {@link ParseResult} to add a field to specify the command string that was input by the user.
*
* <p>
* Some commands are required to be executed on a remote GemFire managing member. These should be
* marked with the annotation {@link CliMetaData#shellOnly()} set to <code>false</code>.
* {@link GfshExecutionStrategy} will detect whether the command is a remote command and send it to
* ManagementMBean via {@link OperationInvoker}.
*
*
* @since GemFire 7.0
*/
public class GfshParseResult extends ParseResult {
private String userInput;
private String commandName;
private Map<String, String> paramValueStringMap = new HashMap<>();
/**
* Creates a GfshParseResult instance to represent parsing outcome.
*
* @param method Method associated with the command
* @param instance Instance on which this method has to be executed
* @param arguments arguments of the method
* @param userInput user specified commands string
*/
protected GfshParseResult(final Method method, final Object instance, final Object[] arguments,
final String userInput) {
super(method, instance, arguments);
this.userInput = userInput.trim();
CliCommand cliCommand = method.getAnnotation(CliCommand.class);
commandName = cliCommand.value()[0];
Annotation[][] parameterAnnotations = method.getParameterAnnotations();
if (arguments == null) {
return;
}
for (int i = 0; i < arguments.length; i++) {
Object argument = arguments[i];
if (argument == null) {
continue;
}
CliOption cliOption = getCliOption(parameterAnnotations, i);
String argumentAsString;
if (argument instanceof Object[]) {
argumentAsString = StringUtils.join((Object[]) argument, ",");
} else {
argumentAsString = argument.toString();
}
      // this map is used for easy access to option values in String form.
      // It's used in tests and for validating option values before execution.
paramValueStringMap.put(cliOption.key()[0], argumentAsString);
}
}
/**
* @return the userInput
*/
public String getUserInput() {
return userInput;
}
/**
* Used only in tests and command pre-execution for validating arguments
*/
public String getParamValue(String param) {
return paramValueStringMap.get(param);
}
/**
* Used only in tests and command pre-execution for validating arguments
*
* @return the unmodifiable paramValueStringMap
*/
public Map<String, String> getParamValueStrings() {
return Collections.unmodifiableMap(paramValueStringMap);
}
public String getCommandName() {
return commandName;
}
private CliOption getCliOption(Annotation[][] parameterAnnotations, int index) {
Annotation[] annotations = parameterAnnotations[index];
for (Annotation annotation : annotations) {
if (annotation instanceof CliOption) {
return (CliOption) annotation;
}
}
return null;
}
}
| pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/GfshParseResult.java | Java | apache-2.0 | 4,495 |
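One detail worth noting in the constructor above: array-valued option arguments are flattened with commas before being stored in paramValueStringMap, via commons-lang's StringUtils.join. A minimal illustration of that flattening step:
import org.apache.commons.lang.StringUtils;
public class JoinDemo {
    public static void main(String[] args) {
        Object[] groups = {"group1", "group2"};
        // Same call GfshParseResult uses for array-valued option arguments.
        System.out.println(StringUtils.join(groups, ",")); // prints: group1,group2
    }
}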
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.corecomponents;
import java.awt.Insets;
import java.io.File;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import javax.swing.BorderFactory;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.UnsupportedLookAndFeelException;
import org.netbeans.spi.sendopts.OptionProcessor;
import org.netbeans.swing.tabcontrol.plaf.DefaultTabbedContainerUI;
import org.openide.modules.ModuleInstall;
import org.openide.util.Lookup;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CaseActionException;
import org.sleuthkit.autopsy.casemodule.OpenFromArguments;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Manages this module's life cycle. Opens the startup dialog during startup.
*/
public class Installer extends ModuleInstall {
private static Installer instance;
private static final Logger logger = Logger.getLogger(Installer.class.getName());
public synchronized static Installer getDefault() {
if (instance == null) {
instance = new Installer();
}
return instance;
}
private Installer() {
super();
}
@Override
public void restored() {
super.restored();
setupLAF();
UIManager.put("ViewTabDisplayerUI", "org.sleuthkit.autopsy.corecomponents.NoTabsTabDisplayerUI");
UIManager.put(DefaultTabbedContainerUI.KEY_VIEW_CONTENT_BORDER, BorderFactory.createEmptyBorder());
UIManager.put("TabbedPane.contentBorderInsets", new Insets(0, 0, 0, 0));
/*
* Open the passed in case, if an aut file was double clicked.
*/
WindowManager.getDefault().invokeWhenUIReady(() -> {
Collection<? extends OptionProcessor> processors = Lookup.getDefault().lookupAll(OptionProcessor.class);
for (OptionProcessor processor : processors) {
if (processor instanceof OpenFromArguments) {
OpenFromArguments argsProcessor = (OpenFromArguments) processor;
final String caseFile = argsProcessor.getDefaultArg();
if (caseFile != null && !caseFile.equals("") && caseFile.endsWith(".aut") && new File(caseFile).exists()) { //NON-NLS
new Thread(() -> {
// Create case.
try {
Case.open(caseFile);
} catch (Exception ex) {
logger.log(Level.SEVERE, "Error opening case: ", ex); //NON-NLS
}
}).start();
return;
}
}
}
Case.invokeStartupDialog(); // bring up the startup dialog
});
}
@Override
public void uninstalled() {
super.uninstalled();
}
@Override
public void close() {
new Thread(() -> {
try {
if (Case.isCaseOpen()) {
Case.getCurrentCase().closeCase();
}
} catch (CaseActionException | IllegalStateException unused) {
// Exception already logged. Shutting down, no need to do popup.
}
}).start();
}
private void setupLAF() {
//TODO apply custom skinning
//UIManager.put("nimbusBase", new Color());
//UIManager.put("nimbusBlueGrey", new Color());
//UIManager.put("control", new Color());
if (System.getProperty("os.name").toLowerCase().contains("mac")) { //NON-NLS
setupMacOsXLAF();
}
}
/**
     * Set the look and feel to Nimbus (when available), but keep Aqua
* dependent elements that set the Menu Bar to be in the correct place on
* Mac OS X.
*/
private void setupMacOsXLAF() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
logger.log(Level.WARNING, "Unable to set theme. ", ex); //NON-NLS
}
final String[] UI_MENU_ITEM_KEYS = new String[]{"MenuBarUI", //NON-NLS
};
Map<Object, Object> uiEntries = new TreeMap<>();
// Store the keys that deal with menu items
for (String key : UI_MENU_ITEM_KEYS) {
uiEntries.put(key, UIManager.get(key));
}
        //use Nimbus if available
for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) { //NON-NLS
try {
UIManager.setLookAndFeel(info.getClassName());
} catch (ClassNotFoundException | InstantiationException |
IllegalAccessException | UnsupportedLookAndFeelException ex) {
logger.log(Level.WARNING, "Unable to set theme. ", ex); //NON-NLS
}
break;
}
}
// Overwrite the Metal menu item keys to use the Aqua versions
uiEntries.entrySet().stream().forEach((entry) -> {
UIManager.put(entry.getKey(), entry.getValue());
});
}
}
| mhmdfy/autopsy | Core/src/org/sleuthkit/autopsy/corecomponents/Installer.java | Java | apache-2.0 | 6,087 |