| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3 to 1.04M | stringlengths 5 to 109 | stringlengths 6 to 306 | stringclasses 1 value | stringclasses 15 values | int64 3 to 1.04M |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.core.mdb;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Session;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.TreeMap;
public class MdbInvoker implements MessageListener {
private final Map<String, Method> signatures = new TreeMap<String, Method>();
private final Object target;
private Connection connection;
private Session session;
private ConnectionFactory connectionFactory;
public MdbInvoker(ConnectionFactory connectionFactory, Object target) throws JMSException {
this.target = target;
this.connectionFactory = connectionFactory;
for (Method method : target.getClass().getMethods()) {
String signature = MdbUtil.getSignature(method);
signatures.put(signature, method);
}
}
public synchronized void destroy() {
MdbUtil.close(session);
session = null;
MdbUtil.close(connection);
connection = null;
}
private synchronized Session getSession() throws JMSException {
connection = connectionFactory.createConnection();
connection.start();
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
return session;
}
public void onMessage(Message message) {
if (!(message instanceof ObjectMessage)) return;
try {
Session session = getSession();
if (session == null) throw new IllegalStateException("Invoker has been destroyed");
if (message == null) throw new NullPointerException("request message is null");
if (!(message instanceof ObjectMessage)) throw new IllegalArgumentException("Expected an ObjectMessage request but got a " + message.getClass().getName());
ObjectMessage objectMessage = (ObjectMessage) message;
Serializable object = objectMessage.getObject();
if (object == null) throw new NullPointerException("object in ObjectMessage is null");
if (!(object instanceof Map)) {
throw new IllegalArgumentException("Expected a Map contained in the ObjectMessage request but got a " + object.getClass().getName());
}
Map request = (Map) object;
String signature = (String) request.get("method");
Method method = signatures.get(signature);
Object[] args = (Object[]) request.get("args");
boolean exception = false;
Object result = null;
try {
result = method.invoke(target, args);
} catch (IllegalAccessException e) {
result = e;
exception = true;
} catch (InvocationTargetException e) {
result = e.getCause();
if (result == null) result = e;
exception = true;
}
MessageProducer producer = null;
try {
// create response
Map<String, Object> response = new TreeMap<String, Object>();
if (exception) {
response.put("exception", "true");
}
response.put("return", result);
// create response message
ObjectMessage resMessage = session.createObjectMessage();
resMessage.setJMSCorrelationID(objectMessage.getJMSCorrelationID());
resMessage.setObject((Serializable) response);
// send response message
producer = session.createProducer(objectMessage.getJMSReplyTo());
producer.send(resMessage);
} catch (Exception e) {
e.printStackTrace();
} finally {
MdbUtil.close(producer);
destroy();
}
} catch (Throwable e) {
e.printStackTrace();
}
}
}
| apache/openejb | container/openejb-core/src/test/java/org/apache/openejb/core/mdb/MdbInvoker.java | Java | apache-2.0 | 5,024 |
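The invoker above implements a small request/reply convention over JMS: the request ObjectMessage carries a Map with a "method" entry (the signature string produced by MdbUtil.getSignature) and an "args" array, and the reply sent to JMSReplyTo echoes the correlation ID and carries "return" plus an "exception" flag. A minimal client-side sketch of that convention might look like the following; the temporary reply queue, message selector and timeout are illustrative assumptions, not part of the file above.

import javax.jms.*;
import java.util.Map;
import java.util.TreeMap;
import java.util.UUID;

public class MdbInvokerClientSketch {
    // Sends one invocation request and waits for the correlated reply (sketch, assumed conventions only).
    public static Object call(ConnectionFactory factory, Destination requestQueue,
                              String signature, Object[] args) throws JMSException {
        Connection connection = factory.createConnection();
        try {
            connection.start();
            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            TemporaryQueue replyQueue = session.createTemporaryQueue();
            // Build the request map exactly as MdbInvoker.onMessage reads it back.
            TreeMap<String, Object> request = new TreeMap<String, Object>();
            request.put("method", signature);
            request.put("args", args);
            ObjectMessage message = session.createObjectMessage();
            message.setObject(request);
            message.setJMSReplyTo(replyQueue);
            message.setJMSCorrelationID(UUID.randomUUID().toString());
            session.createProducer(requestQueue).send(message);
            // Wait for the reply carrying the same correlation ID, then unpack "exception"/"return".
            MessageConsumer consumer = session.createConsumer(replyQueue,
                    "JMSCorrelationID = '" + message.getJMSCorrelationID() + "'");
            ObjectMessage reply = (ObjectMessage) consumer.receive(30000);
            if (reply == null) throw new JMSException("No response received");
            Map response = (Map) reply.getObject();
            if ("true".equals(response.get("exception"))) {
                throw new JMSException("Remote invocation failed: " + response.get("return"));
            }
            return response.get("return");
        } finally {
            connection.close();
        }
    }
}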
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.collector.cluster.route;
import com.navercorp.pinpoint.collector.cluster.ClusterPointLocator;
import com.navercorp.pinpoint.collector.cluster.TargetClusterPoint;
import com.navercorp.pinpoint.collector.cluster.route.filter.RouteFilter;
import com.navercorp.pinpoint.rpc.Future;
import com.navercorp.pinpoint.rpc.ResponseMessage;
import com.navercorp.pinpoint.thrift.dto.command.TCommandTransferResponse;
import com.navercorp.pinpoint.thrift.dto.command.TRouteResult;
import com.navercorp.pinpoint.thrift.io.TCommandTypeVersion;
import org.apache.thrift.TBase;
/**
* @author koo.taejin
* @author HyunGil Jeong
*/
public class DefaultRouteHandler extends AbstractRouteHandler<RequestEvent> {
private final RouteFilterChain<RequestEvent> requestFilterChain;
private final RouteFilterChain<ResponseEvent> responseFilterChain;
public DefaultRouteHandler(ClusterPointLocator<TargetClusterPoint> targetClusterPointLocator,
RouteFilterChain<RequestEvent> requestFilterChain,
RouteFilterChain<ResponseEvent> responseFilterChain) {
super(targetClusterPointLocator);
this.requestFilterChain = requestFilterChain;
this.responseFilterChain = responseFilterChain;
}
@Override
public void addRequestFilter(RouteFilter<RequestEvent> filter) {
this.requestFilterChain.addLast(filter);
}
@Override
public void addResponseFilter(RouteFilter<ResponseEvent> filter) {
this.responseFilterChain.addLast(filter);
}
@Override
public TCommandTransferResponse onRoute(RequestEvent event) {
requestFilterChain.doEvent(event);
TCommandTransferResponse routeResult = onRoute0(event);
responseFilterChain.doEvent(new ResponseEvent(event, event.getRequestId(), routeResult));
return routeResult;
}
private TCommandTransferResponse onRoute0(RequestEvent event) {
TBase<?,?> requestObject = event.getRequestObject();
if (requestObject == null) {
return createResponse(TRouteResult.EMPTY_REQUEST);
}
TargetClusterPoint clusterPoint = findClusterPoint(event.getDeliveryCommand());
if (clusterPoint == null) {
return createResponse(TRouteResult.NOT_FOUND);
}
TCommandTypeVersion commandVersion = TCommandTypeVersion.getVersion(clusterPoint.gerVersion());
if (!commandVersion.isSupportCommand(requestObject)) {
return createResponse(TRouteResult.NOT_SUPPORTED_REQUEST);
}
Future<ResponseMessage> future = clusterPoint.request(event.getDeliveryCommand().getPayload());
boolean isCompleted = future.await();
if (!isCompleted) {
return createResponse(TRouteResult.TIMEOUT);
}
ResponseMessage responseMessage = future.getResult();
if (responseMessage == null) {
return createResponse(TRouteResult.EMPTY_RESPONSE);
}
byte[] responsePayload = responseMessage.getMessage();
if (responsePayload == null || responsePayload.length == 0) {
return createResponse(TRouteResult.EMPTY_RESPONSE, new byte[0]);
}
return createResponse(TRouteResult.OK, responsePayload);
}
private TCommandTransferResponse createResponse(TRouteResult result) {
return createResponse(result, new byte[0]);
}
private TCommandTransferResponse createResponse(TRouteResult result, byte[] payload) {
TCommandTransferResponse response = new TCommandTransferResponse();
response.setRouteResult(result);
response.setPayload(payload);
return response;
}
}
| dawidmalina/pinpoint | collector/src/main/java/com/navercorp/pinpoint/collector/cluster/route/DefaultRouteHandler.java | Java | apache-2.0 | 4,382 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.twitter.search;
import java.util.Collections;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.component.twitter.TwitterEndpoint;
import org.apache.camel.component.twitter.consumer.AbstractTwitterConsumerHandler;
import org.apache.camel.component.twitter.consumer.TwitterEventType;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import twitter4j.GeoLocation;
import twitter4j.Query;
import twitter4j.Query.Unit;
import twitter4j.QueryResult;
import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;
/**
* Consumes search requests
*/
public class SearchConsumerHandler extends AbstractTwitterConsumerHandler {
private static final Logger LOG = LoggerFactory.getLogger(SearchConsumerHandler.class);
private String keywords;
public SearchConsumerHandler(TwitterEndpoint te, String keywords) {
super(te);
this.keywords = keywords;
}
@Override
public List<Exchange> pollConsume() throws TwitterException {
String keywords = this.keywords;
Query query;
if (keywords != null && keywords.trim().length() > 0) {
query = new Query(keywords);
LOG.debug("Searching twitter with keywords: {}", keywords);
} else {
query = new Query();
LOG.debug("Searching twitter without keywords.");
}
if (endpoint.getProperties().isFilterOld()) {
query.setSinceId(getLastId());
}
return search(query);
}
@Override
public List<Exchange> directConsume() throws TwitterException {
String keywords = this.keywords;
if (keywords == null || keywords.trim().length() == 0) {
return Collections.emptyList();
}
Query query = new Query(keywords);
LOG.debug("Searching twitter with keywords: {}", keywords);
return search(query);
}
private List<Exchange> search(Query query) throws TwitterException {
Integer numberOfPages = 1;
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLang())) {
query.setLang(endpoint.getProperties().getLang());
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getCount())) {
query.setCount(endpoint.getProperties().getCount());
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getNumberOfPages())) {
numberOfPages = endpoint.getProperties().getNumberOfPages();
}
if (ObjectHelper.isNotEmpty(endpoint.getProperties().getLatitude())
&& ObjectHelper.isNotEmpty(endpoint.getProperties().getLongitude())
&& ObjectHelper.isNotEmpty(endpoint.getProperties().getRadius())) {
GeoLocation location
= new GeoLocation(endpoint.getProperties().getLatitude(), endpoint.getProperties().getLongitude());
query.setGeoCode(location, endpoint.getProperties().getRadius(),
Unit.valueOf(endpoint.getProperties().getDistanceMetric()));
LOG.debug("Searching with additional geolocation parameters.");
}
LOG.debug("Searching with {} pages.", numberOfPages);
Twitter twitter = getTwitter();
QueryResult qr = twitter.search(query);
List<Status> tweets = qr.getTweets();
for (int i = 1; i < numberOfPages; i++) {
if (!qr.hasNext()) {
break;
}
qr = twitter.search(qr.nextQuery());
tweets.addAll(qr.getTweets());
}
if (endpoint.getProperties().isFilterOld()) {
for (Status status : tweets) {
setLastIdIfGreater(status.getId());
}
}
return TwitterEventType.STATUS.createExchangeList(endpoint, tweets);
}
}
| nikhilvibhav/camel | components/camel-twitter/src/main/java/org/apache/camel/component/twitter/search/SearchConsumerHandler.java | Java | apache-2.0 | 4,705 |
/*
*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package springfox.documentation.spring.web.dummy;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;
import io.swagger.annotations.AuthorizationScope;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import springfox.documentation.spring.web.dummy.DummyModels.Ignorable;
import springfox.documentation.spring.web.dummy.models.EnumType;
import springfox.documentation.spring.web.dummy.models.Example;
import springfox.documentation.spring.web.dummy.models.FoobarDto;
import springfox.documentation.spring.web.dummy.models.Treeish;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Date;
import java.util.List;
import java.util.Map;
@RequestMapping(produces = {"application/json"}, consumes = {"application/json", "application/xml"})
public class DummyClass {
@ApiParam
public void annotatedWithApiParam() {
}
public void dummyMethod() {
}
public void methodWithOneArgs(int a) {
}
public void methodWithTwoArgs(int a, String b) {
}
public void methodWithNoArgs() {
}
@ApiOperation(value = "description", httpMethod = "GET")
public void methodWithHttpGETMethod() {
}
@ApiOperation(value = "description", nickname = "unique")
public void methodWithNickName() {
}
@ApiOperation(value = "description", httpMethod = "GET", hidden = true)
public void methodThatIsHidden() {
}
@ApiOperation(value = "description", httpMethod = "RUBBISH")
public void methodWithInvalidHttpMethod() {
}
@ApiOperation(value = "summary", httpMethod = "RUBBISH")
public void methodWithSummary() {
}
@ApiOperation(value = "", notes = "some notes")
public void methodWithNotes() {
}
@ApiOperation(value = "", nickname = "a nickname")
public void methodWithNickname() {
}
@ApiOperation(value = "", position = 5)
public void methodWithPosition() {
}
@ApiOperation(value = "", consumes = "application/xml")
public void methodWithXmlConsumes() {
}
@ApiOperation(value = "", produces = "application/xml")
public void methodWithXmlProduces() {
}
@ApiOperation(value = "", produces = "application/xml, application/json", consumes = "application/xml, " +
"application/json")
public void methodWithMultipleMediaTypes() {
}
@ApiOperation(value = "", produces = "application/xml", consumes = "application/xml")
public void methodWithBothXmlMediaTypes() {
}
@ApiOperation(value = "", produces = "application/json", consumes = "application/xml")
public void methodWithMediaTypeAndFile(MultipartFile multipartFile) {
}
@ApiOperation(value = "", response = DummyModels.FunkyBusiness.class)
public void methodApiResponseClass() {
}
@ApiResponses({
@ApiResponse(code = 201, response = Void.class, message = "Rule Scheduled successfuly"),
@ApiResponse(code = 500, response = RestError.class, message = "Internal Server Error"),
@ApiResponse(code = 406, response = RestError.class, message = "Not acceptable")})
public void methodAnnotatedWithApiResponse() {
}
@ApiOperation(value = "methodWithExtensions",
extensions = {
@Extension(properties = @ExtensionProperty(name="x-test1", value="value1")),
@Extension(name="test2", properties = @ExtensionProperty(name="name2", value="value2"))
}
)
public void methodWithExtensions() {
}
@ApiOperation(value = "SomeVal",
authorizations = @Authorization(value = "oauth2",
scopes = {@AuthorizationScope(scope = "scope", description = "scope description")
}))
public void methodWithAuth() {
}
@ApiOperation(value = "")
public DummyModels.FunkyBusiness methodWithAPiAnnotationButWithoutResponseClass() {
return null;
}
@ApiOperation(value = "")
public DummyModels.Paginated<BusinessType> methodWithGenericType() {
return null;
}
public ResponseEntity<byte[]> methodWithGenericPrimitiveArray() {
return null;
}
public ResponseEntity<DummyClass[]> methodWithGenericComplexArray() {
return null;
}
public ResponseEntity<EnumType> methodWithEnumResponse() {
return null;
}
@Deprecated
public void methodWithDeprecated() {
}
public void methodWithServletRequest(ServletRequest req) {
}
public void methodWithBindingResult(BindingResult res) {
}
public void methodWithInteger(Integer integer) {
}
public void methodWithAnnotatedInteger(@Ignorable Integer integer) {
}
public void methodWithModelAttribute(@ModelAttribute Example example) {
}
public void methodWithoutModelAttribute(Example example) {
}
public void methodWithTreeishModelAttribute(@ModelAttribute Treeish example) {
}
@RequestMapping("/businesses/{businessId}")
public void methodWithSinglePathVariable(@PathVariable String businessId) {
}
@RequestMapping("/businesses/{businessId}")
public void methodWithSingleEnum(BusinessType businessType) {
}
@RequestMapping("/businesses/{businessId}")
public void methodWithSingleEnumArray(BusinessType[] businessTypes) {
}
@RequestMapping("/businesses/{businessId}/employees/{employeeId}/salary")
public void methodWithRatherLongRequestPath() {
}
@RequestMapping(value = "/parameter-conditions", params = "test=testValue")
public void methodWithParameterRequestCondition() {
}
@ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
value = "Authentication token")
public void methodWithApiImplicitParam() {
}
@ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
value = "Authentication token")
public void methodWithApiImplicitParamAndInteger(Integer integer) {
}
@ApiImplicitParams({
@ApiImplicitParam(name = "lang", dataType = "string", required = true, paramType = "query",
value = "Language", defaultValue = "EN", allowableValues = "EN,FR"),
@ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
value = "Authentication token")
})
public void methodWithApiImplicitParams(Integer integer) {
}
public interface ApiImplicitParamsInterface {
@ApiImplicitParams({
@ApiImplicitParam(name = "lang", dataType = "string", required = true, paramType = "query",
value = "Language", defaultValue = "EN", allowableValues = "EN,FR")
})
@ApiImplicitParam(name = "Authentication", dataType = "string", required = true, paramType = "header",
value = "Authentication token")
void methodWithApiImplicitParam();
}
public static class ApiImplicitParamsClass implements ApiImplicitParamsInterface {
@Override
public void methodWithApiImplicitParam() {
}
}
@ResponseBody
public DummyModels.BusinessModel methodWithConcreteResponseBody() {
return null;
}
@ResponseBody
public Map<String, DummyModels.BusinessModel> methodWithMapReturn() {
return null;
}
@ResponseBody
@ResponseStatus(value = HttpStatus.ACCEPTED, reason = "Accepted request")
public DummyModels.BusinessModel methodWithResponseStatusAnnotation() {
return null;
}
@ResponseBody
@ResponseStatus(value = HttpStatus.NO_CONTENT)
public void methodWithResponseStatusAnnotationAndEmptyReason() {
}
@ResponseBody
public DummyModels.AnnotatedBusinessModel methodWithModelPropertyAnnotations() {
return null;
}
@ResponseBody
public DummyModels.NamedBusinessModel methodWithModelAnnotations() {
return null;
}
@ResponseBody
public List<DummyModels.BusinessModel> methodWithListOfBusinesses() {
return null;
}
@ResponseBody
public DummyModels.CorporationModel methodWithConcreteCorporationModel() {
return null;
}
@ResponseBody
public Date methodWithDateResponseBody() {
return null;
}
public void methodParameterWithRequestBodyAnnotation(
@RequestBody DummyModels.BusinessModel model,
HttpServletResponse response,
DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
}
public void methodParameterWithRequestPartAnnotation(
@RequestPart DummyModels.BusinessModel model,
HttpServletResponse response,
DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
}
public void methodParameterWithRequestPartAnnotationOnSimpleType(
@RequestPart String model,
HttpServletResponse response,
DummyModels.AnnotatedBusinessModel annotatedBusinessModel) {
}
@ResponseBody
public DummyModels.AnnotatedBusinessModel methodWithSameAnnotatedModelInReturnAndRequestBodyParam(
@RequestBody DummyModels.AnnotatedBusinessModel model) {
return null;
}
@ApiResponses({@ApiResponse(code = 413, message = "a message")})
public void methodWithApiResponses() {
}
@ApiIgnore
public static class ApiIgnorableClass {
@ApiIgnore
public void dummyMethod() {
}
}
@ResponseBody
public DummyModels.ModelWithSerializeOnlyProperty methodWithSerializeOnlyPropInReturnAndRequestBodyParam(
@RequestBody DummyModels.ModelWithSerializeOnlyProperty model) {
return null;
}
@ResponseBody
public FoobarDto methodToTestFoobarDto(@RequestBody FoobarDto model) {
return null;
}
public enum BusinessType {
PRODUCT(1),
SERVICE(2);
private int value;
private BusinessType(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
public class CustomClass {
}
public class MethodsWithSameName {
public ResponseEntity methodToTest(Integer integer, Parent child) {
return null;
}
public void methodToTest(Integer integer, Child child) {
}
}
class Parent {
}
class Child extends Parent {
}
}
| zhiqinghuang/springfox | springfox-spring-web/src/test/java/springfox/documentation/spring/web/dummy/DummyClass.java | Java | apache-2.0 | 11,454 |
package com.mozu.api.utils;
public class Endpoints {
public static final String AUTH_URL = "api/platform/applications/authtickets";
public static final String AUTH_REFRESH_URL = "api/platform/applications/authtickets/refresh-ticket/%s";
public static final String TENANT_END_POINT = "api/platform/tenants";
public static final String SITES_END_POINT = "api/platform/tenants/%s/sites";
public static final String ATTRIBUTE_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes";
public static final String VOCABULARY_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes/%s/VocabularyValues";
public static final String PRODUCTTYPE_END_POINT = "api/commerce/catalog/admin/attributedefinition/producttypes";
public static final String ORDER_END_POINT = "api/commerce/orders";
public static final String APPLICATIONSTATUS_END_POINT = "api/commerce/settings/applicationstatus";
public static final String MZDB_APP_DATA_ENDPOINT = "api/platform/appdata";
public static final String MZDB_SITE_DATA_ENDPOINT = "api/platform/sitedata";
public static final String MZDB_TENANT_DATA_ENDPOINT = "api/platform/tenantdata";
}
| carsonreinke/mozu-java-sdk | src/main/java/com/mozu/api/utils/Endpoints.java | Java | mit | 1,200 |
/**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.ui.faces.components.util;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.struts.util.MessageResources;
import org.fenixedu.academic.domain.Exam;
import org.fenixedu.academic.domain.ExecutionCourse;
import org.fenixedu.academic.domain.Project;
import org.fenixedu.academic.domain.WrittenEvaluation;
import org.fenixedu.academic.domain.WrittenTest;
import org.fenixedu.academic.util.Bundle;
import org.fenixedu.academic.util.DateFormatUtil;
public class CalendarLink {
private Calendar objectOccurrence;
private String objectLinkLabel;
private Map<String, String> linkParameters = new HashMap<String, String>();
private boolean asLink;
public CalendarLink(boolean asLink) {
setAsLink(asLink);
}
public CalendarLink() {
this(true);
}
public CalendarLink(final ExecutionCourse executionCourse, final WrittenEvaluation writtenEvaluation, final Locale locale) {
setObjectOccurrence(writtenEvaluation.getDay());
setObjectLinkLabel(constructCalendarPresentation(executionCourse, writtenEvaluation, locale));
}
public CalendarLink(final ExecutionCourse executionCourse, final Project project, final Date date, final String tail,
final Locale locale) {
setObjectOccurrence(date);
setObjectLinkLabel(constructCalendarPresentation(executionCourse, project, date, tail, locale));
}
public void setObjectOccurrence(Calendar objectOccurrence) {
this.objectOccurrence = objectOccurrence;
}
public void setObjectOccurrence(Date objectOccurrence) {
final Calendar calendar = Calendar.getInstance();
calendar.setTime(objectOccurrence);
this.objectOccurrence = calendar;
}
public Calendar getObjectOccurrence() {
return this.objectOccurrence;
}
public void setObjectLinkLabel(String objectLinkLabel) {
this.objectLinkLabel = objectLinkLabel;
}
public String getObjectLinkLabel() {
return this.objectLinkLabel;
}
public void setLinkParameters(Map<String, String> linkParameters) {
this.linkParameters = linkParameters;
}
public String giveLink(String editLinkPage) {
final StringBuilder linkParameters = new StringBuilder();
linkParameters.append(editLinkPage);
if (this.linkParameters != null && !this.linkParameters.isEmpty()) {
linkParameters.append(editLinkPage.indexOf('?') > 0 ? '&' : '?');
for (final Iterator<Entry<String, String>> iterator = this.linkParameters.entrySet().iterator(); iterator.hasNext();) {
final Entry<String, String> entry = iterator.next();
linkParameters.append(entry.getKey());
linkParameters.append('=');
linkParameters.append(entry.getValue());
if (iterator.hasNext()) {
linkParameters.append('&');
}
}
}
return linkParameters.toString();
}
public void addLinkParameter(final String key, final String value) {
linkParameters.put(key, value);
}
private static final MessageResources messages = MessageResources.getMessageResources(Bundle.DEGREE);
private String constructCalendarPresentation(final ExecutionCourse executionCourse,
final WrittenEvaluation writtenEvaluation, final Locale locale) {
final StringBuilder stringBuilder = new StringBuilder();
if (writtenEvaluation instanceof WrittenTest) {
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.test"));
} else if (writtenEvaluation instanceof Exam) {
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.exam"));
}
stringBuilder.append(" ");
stringBuilder.append(executionCourse.getSigla());
stringBuilder.append(" (");
stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getBeginningDate()));
stringBuilder.append("-");
stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getEndDate()));
stringBuilder.append(")");
return stringBuilder.toString();
}
private String constructCalendarPresentation(final ExecutionCourse executionCourse, final Project project, final Date time,
final String tail, final Locale locale) {
final StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.project"));
stringBuilder.append(" ");
stringBuilder.append(executionCourse.getSigla());
stringBuilder.append(" (");
stringBuilder.append(DateFormatUtil.format("HH:mm", time));
stringBuilder.append(") ");
stringBuilder.append(tail);
return stringBuilder.toString();
}
public boolean isAsLink() {
return asLink;
}
public void setAsLink(boolean asLink) {
this.asLink = asLink;
}
}
| pedrosan7os/fenixedu-academic | src/main/java/org/fenixedu/academic/ui/faces/components/util/CalendarLink.java | Java | lgpl-3.0 | 5,981 |
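As a quick illustration of the parameter and link building in CalendarLink above, a usage sketch could look like this; the page name and parameter values are invented for the example, and parameter order follows the backing HashMap.

import org.fenixedu.academic.ui.faces.components.util.CalendarLink;

class CalendarLinkUsageSketch {
    static String example() {
        CalendarLink link = new CalendarLink();
        link.setObjectLinkLabel("AED (10:00-12:00)");
        link.addLinkParameter("executionCourseID", "12345");
        link.addLinkParameter("method", "show");
        // giveLink appends '?' or '&' depending on whether the page string already has a query part,
        // e.g. "/publico/viewExams.do?executionCourseID=12345&method=show"
        return link.giveLink("/publico/viewExams.do");
    }
}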
package uk.co.alt236.bluetoothlelib.device.beacon.ibeacon;
/**
*
*/
public class IBeaconConstants {
public static final byte[] MANUFACTURER_DATA_IBEACON_PREFIX = {0x4C, 0x00, 0x02, 0x15};
}
| StuartGuo/Bluetooth-LE-Library---Android | library/src/main/java/uk/co/alt236/bluetoothlelib/device/beacon/ibeacon/IBeaconConstants.java | Java | apache-2.0 | 198 |
// This is a generated file. Not intended for manual editing.
package com.intellij.sh.psi.impl;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import static com.intellij.sh.ShTypes.*;
import com.intellij.sh.psi.*;
public class ShUnaryExpressionImpl extends ShExpressionImpl implements ShUnaryExpression {
public ShUnaryExpressionImpl(ASTNode node) {
super(node);
}
@Override
public void accept(@NotNull ShVisitor visitor) {
visitor.visitUnaryExpression(this);
}
@Override
public void accept(@NotNull PsiElementVisitor visitor) {
if (visitor instanceof ShVisitor) accept((ShVisitor)visitor);
else super.accept(visitor);
}
@Override
@Nullable
public ShExpression getExpression() {
return findChildByClass(ShExpression.class);
}
@Override
@Nullable
public PsiElement getMinus() {
return findChildByType(MINUS);
}
@Override
@Nullable
public PsiElement getPlus() {
return findChildByType(PLUS);
}
}
| smmribeiro/intellij-community | plugins/sh/gen/com/intellij/sh/psi/impl/ShUnaryExpressionImpl.java | Java | apache-2.0 | 1,138 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.das.analytics.rest.beans;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
/**
* This class represents a facet object bean. facet object defines the hierarchical fieldName,
* which can be drilled down. This can be used as a value in a record.
* Example :
* Assume a record represents a book.
* Then the record field : value pairs will be, e.g.
* Price : $50.00
* Author : firstName LastName
* ISBN : 234325435445435436
* Published Date : "1987" , "March", "21"
*
* Here Published Date will be a facet/categoryPath, since it can be drilled down to year, then month, then day,
* categorizing the records at each level.
*
*/
@XmlRootElement(name = "categoryPath")
@XmlAccessorType(XmlAccessType.FIELD)
public class DrillDownPathBean {
@XmlElement(name = "path")
private String[] path;
@XmlElement(name = "fieldName")
private String fieldName;
/**
* This constructor is for jax-rs json serialization/deserialization
*/
public DrillDownPathBean() {
}
public String[] getPath() {
return path;
}
public String getFieldName() {
return fieldName;
}
public void setPath(String[] path) {
this.path = path;
}
public void setFieldName(String fieldName) {
this.fieldName = fieldName;
}
}
| wso2/product-das | modules/integration/tests-common/integration-test-utils/src/main/java/org/wso2/das/analytics/rest/beans/DrillDownPathBean.java | Java | apache-2.0 | 2,162 |
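A minimal sketch of the "Published Date" example from the class comment above, showing how the bean would typically be populated (field name and path values are taken from that example):

import org.wso2.das.analytics.rest.beans.DrillDownPathBean;

class DrillDownPathBeanSketch {
    static DrillDownPathBean publishedDateFacet() {
        DrillDownPathBean bean = new DrillDownPathBean();
        bean.setFieldName("Published Date");                 // the drillable facet field
        bean.setPath(new String[] {"1987", "March", "21"});  // year -> month -> day hierarchy
        return bean;
    }
}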
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.assertions;
import com.facebook.presto.Session;
import com.facebook.presto.cost.PlanNodeCost;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import static com.google.common.base.Preconditions.checkState;
public class LimitMatcher
implements Matcher
{
private final long limit;
public LimitMatcher(long limit)
{
this.limit = limit;
}
@Override
public boolean shapeMatches(PlanNode node)
{
if (!(node instanceof LimitNode)) {
return false;
}
LimitNode limitNode = (LimitNode) node;
return limitNode.getCount() == limit;
}
@Override
public MatchResult detailMatches(PlanNode node, PlanNodeCost planNodeCost, Session session, Metadata metadata, SymbolAliases symbolAliases)
{
checkState(shapeMatches(node));
return MatchResult.match();
}
}
| gh351135612/presto | presto-main/src/test/java/com/facebook/presto/sql/planner/assertions/LimitMatcher.java | Java | apache-2.0 | 1,577 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.util;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* NOTE: If you add a new method here you must also add it to {@link io.undertow.server.protocol.http.HttpRequestParser}
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
*/
public final class Methods {
private Methods() {
}
public static final String OPTIONS_STRING = "OPTIONS";
public static final String GET_STRING = "GET";
public static final String HEAD_STRING = "HEAD";
public static final String POST_STRING = "POST";
public static final String PUT_STRING = "PUT";
public static final String DELETE_STRING = "DELETE";
public static final String TRACE_STRING = "TRACE";
public static final String CONNECT_STRING = "CONNECT";
public static final String PROPFIND_STRING = "PROPFIND";
public static final String PROPPATCH_STRING = "PROPPATCH";
public static final String MKCOL_STRING = "MKCOL";
public static final String COPY_STRING = "COPY";
public static final String MOVE_STRING = "MOVE";
public static final String LOCK_STRING = "LOCK";
public static final String UNLOCK_STRING = "UNLOCK";
public static final String ACL_STRING = "ACL";
public static final String REPORT_STRING = "REPORT";
public static final String VERSION_CONTROL_STRING = "VERSION-CONTROL";
public static final String CHECKIN_STRING = "CHECKIN";
public static final String CHECKOUT_STRING = "CHECKOUT";
public static final String UNCHECKOUT_STRING = "UNCHECKOUT";
public static final String SEARCH_STRING = "SEARCH";
public static final String MKWORKSPACE_STRING = "MKWORKSPACE";
public static final String UPDATE_STRING = "UPDATE";
public static final String LABEL_STRING = "LABEL";
public static final String MERGE_STRING = "MERGE";
public static final String BASELINE_CONTROL_STRING = "BASELINE_CONTROL";
public static final String MKACTIVITY_STRING = "MKACTIVITY";
public static final HttpString OPTIONS = new HttpString(OPTIONS_STRING);
public static final HttpString GET = new HttpString(GET_STRING);
public static final HttpString HEAD = new HttpString(HEAD_STRING);
public static final HttpString POST = new HttpString(POST_STRING);
public static final HttpString PUT = new HttpString(PUT_STRING);
public static final HttpString DELETE = new HttpString(DELETE_STRING);
public static final HttpString TRACE = new HttpString(TRACE_STRING);
public static final HttpString CONNECT = new HttpString(CONNECT_STRING);
public static final HttpString PROPFIND = new HttpString(PROPFIND_STRING);
public static final HttpString PROPPATCH = new HttpString(PROPPATCH_STRING);
public static final HttpString MKCOL = new HttpString(MKCOL_STRING);
public static final HttpString COPY = new HttpString(COPY_STRING);
public static final HttpString MOVE = new HttpString(MOVE_STRING);
public static final HttpString LOCK = new HttpString(LOCK_STRING);
public static final HttpString UNLOCK = new HttpString(UNLOCK_STRING);
public static final HttpString ACL = new HttpString(ACL_STRING);
public static final HttpString REPORT = new HttpString(REPORT_STRING);
public static final HttpString VERSION_CONTROL = new HttpString(VERSION_CONTROL_STRING);
public static final HttpString CHECKIN = new HttpString(CHECKIN_STRING);
public static final HttpString CHECKOUT = new HttpString(CHECKOUT_STRING);
public static final HttpString UNCHECKOUT = new HttpString(UNCHECKOUT_STRING);
public static final HttpString SEARCH = new HttpString(SEARCH_STRING);
public static final HttpString MKWORKSPACE = new HttpString(MKWORKSPACE_STRING);
public static final HttpString UPDATE = new HttpString(UPDATE_STRING);
public static final HttpString LABEL = new HttpString(LABEL_STRING);
public static final HttpString MERGE = new HttpString(MERGE_STRING);
public static final HttpString BASELINE_CONTROL = new HttpString(BASELINE_CONTROL_STRING);
public static final HttpString MKACTIVITY = new HttpString(MKACTIVITY_STRING);
private static final Map<String, HttpString> METHODS;
static {
Map<String, HttpString> methods = new HashMap<>();
putString(methods, OPTIONS);
putString(methods, GET);
putString(methods, HEAD);
putString(methods, POST);
putString(methods, PUT);
putString(methods, DELETE);
putString(methods, TRACE);
putString(methods, CONNECT);
putString(methods, PROPFIND);
putString(methods, PROPPATCH);
putString(methods, MKCOL);
putString(methods, COPY);
putString(methods, MOVE);
putString(methods, LOCK);
putString(methods, UNLOCK);
putString(methods, ACL);
putString(methods, REPORT);
putString(methods, VERSION_CONTROL);
putString(methods, CHECKIN);
putString(methods, CHECKOUT);
putString(methods, UNCHECKOUT);
putString(methods, SEARCH);
putString(methods, MKWORKSPACE);
putString(methods, UPDATE);
putString(methods, LABEL);
putString(methods, MERGE);
putString(methods, BASELINE_CONTROL);
putString(methods, MKACTIVITY);
METHODS = Collections.unmodifiableMap(methods);
}
private static void putString(Map<String, HttpString> methods, HttpString options) {
methods.put(options.toString(), options);
}
public static HttpString fromString(String method) {
HttpString res = METHODS.get(method);
if(res == null) {
return new HttpString(method);
}
return res;
}
}
| TomasHofman/undertow | core/src/main/java/io/undertow/util/Methods.java | Java | apache-2.0 | 6,437 |
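A short sketch of how the lookup above behaves: names present in the map resolve to the cached constants, while anything else gets a fresh HttpString (PURGE is just an arbitrary unknown method chosen for illustration).

import io.undertow.util.HttpString;
import io.undertow.util.Methods;

class MethodsLookupSketch {
    static void demo() {
        HttpString get = Methods.fromString("GET");       // returns the cached Methods.GET instance
        HttpString custom = Methods.fromString("PURGE");   // not in METHODS, so a new HttpString
        System.out.println(get == Methods.GET);            // true
        System.out.println(custom);                        // PURGE
    }
}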
package com.intellij.execution.configurations;
/**
* Configuration of such type can't be manually added or removed by the user; the template entry is hidden.
*/
public interface VirtualConfigurationType {
}
| siosio/intellij-community | platform/lang-api/src/com/intellij/execution/configurations/VirtualConfigurationType.java | Java | apache-2.0 | 210 |
@org.osgi.annotation.bundle.Export
@org.osgi.annotation.versioning.Version("2.0.0")
package bndtools.editor.model;
| psoreide/bnd | bndtools.core/src/bndtools/editor/model/package-info.java | Java | apache-2.0 | 115 |
/*
* Copyright 2003-2013 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.abstraction;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.IntroduceConstantFix;
import com.siyeh.ig.fixes.SuppressForTestsScopeFix;
import com.siyeh.ig.psiutils.ClassUtils;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
public class MagicNumberInspection extends BaseInspection {
@SuppressWarnings("PublicField")
public boolean ignoreInHashCode = true;
@SuppressWarnings({"PublicField", "UnusedDeclaration"})
public boolean ignoreInTestCode = false; // keep for compatibility
@SuppressWarnings("PublicField")
public boolean ignoreInAnnotations = true;
@SuppressWarnings("PublicField")
public boolean ignoreInitialCapacity = false;
@Override
protected InspectionGadgetsFix @NotNull [] buildFixes(Object... infos) {
final PsiElement context = (PsiElement)infos[0];
final InspectionGadgetsFix fix = SuppressForTestsScopeFix.build(this, context);
if (fix == null) {
return new InspectionGadgetsFix[] {new IntroduceConstantFix()};
}
return new InspectionGadgetsFix[] {new IntroduceConstantFix(), fix};
}
@Override
protected boolean buildQuickFixesOnlyForOnTheFlyErrors() {
return true;
}
@Override
@NotNull
public String buildErrorString(Object... infos) {
return InspectionGadgetsBundle.message("magic.number.problem.descriptor");
}
@Override
public JComponent createOptionsPanel() {
final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.in.hashcode"), "ignoreInHashCode");
panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.in.annotations"), "ignoreInAnnotations");
panel.addCheckbox(InspectionGadgetsBundle.message("inspection.option.ignore.as.initial.capacity"), "ignoreInitialCapacity");
return panel;
}
@Override
public BaseInspectionVisitor buildVisitor() {
return new MagicNumberVisitor();
}
private class MagicNumberVisitor extends BaseInspectionVisitor {
@Override
public void visitLiteralExpression(@NotNull PsiLiteralExpression expression) {
super.visitLiteralExpression(expression);
final PsiType type = expression.getType();
if (!ClassUtils.isPrimitiveNumericType(type) || PsiType.CHAR.equals(type)) {
return;
}
if (isSpecialCaseLiteral(expression) || isFinalVariableInitialization(expression)) {
return;
}
if (ignoreInHashCode) {
final PsiMethod containingMethod = PsiTreeUtil.getParentOfType(expression, PsiMethod.class, true,
PsiClass.class, PsiLambdaExpression.class);
if (MethodUtils.isHashCode(containingMethod)) {
return;
}
}
if (ignoreInAnnotations) {
final boolean insideAnnotation = AnnotationUtil.isInsideAnnotation(expression);
if (insideAnnotation) {
return;
}
}
if (ignoreInitialCapacity && isInitialCapacity(expression)) {
return;
}
final PsiField field = PsiTreeUtil.getParentOfType(expression, PsiField.class, true, PsiCallExpression.class);
if (field != null && PsiUtil.isCompileTimeConstant(field)) {
return;
}
final PsiElement parent = expression.getParent();
if (parent instanceof PsiPrefixExpression) {
registerError(parent, parent);
}
else {
registerError(expression, expression);
}
}
private boolean isInitialCapacity(PsiLiteralExpression expression) {
final PsiElement element =
PsiTreeUtil.skipParentsOfType(expression, PsiTypeCastExpression.class, PsiParenthesizedExpression.class);
if (!(element instanceof PsiExpressionList)) {
return false;
}
final PsiElement parent = element.getParent();
if (!(parent instanceof PsiNewExpression)) {
return false;
}
final PsiNewExpression newExpression = (PsiNewExpression)parent;
return TypeUtils.expressionHasTypeOrSubtype(newExpression,
CommonClassNames.JAVA_LANG_ABSTRACT_STRING_BUILDER,
CommonClassNames.JAVA_UTIL_MAP,
CommonClassNames.JAVA_UTIL_COLLECTION,
"java.io.ByteArrayOutputStream",
"java.awt.Dimension") != null;
}
private boolean isSpecialCaseLiteral(PsiLiteralExpression expression) {
final Object object = ExpressionUtils.computeConstantExpression(expression);
if (object instanceof Integer) {
final int i = ((Integer)object).intValue();
return i >= 0 && i <= 10 || i == 100 || i == 1000;
}
else if (object instanceof Long) {
final long l = ((Long)object).longValue();
return l >= 0L && l <= 2L;
}
else if (object instanceof Double) {
final double d = ((Double)object).doubleValue();
return d == 1.0 || d == 0.0;
}
else if (object instanceof Float) {
final float f = ((Float)object).floatValue();
return f == 1.0f || f == 0.0f;
}
return false;
}
public boolean isFinalVariableInitialization(PsiExpression expression) {
final PsiElement parent =
PsiTreeUtil.getParentOfType(expression, PsiVariable.class, PsiAssignmentExpression.class);
final PsiVariable variable;
if (!(parent instanceof PsiVariable)) {
if (!(parent instanceof PsiAssignmentExpression)) {
return false;
}
final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
final PsiExpression lhs = assignmentExpression.getLExpression();
if (!(lhs instanceof PsiReferenceExpression)) {
return false;
}
final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)lhs;
final PsiElement target = referenceExpression.resolve();
if (!(target instanceof PsiVariable)) {
return false;
}
variable = (PsiVariable)target;
}
else {
variable = (PsiVariable)parent;
}
return variable.hasModifierProperty(PsiModifier.FINAL);
}
}
}
| siosio/intellij-community | plugins/InspectionGadgets/src/com/siyeh/ig/abstraction/MagicNumberInspection.java | Java | apache-2.0 | 7,475 |
/*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.pcep.tunnel.impl;
import org.onosproject.net.DeviceId;
import org.onosproject.pcep.api.PcepController;
import org.onosproject.pcep.api.PcepDpid;
import org.onosproject.pcep.api.PcepLinkListener;
import org.onosproject.pcep.api.PcepSwitch;
import org.onosproject.pcep.api.PcepSwitchListener;
import org.onosproject.pcep.api.PcepTunnel;
import org.onosproject.pcep.api.PcepTunnelListener;
public class PcepControllerAdapter implements PcepController {
@Override
public Iterable<PcepSwitch> getSwitches() {
return null;
}
@Override
public PcepSwitch getSwitch(PcepDpid did) {
return null;
}
@Override
public void addListener(PcepSwitchListener listener) {
}
@Override
public void removeListener(PcepSwitchListener listener) {
}
@Override
public void addLinkListener(PcepLinkListener listener) {
}
@Override
public void removeLinkListener(PcepLinkListener listener) {
}
@Override
public void addTunnelListener(PcepTunnelListener listener) {
}
@Override
public void removeTunnelListener(PcepTunnelListener listener) {
}
@Override
public PcepTunnel applyTunnel(DeviceId srcDid, DeviceId dstDid, long srcPort, long dstPort, long bandwidth,
String name) {
return null;
}
@Override
public Boolean deleteTunnel(String id) {
return null;
}
@Override
public Boolean updateTunnelBandwidth(String id, long bandwidth) {
return null;
}
@Override
public void getTunnelStatistics(String pcepTunnelId) {
}
}
| packet-tracker/onos | providers/pcep/tunnel/src/test/java/org/onosproject/provider/pcep/tunnel/impl/PcepControllerAdapter.java | Java | apache-2.0 | 2,267 |
// "Replace 'collect(toUnmodifiableList())' with 'toList()'" "true"
import java.util.List;
import java.util.stream.*;
class X {
void test(Stream<String> stream) {
List<String> list = stream.collect<caret>(Collectors.toUnmodifiableList());
}
}
| smmribeiro/intellij-community | java/java-tests/testData/inspection/streamApiCallChains/beforeUnmodifiableListJava16.java | Java | apache-2.0 | 251 |
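For reference, the corresponding "after" state of this quick-fix fixture would presumably look like the sketch below, with the collector call replaced by Stream.toList() (available since Java 16); this is an illustration, not the actual companion test-data file.

import java.util.List;
import java.util.stream.*;
class X {
    void test(Stream<String> stream) {
        List<String> list = stream.toList();
    }
}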
/*
* Copyright 2005-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ldap.core.support;
import org.springframework.ldap.core.ContextMapper;
import javax.naming.NamingException;
import javax.naming.ldap.HasControls;
/**
* Extension of the {@link org.springframework.ldap.core.ContextMapper} interface that allows
* controls to be passed to the mapper implementation. Uses Java 5 covariant
* return types to override the return type of the
* {@link #mapFromContextWithControls(Object, javax.naming.ldap.HasControls)} method to be the
* type parameter T.
*
* @author Tim Terry
* @author Ulrik Sandberg
* @param <T> return type of the
* {@link #mapFromContextWithControls(Object, javax.naming.ldap.HasControls)} method
*/
public interface ContextMapperWithControls<T> extends ContextMapper<T> {
T mapFromContextWithControls(final Object ctx, final HasControls hasControls) throws NamingException;
}
| likaiwalkman/spring-ldap | core/src/main/java/org/springframework/ldap/core/support/ContextMapperWithControls.java | Java | apache-2.0 | 1,490 |
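A hedged sketch of what an implementation might look like: the "cn" attribute and the way the response controls are reported are illustrative assumptions, and it assumes the context object is a DirContextAdapter, as Spring LDAP's LdapTemplate normally supplies to a ContextMapper.

import javax.naming.NamingException;
import javax.naming.ldap.Control;
import javax.naming.ldap.HasControls;
import org.springframework.ldap.core.DirContextAdapter;
import org.springframework.ldap.core.support.ContextMapperWithControls;

class PersonNameMapperSketch implements ContextMapperWithControls<String> {
    @Override
    public String mapFromContext(Object ctx) {
        // plain mapping used when the search result exposes no controls
        return ((DirContextAdapter) ctx).getStringAttribute("cn");
    }

    @Override
    public String mapFromContextWithControls(Object ctx, HasControls hasControls) throws NamingException {
        Control[] controls = hasControls.getControls(); // e.g. paged-results or sort response controls
        int count = (controls == null) ? 0 : controls.length;
        return mapFromContext(ctx) + " [" + count + " response control(s)]";
    }
}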
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.nodetype;
import java.util.List;
import java.util.Set;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.addAll;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.contains;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Sets.newHashSet;
import static org.apache.jackrabbit.JcrConstants.JCR_DEFAULTPRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_MANDATORY;
import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_NODETYPENAME;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_SAMENAMESIBLINGS;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.oak.api.Type.UNDEFINED;
import static org.apache.jackrabbit.oak.api.Type.UNDEFINEDS;
import static org.apache.jackrabbit.oak.commons.PathUtils.dropIndexFromName;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_MANDATORY_CHILD_NODES;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_MANDATORY_PROPERTIES;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_CHILD_NODE_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_PROPERTY_DEFINITIONS;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_SUPERTYPES;
class EffectiveType {
private final List<NodeState> types;
EffectiveType(@Nonnull List<NodeState> types) {
this.types = checkNotNull(types);
}
/**
* Checks whether this effective type contains the named type.
*
* @param name node type name
* @return {@code true} if the named type is included,
* {@code false} otherwise
*/
boolean isNodeType(@Nonnull String name) {
for (NodeState type : types) {
if (name.equals(type.getName(JCR_NODETYPENAME))
|| contains(type.getNames(REP_SUPERTYPES), name)) {
return true;
}
}
return false;
}
boolean isMandatoryProperty(@Nonnull String name) {
return nameSetContains(REP_MANDATORY_PROPERTIES, name);
}
@Nonnull
Set<String> getMandatoryProperties() {
return getNameSet(REP_MANDATORY_PROPERTIES);
}
boolean isMandatoryChildNode(@Nonnull String name) {
return nameSetContains(REP_MANDATORY_CHILD_NODES, name);
}
@Nonnull
Set<String> getMandatoryChildNodes() {
return getNameSet(REP_MANDATORY_CHILD_NODES);
}
/**
* Finds a matching definition for a property with the given name and type.
*
* @param property modified property
* @return matching property definition, or {@code null}
*/
@CheckForNull
NodeState getDefinition(@Nonnull PropertyState property) {
String propertyName = property.getName();
Type<?> propertyType = property.getType();
String escapedName;
if (JCR_PRIMARYTYPE.equals(propertyName)) {
escapedName = NodeTypeConstants.REP_PRIMARY_TYPE;
} else if (JCR_MIXINTYPES.equals(propertyName)) {
escapedName = NodeTypeConstants.REP_MIXIN_TYPES;
} else if (JCR_UUID.equals(propertyName)) {
escapedName = NodeTypeConstants.REP_UUID;
} else {
escapedName = propertyName;
}
String definedType = propertyType.toString();
String undefinedType;
if (propertyType.isArray()) {
undefinedType = UNDEFINEDS.toString();
} else {
undefinedType = UNDEFINED.toString();
}
// Find matching named property definition
for (NodeState type : types) {
NodeState definitions = type
.getChildNode(REP_NAMED_PROPERTY_DEFINITIONS)
.getChildNode(escapedName);
NodeState definition = definitions.getChildNode(definedType);
if (definition.exists()) {
return definition;
}
definition = definitions.getChildNode(undefinedType);
if (definition.exists()) {
return definition;
}
// OAK-822: a mandatory definition always overrides residual ones
// TODO: unnecessary if the OAK-713 fallback wasn't needed below
for (ChildNodeEntry entry : definitions.getChildNodeEntries()) {
definition = entry.getNodeState();
if (definition.getBoolean(JCR_MANDATORY)) {
return definition;
}
}
// TODO: Fall back to residual definitions until we have consensus on OAK-713
// throw new ConstraintViolationException(
// "No matching definition found for property " + propertyName);
}
// Find matching residual property definition
for (NodeState type : types) {
NodeState residual =
type.getChildNode(REP_RESIDUAL_PROPERTY_DEFINITIONS);
NodeState definition = residual.getChildNode(definedType);
if (!definition.exists()) {
definition = residual.getChildNode(undefinedType);
}
if (definition.exists()) {
return definition;
}
}
return null;
}
/**
     * Checks whether a matching definition exists for a child node with the
     * given name and types.
*
* @param nameWithIndex child node name, possibly with an SNS index
* @param effective effective types of the child node
* @return {@code true} if there's a matching child node definition,
* {@code false} otherwise
*/
boolean isValidChildNode(@Nonnull String nameWithIndex, @Nonnull EffectiveType effective) {
String name = dropIndexFromName(nameWithIndex);
boolean sns = !name.equals(nameWithIndex);
Set<String> typeNames = effective.getTypeNames();
// Find matching named child node definition
for (NodeState type : types) {
NodeState definitions = type
.getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS)
.getChildNode(name);
for (String typeName : typeNames) {
NodeState definition = definitions.getChildNode(typeName);
if (definition.exists() && snsMatch(sns, definition)) {
return true;
}
}
// OAK-822: a mandatory definition always overrides alternatives
// TODO: unnecessary if the OAK-713 fallback wasn't needed below
for (ChildNodeEntry entry : definitions.getChildNodeEntries()) {
NodeState definition = entry.getNodeState();
if (definition.getBoolean(JCR_MANDATORY)) {
return false;
}
}
// TODO: Fall back to residual definitions until we have consensus on OAK-713
// throw new ConstraintViolationException(
// "Incorrect node type of child node " + nodeName);
}
// Find matching residual child node definition
for (NodeState type : types) {
NodeState residual =
type.getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS);
for (String typeName : typeNames) {
NodeState definition = residual.getChildNode(typeName);
if (definition.exists() && snsMatch(sns, definition)) {
return true;
}
}
}
return false;
}
/**
* Finds the default node type for a child node with the given name.
*
* @param nameWithIndex child node name, possibly with an SNS index
* @return default type, or {@code null} if not found
*/
@CheckForNull
String getDefaultType(@Nonnull String nameWithIndex) {
String name = dropIndexFromName(nameWithIndex);
boolean sns = !name.equals(nameWithIndex);
for (NodeState type : types) {
NodeState named = type
.getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS)
.getChildNode(name);
NodeState residual = type
.getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS);
for (ChildNodeEntry entry : concat(
named.getChildNodeEntries(),
residual.getChildNodeEntries())) {
NodeState definition = entry.getNodeState();
String defaultType = definition.getName(JCR_DEFAULTPRIMARYTYPE);
if (defaultType != null && snsMatch(sns, definition)) {
return defaultType;
}
}
}
return null;
}
@Nonnull
Set<String> getTypeNames() {
Set<String> names = newHashSet();
for (NodeState type : types) {
names.add(type.getName(JCR_NODETYPENAME));
addAll(names, type.getNames(REP_SUPERTYPES));
}
return names;
}
//------------------------------------------------------------< Object >--
@Override
public String toString() {
List<String> names = newArrayListWithCapacity(types.size());
for (NodeState type : types) {
names.add(type.getName(JCR_NODETYPENAME));
}
return names.toString();
}
//-----------------------------------------------------------< private >--
/**
* Depending on the given SNS flag, checks whether the given child node
* definition allows same-name-siblings.
*
* @param sns SNS flag, {@code true} if processing an SNS node
     * @param definition child node definition
     * @return {@code true} if the definition matches, {@code false} otherwise
     */
private boolean snsMatch(boolean sns, @Nonnull NodeState definition) {
return !sns || definition.getBoolean(JCR_SAMENAMESIBLINGS);
}
private boolean nameSetContains(@Nonnull String set, @Nonnull String name) {
for (NodeState type : types) {
if (contains(type.getNames(set), name)) {
return true;
}
}
return false;
}
@Nonnull
private Set<String> getNameSet(@Nonnull String set) {
Set<String> names = newHashSet();
for (NodeState type : types) {
addAll(names, type.getNames(set));
}
return names;
}
}
| AndreasAbdi/jackrabbit-oak | oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/EffectiveType.java | Java | apache-2.0 | 11,952 |
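A minimal sketch of how the lookup order implemented above (named definitions first, a mandatory named definition overriding, residual definitions last) might be exercised by a caller. EffectiveType is package-private, so the sketch assumes it sits in the same package; how the list of type nodes is obtained is likewise an assumption, not Oak's real node-type manager API.

// Assumed to live in the same package because EffectiveType is package-private.
package org.apache.jackrabbit.oak.plugins.nodetype;

import java.util.List;

import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.spi.state.NodeState;

class EffectiveTypeSketch {

    // 'typeNodes' is assumed to hold the definition nodes of the effective
    // primary and mixin types of some node being validated.
    static void describe(List<NodeState> typeNodes, PropertyState modified) {
        EffectiveType effective = new EffectiveType(typeNodes);

        // Named definitions are checked first, a mandatory named definition wins
        // even when its type differs (OAK-822), and residual definitions come last.
        NodeState definition = effective.getDefinition(modified);
        if (definition == null) {
            System.out.println("No matching definition for " + modified.getName());
        }
    }
}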
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.lang.psi.impl.synthetic;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.util.GrTraitUtil;
import org.jetbrains.plugins.groovy.transformations.TransformationContext;
public class GrTraitField extends GrLightField implements PsiMirrorElement {
private static final Logger LOG = Logger.getInstance(GrTraitField.class);
private final PsiField myField;
public GrTraitField(@NotNull GrField field, GrTypeDefinition clazz, PsiSubstitutor substitutor, @Nullable TransformationContext context) {
super(clazz, getNewNameForField(field), substitutor.substitute(field.getType()), field);
GrLightModifierList modifierList = getModifierList();
for (String modifier : PsiModifier.MODIFIERS) {
boolean hasModifierProperty;
GrModifierList fieldModifierList = field.getModifierList();
if (context == null || fieldModifierList == null) {
hasModifierProperty = field.hasModifierProperty(modifier);
} else {
hasModifierProperty = context.hasModifierProperty(fieldModifierList, modifier);
}
if (hasModifierProperty) {
modifierList.addModifier(modifier);
}
}
modifierList.copyAnnotations(field.getModifierList());
myField = field;
}
@NotNull
private static String getNewNameForField(@NotNull PsiField field) {
PsiClass containingClass = field.getContainingClass();
LOG.assertTrue(containingClass != null);
return GrTraitUtil.getTraitFieldPrefix(containingClass) + field.getName();
}
@NotNull
@Override
public PsiField getPrototype() {
return myField;
}
}
| siosio/intellij-community | plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/psi/impl/synthetic/GrTraitField.java | Java | apache-2.0 | 2,141 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search.type;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchServiceListener;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.fetch.FetchSearchResultProvider;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.threadpool.ThreadPool;
import static org.elasticsearch.action.search.type.TransportSearchHelper.buildScrollId;
/**
*
*/
public class TransportSearchCountAction extends TransportSearchTypeAction {
@Inject
public TransportSearchCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController, ActionFilters actionFilters) {
super(settings, threadPool, clusterService, searchService, searchPhaseController, actionFilters);
}
@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
new AsyncAction(searchRequest, listener).start();
}
private class AsyncAction extends BaseAsyncAction<QuerySearchResultProvider> {
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
}
@Override
protected String firstPhaseName() {
return "query";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<QuerySearchResultProvider> listener) {
searchService.sendExecuteQuery(node, request, listener);
}
@Override
protected void moveToSecondPhase() throws Exception {
// no need to sort, since we know we have no hits back
final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults, (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty());
String scrollId = null;
if (request.scroll() != null) {
scrollId = buildScrollId(request.searchType(), firstResults, null);
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successfulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
}
| dmiszkiewicz/elasticsearch | src/main/java/org/elasticsearch/action/search/type/TransportSearchCountAction.java | Java | apache-2.0 | 3,920 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.common.beans.topology;
import org.apache.stratos.common.beans.PropertyBean;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;
@XmlRootElement(name = "clusters")
public class ClusterBean {
private String alias;
private String serviceName;
private String clusterId;
private List<MemberBean> member;
private String tenantRange;
private List<String> hostNames;
private boolean isLbCluster;
private List<PropertyBean> property;
private List<InstanceBean> instances;
public List<InstanceBean> getInstances() {
return instances;
}
public void setInstances(List<InstanceBean> instances) {
this.instances = instances;
}
@Override
public String toString() {
return "Cluster [serviceName=" + getServiceName() + ", clusterId=" + getClusterId() + ", member=" + getMember()
+ ", tenantRange=" + getTenantRange() + ", hostNames=" + getHostNames() + ", isLbCluster=" + isLbCluster()
+ ", property=" + getProperty() + "]";
}
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
public String getClusterId() {
return clusterId;
}
public void setClusterId(String clusterId) {
this.clusterId = clusterId;
}
public List<MemberBean> getMember() {
return member;
}
public void setMember(List<MemberBean> member) {
this.member = member;
}
public String getTenantRange() {
return tenantRange;
}
public void setTenantRange(String tenantRange) {
this.tenantRange = tenantRange;
}
public List<String> getHostNames() {
return hostNames;
}
public void setHostNames(List<String> hostNames) {
this.hostNames = hostNames;
}
public boolean isLbCluster() {
return isLbCluster;
}
public void setLbCluster(boolean isLbCluster) {
this.isLbCluster = isLbCluster;
}
public List<PropertyBean> getProperty() {
return property;
}
public void setProperty(List<PropertyBean> property) {
this.property = property;
}
}
| pkdevbox/stratos | components/org.apache.stratos.common/src/main/java/org/apache/stratos/common/beans/topology/ClusterBean.java | Java | apache-2.0 | 3,225 |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.datasource.management.client.dbexplorer.schemas;
import com.google.gwt.view.client.AsyncDataProvider;
import org.uberfire.client.mvp.UberElement;
import org.uberfire.ext.widgets.common.client.common.HasBusyIndicator;
public interface DatabaseSchemaExplorerView
extends UberElement< DatabaseSchemaExplorerView.Presenter >, HasBusyIndicator {
interface Presenter {
void onOpen( DatabaseSchemaRow row );
}
interface Handler {
void onOpen( String schemaName );
}
void setDataProvider( AsyncDataProvider< DatabaseSchemaRow > dataProvider );
void redraw( );
} | romartin/kie-wb-common | kie-wb-common-screens/kie-wb-common-datasource-mgmt/kie-wb-common-datasource-mgmt-client/src/main/java/org/kie/workbench/common/screens/datasource/management/client/dbexplorer/schemas/DatabaseSchemaExplorerView.java | Java | apache-2.0 | 1,272 |
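A sketch of a presenter honouring the contract above. The DatabaseSchemaRow#getName() accessor is assumed for illustration and is not part of the interface shown; the UberElement init wiring is omitted.

package org.kie.workbench.common.screens.datasource.management.client.dbexplorer.schemas;

public class DatabaseSchemaExplorerPresenterSketch
        implements DatabaseSchemaExplorerView.Presenter {

    private final DatabaseSchemaExplorerView.Handler handler;

    public DatabaseSchemaExplorerPresenterSketch( DatabaseSchemaExplorerView.Handler handler ) {
        this.handler = handler;
    }

    @Override
    public void onOpen( DatabaseSchemaRow row ) {
        // Forward the selected schema to whoever embeds the explorer.
        // getName() is an assumed accessor on DatabaseSchemaRow, not shown above.
        handler.onOpen( row.getName() );
    }
}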
/*
*------------------------------------------------------------------------------
* Copyright (C) 2006-2010 University of Dundee. All rights reserved.
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*------------------------------------------------------------------------------
*/
package org.openmicroscopy.shoola.env.data.model;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import omero.IllegalArgumentException;
import org.openmicroscopy.shoola.env.data.login.UserCredentials;
import omero.gateway.model.ExperimenterData;
import omero.gateway.model.GroupData;
/**
* Holds information about the group, users to handle.
*
* @author Jean-Marie Burel
* <a href="mailto:j.burel@dundee.ac.uk">j.burel@dundee.ac.uk</a>
* @author Donald MacDonald
* <a href="mailto:donald@lifesci.dundee.ac.uk">donald@lifesci.dundee.ac.uk</a>
* @version 3.0
* @since 3.0-Beta4
*/
public class AdminObject
{
/** Indicates to create a group. */
public static final int CREATE_GROUP = 0;
    /** Indicates to create an experimenter. */
public static final int CREATE_EXPERIMENTER = 1;
/** Indicates to update a group. */
public static final int UPDATE_GROUP = 2;
/** Indicates to update experimenter. */
public static final int UPDATE_EXPERIMENTER = 3;
/** Indicates to reset the password. */
public static final int RESET_PASSWORD = 4;
/** Indicates to add experimenters to group. */
public static final int ADD_EXPERIMENTER_TO_GROUP = 5;
    /** Indicates to activate or deactivate a user. */
public static final int ACTIVATE_USER = 6;
/**
* Validates the index.
*
* @param index The value to control.
*/
private void checkIndex(int index)
{
switch (index) {
case CREATE_EXPERIMENTER:
case CREATE_GROUP:
case UPDATE_GROUP:
case UPDATE_EXPERIMENTER:
case RESET_PASSWORD:
case ADD_EXPERIMENTER_TO_GROUP:
case ACTIVATE_USER:
return;
default:
throw new IllegalArgumentException("Index not supported");
}
}
/**
* Can be the group to create or the group to add the experimenters to
* depending on the index.
*/
private GroupData group;
/** The collection of groups to create. */
private List<GroupData> groups;
/**
* Can be the owners of the group or the experimenters to create
* depending on the index.
*/
private Map<ExperimenterData, UserCredentials> experimenters;
/** One of the constants defined by this class. */
private int index;
/** Indicates the permissions associated to the group. */
private int permissions = -1;
/**
* Creates a new instance.
*
* @param group The group to handle.
* @param experimenters The experimenters to handle.
* @param index One of the constants defined by this class.
*/
public AdminObject(GroupData group, Map<ExperimenterData, UserCredentials>
experimenters, int index)
{
checkIndex(index);
this.group = group;
this.experimenters = experimenters;
this.index = index;
this.permissions = -1;
}
/**
* Creates a new instance.
*
* @param group The group to handle.
* @param values The experimenters to handle.
*/
public AdminObject(GroupData group, Collection<ExperimenterData> values)
{
if (values != null) {
Iterator<ExperimenterData> i = values.iterator();
experimenters = new HashMap<ExperimenterData, UserCredentials>();
while (i.hasNext()) {
experimenters.put(i.next(), null);
}
}
this.group = group;
this.index = ADD_EXPERIMENTER_TO_GROUP;
this.permissions = -1;
}
/**
* Creates a new instance.
*
* @param experimenters The experimenters to handle.
* @param index One of the constants defined by this class.
*/
public AdminObject(Map<ExperimenterData, UserCredentials> experimenters,
int index)
{
this(null, experimenters, index);
}
/**
* Sets the permissions associated to the group.
*
     * @param permissions The value to set. One of the permission constants
     *                    defined by {@code GroupData}.
*/
public void setPermissions(int permissions)
{
switch (permissions) {
case GroupData.PERMISSIONS_PRIVATE:
case GroupData.PERMISSIONS_GROUP_READ:
case GroupData.PERMISSIONS_GROUP_READ_LINK:
case GroupData.PERMISSIONS_GROUP_READ_WRITE:
case GroupData.PERMISSIONS_PUBLIC_READ:
case GroupData.PERMISSIONS_PUBLIC_READ_WRITE:
this.permissions = permissions;
break;
default:
this.permissions = GroupData.PERMISSIONS_PRIVATE;
}
}
/**
* Returns the permissions associated to the group.
*
* @return See above.
*/
public int getPermissions() { return permissions; }
/**
* Returns the experimenters to create.
*
* @return See above
*/
public Map<ExperimenterData, UserCredentials> getExperimenters()
{
return experimenters;
}
/**
* Returns the group to create or to add the experimenters to.
*
* @return See above.
*/
public GroupData getGroup() { return group; }
/**
* Sets the groups.
*
* @param groups The value to handle.
*/
public void setGroups(List<GroupData> groups) { this.groups = groups; }
/**
* Returns the groups to add the new users to.
*
* @return See above.
*/
public List<GroupData> getGroups() { return groups; }
/**
* Returns one of the constants defined by this class.
*
* @return See above.
*/
public int getIndex() { return index; }
}
| knabar/openmicroscopy | components/insight/SRC/org/openmicroscopy/shoola/env/data/model/AdminObject.java | Java | gpl-2.0 | 6,106 |
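A short sketch of assembling an AdminObject for a group-creation request, using only the constructor, constants and setters shown above; the group, owners and chosen permission level are placeholders supplied by the caller.

import java.util.Map;

import org.openmicroscopy.shoola.env.data.login.UserCredentials;
import org.openmicroscopy.shoola.env.data.model.AdminObject;

import omero.gateway.model.ExperimenterData;
import omero.gateway.model.GroupData;

class AdminObjectSketch {

    // 'group' is the group to create and 'owners' maps its future owners to
    // their credentials; both are placeholders supplied by the caller.
    static AdminObject newGroupRequest(GroupData group,
            Map<ExperimenterData, UserCredentials> owners) {
        AdminObject admin = new AdminObject(group, owners, AdminObject.CREATE_GROUP);
        // Unsupported values silently fall back to PERMISSIONS_PRIVATE,
        // see setPermissions above.
        admin.setPermissions(GroupData.PERMISSIONS_GROUP_READ);
        return admin;
    }
}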
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package p2;
public class c2 {
int i;
public void method2() { i = 5; System.out.println("c2 method2 called"); }
}
| JetBrains/jdk8u_hotspot | test/runtime/ClassUnload/p2/c2.java | Java | gpl-2.0 | 1,173 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util.json;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class JsonSimpleOrderedTest {
@Test
public void testOrdered() throws Exception {
InputStream is = new FileInputStream("src/test/resources/bean.json");
String json = loadText(is);
JsonObject output = Jsoner.deserialize(json, new JsonObject());
assertNotNull(output);
// should preserve order
Map<?, ?> map = output.getMap("component");
assertTrue(map instanceof LinkedHashMap);
Iterator<?> it = map.keySet().iterator();
assertEquals("kind", it.next());
assertEquals("scheme", it.next());
assertEquals("syntax", it.next());
assertEquals("title", it.next());
assertEquals("description", it.next());
assertEquals("label", it.next());
assertEquals("deprecated", it.next());
assertEquals("deprecationNote", it.next());
assertEquals("async", it.next());
assertEquals("consumerOnly", it.next());
assertEquals("producerOnly", it.next());
assertEquals("lenientProperties", it.next());
assertEquals("javaType", it.next());
assertEquals("firstVersion", it.next());
assertEquals("groupId", it.next());
assertEquals("artifactId", it.next());
assertEquals("version", it.next());
assertFalse(it.hasNext());
}
public static String loadText(InputStream in) throws IOException {
StringBuilder builder = new StringBuilder();
InputStreamReader isr = new InputStreamReader(in);
try {
BufferedReader reader = new BufferedReader(isr);
while (true) {
String line = reader.readLine();
if (line == null) {
                    return builder.toString();
}
builder.append(line);
builder.append("\n");
}
} finally {
isr.close();
in.close();
}
}
}
| nikhilvibhav/camel | tooling/camel-util-json/src/test/java/org/apache/camel/util/json/JsonSimpleOrderedTest.java | Java | apache-2.0 | 3,327 |
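A hedged sketch of the deserialization call the test exercises. The JSON literal is invented, but Jsoner.deserialize(String, JsonObject) and JsonObject.getMap(String) are used exactly as in the test above, and key order is preserved because the backing map is a LinkedHashMap.

import java.util.Map;

import org.apache.camel.util.json.JsonObject;
import org.apache.camel.util.json.Jsoner;

public class JsonOrderSketch {

    public static void main(String[] args) throws Exception {
        // Invented input; any JSON object works.
        String json = "{ \"component\": { \"kind\": \"component\", \"scheme\": \"log\" } }";

        JsonObject output = Jsoner.deserialize(json, new JsonObject());

        // getMap returns the nested object backed by a LinkedHashMap,
        // so iteration follows document order: "kind" before "scheme".
        Map<?, ?> component = output.getMap("component");
        component.keySet().forEach(System.out::println);
    }
}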
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package stubgenerator.traitStaticPropertiesStub;
public class JavaXImpl extends GroovyXImpl {
public static void main(String[] args) {
new JavaXImpl();
}
}
| jwagenleitner/incubator-groovy | src/test-resources/stubgenerator/traitStaticPropertiesStub/JavaXImpl.java | Java | apache-2.0 | 995 |
package org.apache.maven.artifact.manager;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.authentication.AuthenticationInfo;
import org.apache.maven.wagon.proxy.ProxyInfo;
/**
* Manages <a href="https://maven.apache.org/wagon">Wagon</a> related operations in Maven.
*
* @author <a href="michal.maczka@dimatics.com">Michal Maczka </a>
*/
@Deprecated
public interface WagonManager
extends org.apache.maven.repository.legacy.WagonManager
{
/**
     * This method is only here for backward compatibility
     * (project-info-reports:dependencies); the default implementation
     * returns an empty AuthenticationInfo.
*/
AuthenticationInfo getAuthenticationInfo( String id );
ProxyInfo getProxy( String protocol );
void getArtifact( Artifact artifact, ArtifactRepository repository )
throws TransferFailedException, ResourceDoesNotExistException;
void getArtifact( Artifact artifact, List<ArtifactRepository> remoteRepositories )
throws TransferFailedException, ResourceDoesNotExistException;
ArtifactRepository getMirrorRepository( ArtifactRepository repository );
}
| lbndev/maven | maven-compat/src/main/java/org/apache/maven/artifact/manager/WagonManager.java | Java | apache-2.0 | 2,151 |
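A sketch of how a caller might resolve an artifact through this deprecated facade; the WagonManager, Artifact and repository instances are assumed to be injected by the Maven container, and only the call shape of the interface above is illustrated.

import java.util.List;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.manager.WagonManager;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;

class WagonDownloadSketch {

    // All three parameters are assumed to be supplied by the container.
    static void download(WagonManager wagonManager, Artifact artifact,
            List<ArtifactRepository> remoteRepositories) {
        try {
            wagonManager.getArtifact(artifact, remoteRepositories);
        } catch (TransferFailedException | ResourceDoesNotExistException e) {
            // A failed transfer and a missing artifact are distinct failure modes.
            throw new IllegalStateException("Could not resolve " + artifact, e);
        }
    }
}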
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), hosted at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* Accurate Software Design, LLC.
* Portions created by the Initial Developer are Copyright (C) 2006-2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* See listed authors below.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chee.archive.entity;
import java.io.Serializable;
import java.util.Date;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.data.UID;
import org.dcm4chee.archive.common.PPSStatus;
import org.dcm4chee.archive.conf.AttributeFilter;
import org.dcm4chee.archive.util.DicomObjectUtils;
/**
* @author Damien Evans <damien.daddy@gmail.com>
* @author Justin Falk <jfalkmu@gmail.com>
* @author Gunter Zeilinger <gunterze@gmail.com>
* @version $Revision$ $Date$
* @since Feb 29, 2008
*/
@Entity
@Table(name = "mpps")
public class MPPS extends BaseEntity implements Serializable {
private static final long serialVersionUID = -599495313070741738L;
@Column(name = "created_time")
private Date createdTime;
@Column(name = "updated_time")
private Date updatedTime;
@Column(name = "mpps_iuid", unique = true, nullable = false)
private String sopInstanceUID;
@Column(name = "pps_start")
private Date startDateTime;
@Column(name = "station_aet")
private String performedStationAET;
@Column(name = "modality")
private String modality;
@Column(name = "accession_no")
private String accessionNumber;
@Column(name = "mpps_status", nullable = false)
private PPSStatus status;
// JPA definition in orm.xml
private byte[] encodedAttributes;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "drcode_fk")
private Code discontinuationReasonCode;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "patient_fk")
private Patient patient;
@OneToMany(mappedBy = "modalityPerformedProcedureStep", fetch = FetchType.LAZY)
private Set<Series> series;
public Date getCreatedTime() {
return createdTime;
}
public Date getUpdatedTime() {
return updatedTime;
}
public String getSopInstanceUID() {
return sopInstanceUID;
}
public Date getStartDateTime() {
return startDateTime;
}
public String getPerformedStationAET() {
return performedStationAET;
}
public String getModality() {
return modality;
}
public String getAccessionNumber() {
return accessionNumber;
}
public PPSStatus getStatus() {
return status;
}
public byte[] getEncodedAttributes() {
return encodedAttributes;
}
public Code getDiscontinuationReasonCode() {
return discontinuationReasonCode;
}
public void setDiscontinuationReasonCode(Code discontinuationReasonCode) {
this.discontinuationReasonCode = discontinuationReasonCode;
}
public Patient getPatient() {
return patient;
}
public void setPatient(Patient patient) {
this.patient = patient;
}
public Set<Series> getSeries() {
return series;
}
public void setSeries(Set<Series> series) {
this.series = series;
}
@Override
public String toString() {
return "MPPS[pk=" + pk
+ ", iuid=" + sopInstanceUID
+ ", status=" + status
+ ", accno=" + accessionNumber
+ ", start=" + startDateTime
+ ", mod=" + modality
+ ", aet=" + performedStationAET
+ "]";
}
public void onPrePersist() {
createdTime = new Date();
}
public void onPreUpdate() {
updatedTime = new Date();
}
public DicomObject getAttributes() {
return DicomObjectUtils.decode(encodedAttributes);
}
public void setAttributes(DicomObject attrs) {
this.sopInstanceUID = attrs.getString(Tag.SOPInstanceUID);
this.startDateTime = attrs.getDate(
Tag.PerformedProcedureStepStartDate,
Tag.PerformedProcedureStepStartTime);
this.performedStationAET = attrs.getString(Tag.PerformedStationAETitle);
this.modality = attrs.getString(Tag.Modality);
this.accessionNumber = attrs.getString(new int[] {
Tag.ScheduledStepAttributesSequence, 0, Tag.AccessionNumber });
if (this.accessionNumber == null)
this.accessionNumber = attrs.getString(Tag.AccessionNumber);
this.status = PPSStatus.valueOf(attrs.getString(
Tag.PerformedProcedureStepStatus).replace(' ', '_'));
this.encodedAttributes = DicomObjectUtils.encode(AttributeFilter.getExcludePatientAttributeFilter().filter(attrs),
UID.DeflatedExplicitVRLittleEndian);
}
}
| medicayun/medicayundicom | dcm4chee-arc3-entities/trunk/src/main/java/org/dcm4chee/archive/entity/MPPS.java | Java | apache-2.0 | 6,621 |
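A minimal sketch of populating the entity from a received MPPS dataset using only the setters shown above; the DicomObject and Patient inputs are assumed to come from the DIMSE layer and the persistence context respectively.

import org.dcm4che2.data.DicomObject;
import org.dcm4chee.archive.entity.MPPS;
import org.dcm4chee.archive.entity.Patient;

class MppsMappingSketch {

    // 'attrs' is assumed to be the dataset of an incoming MPPS N-CREATE and
    // 'patient' an already resolved Patient entity.
    static MPPS fromDataset(DicomObject attrs, Patient patient) {
        MPPS mpps = new MPPS();
        // setAttributes copies SOP Instance UID, start date/time, station AET,
        // modality, accession number and status, and stores the filtered
        // remainder in deflated explicit VR little endian form.
        mpps.setAttributes(attrs);
        mpps.setPatient(patient);
        return mpps;
    }
}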
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Implementations of serializer and derserializer interfaces.
*/
package org.apache.reef.wake.avro.impl;
| markusweimer/incubator-reef | lang/java/reef-wake/wake/src/main/java/org/apache/reef/wake/avro/impl/package-info.java | Java | apache-2.0 | 920 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import com.google.common.collect.Iterators;
import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.EnumSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class TestLazyPersistFiles extends LazyPersistTestCase {
private static final int THREADPOOL_SIZE = 10;
/**
* Append to lazy persist file is denied.
* @throws IOException
*/
@Test
public void testAppendIsDenied() throws IOException {
getClusterBuilder().build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path = new Path("/" + METHOD_NAME + ".dat");
makeTestFile(path, BLOCK_SIZE, true);
try {
client.append(path.toString(), BUFFER_LENGTH,
EnumSet.of(CreateFlag.APPEND), null, null).close();
fail("Append to LazyPersist file did not fail as expected");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
}
/**
* Truncate to lazy persist file is denied.
* @throws IOException
*/
@Test
public void testTruncateIsDenied() throws IOException {
getClusterBuilder().build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path = new Path("/" + METHOD_NAME + ".dat");
makeTestFile(path, BLOCK_SIZE, true);
try {
client.truncate(path.toString(), BLOCK_SIZE/2);
fail("Truncate to LazyPersist file did not fail as expected");
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
}
/**
* If one or more replicas of a lazyPersist file are lost, then the file
* must be discarded by the NN, instead of being kept around as a
* 'corrupt' file.
*/
@Test
public void testCorruptFilesAreDiscarded()
throws IOException, InterruptedException, TimeoutException {
getClusterBuilder().setRamDiskReplicaCapacity(2).build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
makeTestFile(path1, BLOCK_SIZE, true);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
// Stop the DataNode and sleep for the time it takes the NN to
// detect the DN as being dead.
cluster.shutdownDataNodes();
Thread.sleep(30000L);
assertThat(cluster.getNamesystem().getNumDeadDataNodes(), is(1));
// Next, wait for the redundancy monitor to mark the file as corrupt.
Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);
// Wait for the LazyPersistFileScrubber to run
Thread.sleep(2 * LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC * 1000);
    // Ensure that path1 does not exist anymore.
assert(!fs.exists(path1));
    // We should have zero blocks that need replication.
assertThat(cluster.getNameNode()
.getNamesystem()
.getBlockManager()
.getLowRedundancyBlocksCount(),
is(0L));
}
@Test
public void testDisableLazyPersistFileScrubber()
throws IOException, InterruptedException, TimeoutException {
getClusterBuilder().setRamDiskReplicaCapacity(2).disableScrubber().build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
makeTestFile(path1, BLOCK_SIZE, true);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
// Stop the DataNode and sleep for the time it takes the NN to
// detect the DN as being dead.
cluster.shutdownDataNodes();
Thread.sleep(30000L);
// Next, wait for the redundancy monitor to mark the file as corrupt.
Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);
// Wait for the LazyPersistFileScrubber to run
Thread.sleep(2 * LAZY_WRITE_FILE_SCRUBBER_INTERVAL_SEC * 1000);
// Ensure that path1 exist.
Assert.assertTrue(fs.exists(path1));
}
/**
   * If the NN is restarted, lazyPersist files should not be deleted.
*/
@Test
public void testFileShouldNotDiscardedIfNNRestarted()
throws IOException, InterruptedException, TimeoutException {
getClusterBuilder().setRamDiskReplicaCapacity(2).build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
makeTestFile(path1, BLOCK_SIZE, true);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
cluster.shutdownDataNodes();
cluster.restartNameNodes();
// wait for the redundancy monitor to mark the file as corrupt.
Thread.sleep(2 * DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_DEFAULT * 1000);
Long corruptBlkCount = (long) Iterators.size(cluster.getNameNode()
.getNamesystem().getBlockManager().getCorruptReplicaBlockIterator());
// Check block detected as corrupted
assertThat(corruptBlkCount, is(1L));
// Ensure path1 exist.
Assert.assertTrue(fs.exists(path1));
}
/**
* Concurrent read from the same node and verify the contents.
*/
@Test
public void testConcurrentRead()
throws Exception {
getClusterBuilder().setRamDiskReplicaCapacity(2).build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
final Path path1 = new Path("/" + METHOD_NAME + ".dat");
final int SEED = 0xFADED;
final int NUM_TASKS = 5;
makeRandomTestFile(path1, BLOCK_SIZE, true, SEED);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
//Read from multiple clients
final CountDownLatch latch = new CountDownLatch(NUM_TASKS);
final AtomicBoolean testFailed = new AtomicBoolean(false);
Runnable readerRunnable = new Runnable() {
@Override
public void run() {
try {
Assert.assertTrue(verifyReadRandomFile(path1, BLOCK_SIZE, SEED));
} catch (Throwable e) {
LOG.error("readerRunnable error", e);
testFailed.set(true);
} finally {
latch.countDown();
}
}
};
Thread threads[] = new Thread[NUM_TASKS];
for (int i = 0; i < NUM_TASKS; i++) {
threads[i] = new Thread(readerRunnable);
threads[i].start();
}
Thread.sleep(500);
for (int i = 0; i < NUM_TASKS; i++) {
Uninterruptibles.joinUninterruptibly(threads[i]);
}
Assert.assertFalse(testFailed.get());
}
/**
   * Concurrent writes with eviction:
   * RAM_DISK can hold 9 replicas;
   * 4 threads each write 5 replicas.
* @throws IOException
* @throws InterruptedException
*/
@Test
public void testConcurrentWrites()
throws IOException, InterruptedException {
getClusterBuilder().setRamDiskReplicaCapacity(9).build();
final String METHOD_NAME = GenericTestUtils.getMethodName();
final int SEED = 0xFADED;
final int NUM_WRITERS = 4;
final int NUM_WRITER_PATHS = 5;
Path paths[][] = new Path[NUM_WRITERS][NUM_WRITER_PATHS];
for (int i = 0; i < NUM_WRITERS; i++) {
paths[i] = new Path[NUM_WRITER_PATHS];
for (int j = 0; j < NUM_WRITER_PATHS; j++) {
paths[i][j] =
new Path("/" + METHOD_NAME + ".Writer" + i + ".File." + j + ".dat");
}
}
final CountDownLatch latch = new CountDownLatch(NUM_WRITERS);
final AtomicBoolean testFailed = new AtomicBoolean(false);
ExecutorService executor = Executors.newFixedThreadPool(THREADPOOL_SIZE);
for (int i = 0; i < NUM_WRITERS; i++) {
Runnable writer = new WriterRunnable(i, paths[i], SEED, latch, testFailed);
executor.execute(writer);
}
Thread.sleep(3 * LAZY_WRITER_INTERVAL_SEC * 1000);
triggerBlockReport();
    // Wait for all writer tasks to finish before checking for failures
latch.await();
assertThat(testFailed.get(), is(false));
}
class WriterRunnable implements Runnable {
private final int id;
private final Path paths[];
private final int seed;
private CountDownLatch latch;
private AtomicBoolean bFail;
public WriterRunnable(int threadIndex, Path[] paths,
int seed, CountDownLatch latch,
AtomicBoolean bFail) {
id = threadIndex;
this.paths = paths;
this.seed = seed;
this.latch = latch;
this.bFail = bFail;
System.out.println("Creating Writer: " + id);
}
public void run() {
System.out.println("Writer " + id + " starting... ");
int i = 0;
try {
for (i = 0; i < paths.length; i++) {
makeRandomTestFile(paths[i], BLOCK_SIZE, true, seed);
          // eviction may fail when all blocks are not persisted yet.
// ensureFileReplicasOnStorageType(paths[i], RAM_DISK);
}
} catch (IOException e) {
bFail.set(true);
LOG.error("Writer exception: writer id:" + id +
" testfile: " + paths[i].toString() +
" " + e);
} finally {
latch.countDown();
}
}
}
}
| dennishuo/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java | Java | apache-2.0 | 10,335 |
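A generic restatement of the coordination pattern used by testConcurrentRead and testConcurrentWrites above, reduced to JDK types: worker failures are recorded in an AtomicBoolean, a CountDownLatch makes the caller wait for every worker, and the single assertion happens afterwards on the calling thread. This is a sketch of the pattern only, not of the HDFS-specific setup.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

public class ConcurrentCheckSketch {

    // Runs 'check' on the given number of worker threads and reports whether
    // every run completed without throwing.
    public static boolean runAll(Runnable check, int workers) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(workers);
        AtomicBoolean failed = new AtomicBoolean(false);

        for (int i = 0; i < workers; i++) {
            new Thread(() -> {
                try {
                    check.run();
                } catch (Throwable t) {
                    failed.set(true);   // record the failure for the main thread
                } finally {
                    latch.countDown();  // always count down, even on failure
                }
            }).start();
        }

        latch.await();                  // wait for every worker
        return !failed.get();           // assert once, on the calling thread
    }
}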
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.update.processor;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.ArrayList;
/**
* Manages a chain of UpdateRequestProcessorFactories.
* <p>
* Chains can be configured via solrconfig.xml using the following syntax...
* </p>
* <pre class="prettyprint">
* <updateRequestProcessorChain name="key" default="true">
* <processor class="package.Class1" />
* <processor class="package.Class2" >
* <str name="someInitParam1">value</str>
* <int name="someInitParam2">42</int>
* </processor>
* <processor class="solr.LogUpdateProcessorFactory" >
* <int name="maxNumToLog">100</int>
* </processor>
* <processor class="solr.RunUpdateProcessorFactory" />
* </updateRequestProcessorChain>
* </pre>
* <p>
* Multiple Chains can be defined, each with a distinct name. The name of
* a chain used to handle an update request may be specified using the request
* param <code>update.chain</code>. If no chain is explicitly selected
* by name, then Solr will attempt to determine a default chain:
* </p>
* <ul>
* <li>A single configured chain may explicitly be declared with
* <code>default="true"</code> (see example above)</li>
* <li>If no chain is explicitly declared as the default, Solr will look for
* any chain that does not have a name, and treat it as the default</li>
* <li>As a last resort, Solr will create an implicit default chain
* consisting of:<ul>
* <li>{@link LogUpdateProcessorFactory}</li>
* <li>{@link DistributedUpdateProcessorFactory}</li>
* <li>{@link RunUpdateProcessorFactory}</li>
* </ul></li>
* </ul>
*
* <p>
 * Almost all processor chains should end with an instance of
* <code>RunUpdateProcessorFactory</code> unless the user is explicitly
* executing the update commands in an alternative custom
* <code>UpdateRequestProcessorFactory</code>. If a chain includes
* <code>RunUpdateProcessorFactory</code> but does not include a
* <code>DistributingUpdateProcessorFactory</code>, it will be added
* automatically by {@link #init init()}.
* </p>
*
* @see UpdateRequestProcessorFactory
* @see #init
* @see #createProcessor
* @since solr 1.3
*/
public final class UpdateRequestProcessorChain implements PluginInfoInitialized
{
public final static Logger log = LoggerFactory.getLogger(UpdateRequestProcessorChain.class);
private UpdateRequestProcessorFactory[] chain;
private final SolrCore solrCore;
public UpdateRequestProcessorChain(SolrCore solrCore) {
this.solrCore = solrCore;
}
/**
* Initializes the chain using the factories specified by the <code>PluginInfo</code>.
* if the chain includes the <code>RunUpdateProcessorFactory</code>, but
* does not include an implementation of the
* <code>DistributingUpdateProcessorFactory</code> interface, then an
* instance of <code>DistributedUpdateProcessorFactory</code> will be
* injected immediately prior to the <code>RunUpdateProcessorFactory</code>.
*
* @see DistributingUpdateProcessorFactory
* @see RunUpdateProcessorFactory
* @see DistributedUpdateProcessorFactory
*/
@Override
public void init(PluginInfo info) {
final String infomsg = "updateRequestProcessorChain \"" +
(null != info.name ? info.name : "") + "\"" +
(info.isDefault() ? " (default)" : "");
log.info("creating " + infomsg);
    // wrap in an ArrayList so we know we can do fast index lookups
// and that add(int,Object) is supported
    List<UpdateRequestProcessorFactory> list = new ArrayList<>(
        solrCore.initPlugins(info.getChildren("processor"), UpdateRequestProcessorFactory.class, null));
if(list.isEmpty()){
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
infomsg + " require at least one processor");
}
int numDistrib = 0;
int runIndex = -1;
    // hi->lo in case of multiple run instances, add before the first one
// (no idea why someone might use multiple run instances, but just in case)
for (int i = list.size()-1; 0 <= i; i--) {
UpdateRequestProcessorFactory factory = list.get(i);
if (factory instanceof DistributingUpdateProcessorFactory) {
numDistrib++;
}
if (factory instanceof RunUpdateProcessorFactory) {
runIndex = i;
}
}
if (1 < numDistrib) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                                infomsg + " may not contain more than one " +
"instance of DistributingUpdateProcessorFactory");
}
if (0 <= runIndex && 0 == numDistrib) {
// by default, add distrib processor immediately before run
DistributedUpdateProcessorFactory distrib
= new DistributedUpdateProcessorFactory();
distrib.init(new NamedList());
list.add(runIndex, distrib);
log.info("inserting DistributedUpdateProcessorFactory into " + infomsg);
}
chain = list.toArray(new UpdateRequestProcessorFactory[list.size()]);
}
/**
* Creates a chain backed directly by the specified array. Modifications to
* the array will affect future calls to <code>createProcessor</code>
*/
public UpdateRequestProcessorChain( UpdateRequestProcessorFactory[] chain,
SolrCore solrCore) {
this.chain = chain;
this.solrCore = solrCore;
}
/**
   * Uses the factories in this chain to create a new
* <code>UpdateRequestProcessor</code> instance specific for this request.
* If the <code>DISTRIB_UPDATE_PARAM</code> is present in the request and is
* non-blank, then any factory in this chain prior to the instance of
* <code>{@link DistributingUpdateProcessorFactory}</code> will be skipped,
* except for the log update processor factory.
*
* @see UpdateRequestProcessorFactory#getInstance
* @see DistributingUpdateProcessorFactory#DISTRIB_UPDATE_PARAM
*/
public UpdateRequestProcessor createProcessor(SolrQueryRequest req,
SolrQueryResponse rsp)
{
UpdateRequestProcessor processor = null;
UpdateRequestProcessor last = null;
final String distribPhase = req.getParams().get(DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM);
final boolean skipToDistrib = distribPhase != null;
boolean afterDistrib = true; // we iterate backwards, so true to start
for (int i = chain.length-1; i>=0; i--) {
UpdateRequestProcessorFactory factory = chain[i];
if (skipToDistrib) {
if (afterDistrib) {
if (factory instanceof DistributingUpdateProcessorFactory) {
afterDistrib = false;
}
} else if (!(factory instanceof UpdateRequestProcessorFactory.RunAlways)) {
// skip anything that doesn't have the marker interface
continue;
}
}
processor = factory.getInstance(req, rsp, last);
last = processor == null ? last : processor;
}
return last;
}
/**
* Returns the underlying array of factories used in this chain.
* Modifications to the array will affect future calls to
* <code>createProcessor</code>
*/
public UpdateRequestProcessorFactory[] getFactories() {
return chain;
}
}
| williamchengit/TestRepo | solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java | Java | apache-2.0 | 8,567 |
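A hedged sketch of building a chain from the array-backed constructor above and pushing one document through it. AddUpdateCommand, processAdd and finish are standard Solr update APIs but are not shown in this file, so treat their exact signatures as assumptions; the chain construction and createProcessor call match the class above.

import java.io.IOException;

import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;

class ChainUsageSketch {

    // AddUpdateCommand, processAdd and finish are assumed Solr update APIs.
    static void indexOne(SolrCore core, UpdateRequestProcessorFactory[] factories,
            SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
        UpdateRequestProcessorChain chain = new UpdateRequestProcessorChain(factories, core);

        // Skipping logic only applies when DISTRIB_UPDATE_PARAM is set on the request.
        UpdateRequestProcessor processor = chain.createProcessor(req, rsp);

        AddUpdateCommand cmd = new AddUpdateCommand(req);
        processor.processAdd(cmd);   // each factory's processor runs in chain order
        processor.finish();
    }
}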
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pim.cli;
import org.apache.karaf.shell.commands.Command;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.pim.impl.PimInterface;
import org.onosproject.pim.impl.PimInterfaceService;
import java.util.Set;
/**
* Lists the interfaces where PIM is enabled.
*/
@Command(scope = "onos", name = "pim-interfaces",
description = "Lists the interfaces where PIM is enabled")
public class PimInterfacesListCommand extends AbstractShellCommand {
private static final String FORMAT = "interfaceName=%s, holdTime=%s, priority=%s, genId=%s";
private static final String ROUTE_FORMAT = " %s";
@Override
protected void execute() {
PimInterfaceService interfaceService = get(PimInterfaceService.class);
Set<PimInterface> interfaces = interfaceService.getPimInterfaces();
interfaces.forEach(pimIntf -> {
print(FORMAT, pimIntf.getInterface().name(),
pimIntf.getHoldtime(), pimIntf.getPriority(),
pimIntf.getGenerationId());
pimIntf.getRoutes().forEach(route -> print(ROUTE_FORMAT, route));
});
}
}
| donNewtonAlpha/onos | apps/pim/src/main/java/org/onosproject/pim/cli/PimInterfacesListCommand.java | Java | apache-2.0 | 1,782 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.operators.hash;
import org.apache.flink.api.common.typeutils.GenericPairComparator;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.ByteValueSerializer;
import org.apache.flink.api.common.typeutils.base.LongComparator;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArrayComparator;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.runtime.TupleComparator;
import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
import org.apache.flink.api.java.typeutils.runtime.ValueComparator;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.types.ByteValue;
import org.apache.flink.util.MutableObjectIterator;
import org.junit.Test;
import org.junit.Assert;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
public class HashTableTest {
private final TypeSerializer<Tuple2<Long, byte[]>> buildSerializer;
private final TypeSerializer<Long> probeSerializer;
private final TypeComparator<Tuple2<Long, byte[]>> buildComparator;
private final TypeComparator<Long> probeComparator;
private final TypePairComparator<Long, Tuple2<Long, byte[]>> pairComparator;
public HashTableTest() {
TypeSerializer<?>[] fieldSerializers = { LongSerializer.INSTANCE, BytePrimitiveArraySerializer.INSTANCE };
@SuppressWarnings("unchecked")
Class<Tuple2<Long, byte[]>> clazz = (Class<Tuple2<Long, byte[]>>) (Class<?>) Tuple2.class;
this.buildSerializer = new TupleSerializer<Tuple2<Long, byte[]>>(clazz, fieldSerializers);
this.probeSerializer = LongSerializer.INSTANCE;
TypeComparator<?>[] comparators = { new LongComparator(true) };
TypeSerializer<?>[] comparatorSerializers = { LongSerializer.INSTANCE };
this.buildComparator = new TupleComparator<Tuple2<Long, byte[]>>(new int[] {0}, comparators, comparatorSerializers);
this.probeComparator = new LongComparator(true);
this.pairComparator = new TypePairComparator<Long, Tuple2<Long, byte[]>>() {
private long ref;
@Override
public void setReference(Long reference) {
ref = reference;
}
@Override
public boolean equalToReference(Tuple2<Long, byte[]> candidate) {
//noinspection UnnecessaryUnboxing
return candidate.f0.longValue() == ref;
}
@Override
public int compareToReference(Tuple2<Long, byte[]> candidate) {
long x = ref;
long y = candidate.f0;
return (x < y) ? -1 : ((x == y) ? 0 : 1);
}
};
}
// ------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------
/**
* This tests a combination of values that lead to a corner case situation where memory
* was missing and the computation deadlocked.
*/
@Test
public void testBufferMissingForProbing() {
final IOManager ioMan = new IOManagerAsync();
try {
final int pageSize = 32*1024;
final int numSegments = 34;
final int numRecords = 3400;
final int recordLen = 270;
final byte[] payload = new byte[recordLen - 8 - 4];
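			// rough arithmetic behind these constants (comment added for clarity, not from
			// the original sources): the build side is about 3400 * 270 bytes = ~0.9 MB,
			// while the table only receives 34 * 32 KB = ~1.1 MB of memory, so after the
			// build phase hardly any buffers are left over for probing - the situation
			// that used to deadlock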
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<Tuple2<Long, byte[]>, Long> table = new MutableHashTable<>(
buildSerializer, probeSerializer, buildComparator, probeComparator,
pairComparator, memory, ioMan, 16, false);
table.open(new TupleBytesIterator(payload, numRecords), new LongIterator(10000));
try {
while (table.nextRecord()) {
MutableObjectIterator<Tuple2<Long, byte[]>> matches = table.getBuildSideIterator();
while (matches.next() != null);
}
}
catch (RuntimeException e) {
if (!e.getMessage().contains("exceeded maximum number of recursions")) {
e.printStackTrace();
fail("Test failed with unexpected exception");
}
}
finally {
table.close();
}
checkNoTempFilesRemain(ioMan);
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
finally {
ioMan.shutdown();
}
}
/**
* This tests the case where no additional partition buffers are used at the point when spilling
* is triggered, testing that overflow bucket buffers are taken into account when deciding which
* partition to spill.
*/
@Test
public void testSpillingFreesOnlyOverflowSegments() {
final IOManager ioMan = new IOManagerAsync();
final TypeSerializer<ByteValue> serializer = ByteValueSerializer.INSTANCE;
final TypeComparator<ByteValue> buildComparator = new ValueComparator<>(true, ByteValue.class);
final TypeComparator<ByteValue> probeComparator = new ValueComparator<>(true, ByteValue.class);
@SuppressWarnings("unchecked")
final TypePairComparator<ByteValue, ByteValue> pairComparator = Mockito.mock(TypePairComparator.class);
try {
final int pageSize = 32*1024;
final int numSegments = 34;
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<ByteValue, ByteValue> table = new MutableHashTable<>(
serializer, serializer, buildComparator, probeComparator,
pairComparator, memory, ioMan, 1, false);
table.open(new ByteValueIterator(100000000), new ByteValueIterator(1));
table.close();
checkNoTempFilesRemain(ioMan);
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
finally {
ioMan.shutdown();
}
}
/**
* Tests that the MutableHashTable spills its partitions when creating the initial table
* without overflow segments in the partitions. This means that the records are large.
*/
@Test
public void testSpillingWhenBuildingTableWithoutOverflow() throws Exception {
final IOManager ioMan = new IOManagerAsync();
final TypeSerializer<byte[]> serializer = BytePrimitiveArraySerializer.INSTANCE;
final TypeComparator<byte[]> buildComparator = new BytePrimitiveArrayComparator(true);
final TypeComparator<byte[]> probeComparator = new BytePrimitiveArrayComparator(true);
@SuppressWarnings("unchecked")
final TypePairComparator<byte[], byte[]> pairComparator = new GenericPairComparator<>(
new BytePrimitiveArrayComparator(true), new BytePrimitiveArrayComparator(true));
final int pageSize = 128;
final int numSegments = 33;
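		// note (added for clarity): with a 128 byte page size, each 128 byte record plus its
		// serialized length header fills at least a whole page, so the partitions hold no
		// overflow bucket segments and spilling has to start while the table is being built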
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<byte[], byte[]> table = new MutableHashTable<byte[], byte[]>(
serializer,
serializer,
buildComparator,
probeComparator,
pairComparator,
memory,
ioMan,
1,
false);
int numElements = 9;
table.open(
new CombiningIterator<byte[]>(
new ByteArrayIterator(numElements, 128,(byte) 0),
new ByteArrayIterator(numElements, 128,(byte) 1)),
new CombiningIterator<byte[]>(
new ByteArrayIterator(1, 128,(byte) 0),
new ByteArrayIterator(1, 128,(byte) 1)));
while(table.nextRecord()) {
MutableObjectIterator<byte[]> iterator = table.getBuildSideIterator();
int counter = 0;
while(iterator.next() != null) {
counter++;
}
// check that we retrieve all our elements
Assert.assertEquals(numElements, counter);
}
table.close();
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
private static List<MemorySegment> getMemory(int numSegments, int segmentSize) {
ArrayList<MemorySegment> list = new ArrayList<MemorySegment>(numSegments);
for (int i = 0; i < numSegments; i++) {
list.add(MemorySegmentFactory.allocateUnpooledSegment(segmentSize));
}
return list;
}
private static void checkNoTempFilesRemain(IOManager ioManager) {
for (File dir : ioManager.getSpillingDirectories()) {
for (String file : dir.list()) {
if (file != null && !(file.equals(".") || file.equals(".."))) {
fail("hash table did not clean up temp files. remaining file: " + file);
}
}
}
}
private static class TupleBytesIterator implements MutableObjectIterator<Tuple2<Long, byte[]>> {
private final byte[] payload;
private final int numRecords;
private int count = 0;
TupleBytesIterator(byte[] payload, int numRecords) {
this.payload = payload;
this.numRecords = numRecords;
}
@Override
public Tuple2<Long, byte[]> next(Tuple2<Long, byte[]> reuse) {
return next();
}
@Override
public Tuple2<Long, byte[]> next() {
if (count++ < numRecords) {
return new Tuple2<>(42L, payload);
} else {
return null;
}
}
}
private static class ByteArrayIterator implements MutableObjectIterator<byte[]> {
private final long numRecords;
private long counter = 0;
private final byte[] arrayValue;
ByteArrayIterator(long numRecords, int length, byte value) {
this.numRecords = numRecords;
arrayValue = new byte[length];
Arrays.fill(arrayValue, value);
}
@Override
public byte[] next(byte[] array) {
return next();
}
@Override
public byte[] next() {
if (counter++ < numRecords) {
return arrayValue;
} else {
return null;
}
}
}
private static class LongIterator implements MutableObjectIterator<Long> {
private final long numRecords;
private long value = 0;
LongIterator(long numRecords) {
this.numRecords = numRecords;
}
@Override
public Long next(Long aLong) {
return next();
}
@Override
public Long next() {
if (value < numRecords) {
return value++;
} else {
return null;
}
}
}
private static class ByteValueIterator implements MutableObjectIterator<ByteValue> {
private final long numRecords;
private long value = 0;
ByteValueIterator(long numRecords) {
this.numRecords = numRecords;
}
@Override
public ByteValue next(ByteValue aLong) {
return next();
}
@Override
public ByteValue next() {
if (value++ < numRecords) {
return new ByteValue((byte) 0);
} else {
return null;
}
}
}
private static class CombiningIterator<T> implements MutableObjectIterator<T> {
private final MutableObjectIterator<T> left;
private final MutableObjectIterator<T> right;
public CombiningIterator(MutableObjectIterator<T> left, MutableObjectIterator<T> right) {
this.left = left;
this.right = right;
}
@Override
public T next(T reuse) throws IOException {
T value = left.next(reuse);
if (value == null) {
return right.next(reuse);
} else {
return value;
}
}
@Override
public T next() throws IOException {
T value = left.next();
if (value == null) {
return right.next();
} else {
return value;
}
}
}
}
| hongyuhong/flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/hash/HashTableTest.java | Java | apache-2.0 | 12,009 |
package com.avast.android.dialogs.core;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
/**
* Internal base builder that holds common values for all dialog fragment builders.
*
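 * <p>A minimal concrete builder (purely illustrative, not part of this library) only has
 * to supply {@link #self()} and {@link #prepareArguments()}; tag, request code, theme and
 * cancelable handling are inherited. {@code MessageDialogFragment} below is a hypothetical
 * {@code BaseDialogFragment} subclass:
 * <pre>{@code
 * public class MessageDialogBuilder extends BaseDialogBuilder<MessageDialogBuilder> {
 *
 *     private static final String ARG_MESSAGE = "message"; // illustrative argument key
 *     private String mMessage;
 *
 *     public MessageDialogBuilder(Context context, FragmentManager fm) {
 *         super(context, fm, MessageDialogFragment.class);
 *     }
 *
 *     public MessageDialogBuilder setMessage(String message) {
 *         mMessage = message;
 *         return self();
 *     }
 *
 *     protected MessageDialogBuilder self() {
 *         return this;
 *     }
 *
 *     protected Bundle prepareArguments() {
 *         Bundle args = new Bundle();
 *         args.putString(ARG_MESSAGE, mMessage);
 *         return args;
 *     }
 * }
 *
 * // typical call site, e.g. inside an activity
 * new MessageDialogBuilder(this, getSupportFragmentManager())
 *         .setMessage("Hello")
 *         .setCancelableOnTouchOutside(false)
 *         .show();
 * }</pre>
 *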
* @author Tomas Vondracek
*/
public abstract class BaseDialogBuilder<T extends BaseDialogBuilder<T>> {
public final static String ARG_REQUEST_CODE = "request_code";
public final static String ARG_CANCELABLE_ON_TOUCH_OUTSIDE = "cancelable_oto";
public final static String DEFAULT_TAG = "simple_dialog";
private String mTag = DEFAULT_TAG;
public final static int DEFAULT_REQUEST_CODE = -42;
private int mRequestCode = DEFAULT_REQUEST_CODE;
public static String ARG_USE_DARK_THEME = "usedarktheme";
public static String ARG_USE_LIGHT_THEME = "uselighttheme";
protected final Context mContext;
protected final FragmentManager mFragmentManager;
protected final Class<? extends BaseDialogFragment> mClass;
private Fragment mTargetFragment;
private boolean mCancelable = true;
private boolean mCancelableOnTouchOutside = true;
private boolean mUseDarkTheme = false;
private boolean mUseLightTheme = false;
public BaseDialogBuilder(Context context, FragmentManager fragmentManager, Class<? extends BaseDialogFragment> clazz) {
mFragmentManager = fragmentManager;
mContext = context.getApplicationContext();
mClass = clazz;
}
protected abstract T self();
protected abstract Bundle prepareArguments();
public T setCancelable(boolean cancelable) {
mCancelable = cancelable;
return self();
}
public T setCancelableOnTouchOutside(boolean cancelable) {
mCancelableOnTouchOutside = cancelable;
if (cancelable) {
mCancelable = cancelable;
}
return self();
}
public T setTargetFragment(Fragment fragment, int requestCode) {
mTargetFragment = fragment;
mRequestCode = requestCode;
return self();
}
public T setRequestCode(int requestCode) {
mRequestCode = requestCode;
return self();
}
public T setTag(String tag) {
mTag = tag;
return self();
}
public T useDarkTheme() {
mUseDarkTheme = true;
return self();
}
public T useLightTheme() {
mUseLightTheme = true;
return self();
}
private BaseDialogFragment create() {
final Bundle args = prepareArguments();
final BaseDialogFragment fragment = (BaseDialogFragment) Fragment.instantiate(mContext, mClass.getName(), args);
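        // Note: Fragment.instantiate() stores this same Bundle instance as the fragment's
        // arguments, so the values added below are still visible to the created fragment.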
args.putBoolean(ARG_CANCELABLE_ON_TOUCH_OUTSIDE, mCancelableOnTouchOutside);
args.putBoolean(ARG_USE_DARK_THEME, mUseDarkTheme);
args.putBoolean(ARG_USE_LIGHT_THEME, mUseLightTheme);
if (mTargetFragment != null) {
fragment.setTargetFragment(mTargetFragment, mRequestCode);
} else {
args.putInt(ARG_REQUEST_CODE, mRequestCode);
}
fragment.setCancelable(mCancelable);
return fragment;
}
public DialogFragment show() {
BaseDialogFragment fragment = create();
fragment.show(mFragmentManager, mTag);
return fragment;
}
/**
* Like show() but allows the commit to be executed after an activity's state is saved. This
* is dangerous because the commit can be lost if the activity needs to later be restored from
* its state, so this should only be used for cases where it is okay for the UI state to change
* unexpectedly on the user.
*/
public DialogFragment showAllowingStateLoss() {
BaseDialogFragment fragment = create();
fragment.showAllowingStateLoss(mFragmentManager, mTag);
return fragment;
}
}
| jaohoang/android-styled-dialogs | library/src/main/java/com/avast/android/dialogs/core/BaseDialogBuilder.java | Java | apache-2.0 | 3,904 |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.graph;
import com.google.common.base.Preconditions;
public class DefaultDirectedAcyclicGraph<T> extends DefaultTraversableGraph<T>
implements DirectedAcyclicGraph<T> {
public DefaultDirectedAcyclicGraph(MutableDirectedGraph<T> graph) {
super(graph);
Preconditions.checkArgument(super.isAcyclic());
}
}
| janicduplessis/buck | src/com/facebook/buck/graph/DefaultDirectedAcyclicGraph.java | Java | apache-2.0 | 954 |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.gui.splash;
import java.awt.*;
/**
* This interface describes objects that can paint themselves, possibly varying
* as a function of time.
*
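 * <p>
 * For illustration only (this example is not part of the original sources), an
 * implementation that fades in a small dot over the first second of the animation
 * might look roughly like this:
 * <pre>
 *     public class FadingDotSprite implements Sprite
 *     {
 *         public void paint(Graphics graphics, long time)
 *         {
 *             // Map the elapsed time [0..1000 ms] onto an alpha value [0..255].
 *             int alpha = (int)Math.min(255L, time * 255L / 1000L);
 *
 *             graphics.setColor(new Color(0, 0, 0, alpha));
 *             graphics.fillOval(10, 10, 8, 8);
 *         }
 *     }
 * </pre>
 *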
* @author Eric Lafortune
*/
public interface Sprite
{
/**
* Paints the object.
*
* @param graphics the Graphics to paint on.
* @param time the time since the start of the animation, expressed in
* milliseconds.
*/
public void paint(Graphics graphics, long time);
}
| damienmg/bazel | third_party/java/proguard/proguard5.3.3/src/proguard/gui/splash/Sprite.java | Java | apache-2.0 | 1,374 |
package info.ephyra.answerselection.filters;
import info.ephyra.io.Logger;
import info.ephyra.io.MsgPrinter;
import info.ephyra.nlp.NETagger;
import info.ephyra.nlp.OpenNLP;
import info.ephyra.nlp.SnowballStemmer;
import info.ephyra.nlp.StanfordNeTagger;
import info.ephyra.nlp.indices.WordFrequencies;
import info.ephyra.querygeneration.Query;
import info.ephyra.querygeneration.generators.BagOfWordsG;
import info.ephyra.questionanalysis.AnalyzedQuestion;
import info.ephyra.questionanalysis.KeywordExtractor;
import info.ephyra.questionanalysis.QuestionNormalizer;
import info.ephyra.search.Result;
import info.ephyra.trec.TREC13To16Parser;
import info.ephyra.trec.TRECTarget;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
/**
* <p>A web reinforcement approach that ranks answer candidates for definitional
* questions. Several variations of the target of the question are generated and
* are used to retrieve relevant text snippets from the web. The frequencies of
* content words in these snippets are counted and the scores of the answers are
* adjusted to assign higher scores to candidates that cover frequent keywords.
 * This approach is based on the assumption that terms that often co-occur with
* the target provide relevant information on the target that should be covered
* by the answers.</p>
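 *
 * <p>As a rough illustration (numbers invented): if the term "telescope" occurs 40 times
 * in the retrieved snippets and "astronomer" 25 times, a candidate sentence containing
 * both terms gets roughly 40 + 25 added to its importance (before term-frequency and
 * sentence-length normalization), whereas a sentence covering neither term gains
 * nothing.</p>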
*
* <p>Several instances of this web term importance filter have been implemented
* that use different sources for text snippets.</p>
*
* <p>This class extends the class <code>Filter</code>.</p>
*
* @author Guido Sautter
* @version 2008-02-15
*/
public abstract class WebTermImportanceFilter extends Filter {
protected static final String person = "person";
protected static final String organization = "organization";
protected static final String location = "location";
protected static final String event = "event";
public static final int NO_NORMALIZATION = 0;
public static final int LINEAR_LENGTH_NORMALIZATION = 1;
public static final int SQUARE_ROOT_LENGTH_NORMALIZATION = 2;
public static final int LOG_LENGTH_NORMALIZATION = 3;
public static final int LOG_10_LENGTH_NORMALIZATION = 4;
private final int normalizationMode;
private final int tfNormalizationMode;
private final boolean isCombined;
// protected static final String WIKIPEDIA = "wikipedia";
	/**
	 * @param normalizationMode how candidate scores are normalized by sentence length
	 * @param tfNormalizationMode how term counts are normalized by general term frequency
	 * @param isCombined true if this filter is combined with other scoring approaches
	 */
protected WebTermImportanceFilter(int normalizationMode, int tfNormalizationMode, boolean isCombined) {
this.normalizationMode = normalizationMode;
this.tfNormalizationMode = tfNormalizationMode;
this.isCombined = isCombined;
}
/**
* fetch the term frequencies in the top X result snippets of a web search
* for some target
*
* @param targets an array of strings containing the targets
	 * @return a HashMap mapping the terms in the web search results to their
* frequency in the snippets
*/
public abstract HashMap<String, TermCounter> getTermCounters(String[] targets);
/**
* @author sautter
*
* Mutable integer class to avoid creating new objects all the time
*/
protected class TermCounter {
private int value = 0;
/** Constructor
*/
protected TermCounter() {}
/**
* Constructor
* @param value the initial value
*/
protected TermCounter(int value) {
this.value = value;
}
/** @return the value of this TermCounter
*/
public int getValue() {
return this.value;
}
/** increment the value of this TermCounter by 1
*/
public void increment() {
this.value++;
}
/** increment the value of this TermCounter by <code>inc</code>
* @param inc
*/
public void increment(int inc) {
this.value += inc;
}
/** decrement the value of this TermCounter by 1
*/
public void decrement() {
this.value--;
}
/** decrement the value of this TermCounter by <code>dec</code>
* @param dec
*/
public void decrement(int dec) {
this.value -= dec;
}
		/** multiply the value of this TermCounter by <code>fact</code>
* @param fact
*/
public void multiplyValue(int fact) {
this.value *= fact;
}
		/** divide the value of this TermCounter by <code>denom</code>
* @param denom
*/
public void divideValue(int denom) {
this.value /= denom;
}
}
/**
* produce the target variations for a given target
*
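	 * For example (illustrative, the exact set depends on the named entity tagger): for
	 * the target "Norwegian Cruise Lines (NCL)" the generated variations include
	 * "Norwegian Cruise Lines", "NCL", "the NCL" and the quoted original string.
	 *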
	 * @param target the original target String
* @return an array of strings containing the variations of the target
* String, including the original target
*/
public String[] getTargets(String target) {
ArrayList<String> targets = new ArrayList<String>();
targets.add(target);
boolean isPerson = false;
boolean brackets = false;
// If target starts with "the", "a", or "an", remove it.
if (target.startsWith("the ")) {
targets.add(target.substring(4, target.length()));
} else if (target.startsWith("an ")) {
targets.add(target.substring(3, target.length()));
} else if (target.startsWith("a ")) {
targets.add(target.substring(2, target.length()));
}
String targetType = this.checkType(target);
if (TEST_TARGET_GENERATION) {
if (targetType == null) System.out.println(" target type could not be determined");
else System.out.println(" target type is " + targetType);
}
if (person.equals(targetType)) {
// (complete) target is of type Person, no further processing is necessary
isPerson = true;
// split parts in brackets from parts not in brackets:
// "Norwegian Cruise Lines (NCL)" --> "Norwegian Cruise Lines" + "NCL"
} else if (target.contains("(") && target.contains(")")) {
int i1 = target.indexOf("(");
int i2 = target.indexOf(")");
String s1 = target.substring(0, i1 - 1);
String s2 = target.substring(i1 + 1, i2);
// Log.println("*** '"+s1+"' '"+s2+"'", true);
targets.clear();
targets.add(s1);
targets.add(s2);
			// Log.println("  "+target+" contains brackets. No further processing
// necessary.", true);
brackets = true;
} else if (this.cutExtension(target, targets)) {
// do nothing, it's in the cutExtensions method
} else if (target.endsWith("University")) {
// chop off "University"
String toAdd = target.substring(0, target.length() - 11);
targets.add(toAdd);
} else if (target.endsWith("International")) {
			// chop off "International"
String toAdd = target.substring(0, target.length() - 14);
targets.add(toAdd);
} else if (target.endsWith("Corporation")) {
// chop off "Corporation"
String toAdd = target.substring(0, target.length() - 12);
targets.add(toAdd);
} else {
this.extractUpperCaseParts(targets);
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String item = iter.next();
String type = this.checkType(item);
if (person.equals(type)) {
// after removing the first NP, check again if target is
// Person (example: "philanthropist Alberto Vilar")
// Log.println(" "+item+" is Person. No further processing
// necessary.", true);
					// attention, this also discards events containing person names!!!
// maybe remove this call
//targets.clear();
targets.add(item);
}
}
}
if (isPerson) {
targets.add("\"" + target + "\"");
// // own extension: add 'wikipedia' to target
// targets.add(target + " " + WIKIPEDIA);
// targets.add("\"" + target + "\" " + WIKIPEDIA);
} else if (!brackets) { // maybe remove condition
//targets = this.processLongTargets(targets);
this.extractUpperCaseParts(targets);
//targets = this.checkForEvent(targets);
// described effect done in extractUpperCaseParts(), uses NLP stuff we don't have
//targets = this.checkForDeterminer(targets);
			// bad thing, uses too many miraculous external classes we don't have
//targets = this.removeAttachedPP(targets);
// done in extractUpperCaseParts()
//targets = this.cutFirstNpInNpSequence(targets);
this.cutFirstNpInNpSequence(targets);
//targets = this.removeNounAfterNounGroup(targets);
// done in extractUpperCaseParts()
// own extension: extract acronyms 'Basque ETA' --> 'ETA'
this.extractAcronyms(targets);
//targets = this.postProcess(targets);
this.postProcess(targets);
}
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String item = iter.next();
String type = this.checkType(item);
if (organization.equals(type)/* && !brackets*/) {
targets.add("the " + item);
if (!brackets)
targets.add("the " + target);
} else if (person.equals(type)) {
targets.add("\"" + item + "\"");
// // own extension: add 'wikipedia' to target
// targets.add(item + " " + WIKIPEDIA);
// targets.add("\"" + item + "\" " + WIKIPEDIA);
}
// own extension: add determiner to acronyms
if (item.matches("([A-Z]){3,}"))
targets.add("the " + item);
else if (item.matches("([A-Z]\\.){2,}"))
targets.add("the " + item);
}
// own extension: add quoted version of title case targets like 'The Daily Show'
duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String item = iter.next();
if (item.matches("([A-Z][a-z]++)++")) {
targets.add("\"" + item + "\"");
// // own extension: add 'wikipedia' to target
// targets.add(item + " " + WIKIPEDIA);
// targets.add("\"" + item + "\" " + WIKIPEDIA);
}
}
// own extension: always use quoted version of original target if it has more than one word
String[] targetTokens = NETagger.tokenize(target);
if (targetTokens.length > 1) {
targets.add("\"" + target + "\"");
// // own extension: add 'wikipedia' to target
// targets.add(target + " " + WIKIPEDIA);
// targets.add("\"" + target + "\" " + WIKIPEDIA);
}
duplicateFreeTargets = new LinkedHashSet<String>(targets);
return duplicateFreeTargets.toArray(new String[duplicateFreeTargets.size()]);
}
/**
* find the NE type of a target
*
* @param target the target String to check
* @return the NE type of target, or null, if the type couldn't be determined
*/
private String checkType(String target) {
if (!StanfordNeTagger.isInitialized()) StanfordNeTagger.init();
HashMap<String, String[]> nesByType = StanfordNeTagger.extractNEs(target);
ArrayList<String> neTypes = new ArrayList<String>(nesByType.keySet());
for (int t = 0; t < neTypes.size(); t++) {
String type = neTypes.get(t);
String[] nes = nesByType.get(type);
for (int n = 0; n < nes.length; n++)
if (nes[n].equals(target))
return type.replace("NE", "");
}
return null;
}
/**
	 * cut trailing words like "University", "International", "Corporation":
* "Microsoft Corporation" --> "Microsoft" and add the non-cut part to target list
*
* @param target the target String to cut
* @param targets the target list to add the cut part to
* @return true if a cut target was added, false otherwise
*/
private boolean cutExtension(String target, ArrayList<String> targets) {
if (this.extensionList.isEmpty())
for (int i = 0; i < extensions.length; i++)
this.extensionList.add(extensions[i]);
String[] targetTokens = target.split("\\s");
String last = targetTokens[targetTokens.length - 1];
if (this.extensionList.contains(last) && (targetTokens.length > 1)) {
String cutTarget = targetTokens[0];
for (int i = 1; i < (targetTokens.length - 1); i++)
cutTarget += " " + targetTokens[i];
targets.add(cutTarget);
return true;
}
return false;
}
private HashSet<String> extensionList = new HashSet<String>();
private static final String[] extensions = {
"University",
"Corporation",
"International",
// last year's winner's list ends here
"Incorporated",
"Inc.",
"Comp.",
"Corp.",
"Co.",
"Museum",
"<to be extended>"
};
/** extract non lower case parts from the targets:
* "the film 'Star Wars'" --> "'Star Wars'"
* "1998 indictment and trial of Susan McDougal" --> "Susan McDougal"
* "Miss Universe 2000 crowned" --> "Miss Universe 2000"
* "Abraham from the bible" --> "Abraham"
* "Gobi desert" --> "Gobi"
*
* @param targets the list of targets
*/
private void extractUpperCaseParts(ArrayList<String> targets) {
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String target = iter.next();
String[] targetTokens = target.split("\\s");
String upperCasePart = null;
int i = 0;
while (i < targetTokens.length) {
// find start of next upper case part
while ((i < targetTokens.length) && !Character.isUpperCase(targetTokens[i].charAt(0))) i++;
// start upper case part
if (i < targetTokens.length) {
upperCasePart = targetTokens[i];
i++;
}
// collect non-lower-case part
while ((i < targetTokens.length) && !Character.isLowerCase(targetTokens[i].charAt(0))) {
upperCasePart += " " + targetTokens[i];
i++;
}
if (upperCasePart != null) {
targets.add(upperCasePart);
upperCasePart = null;
}
}
}
}
/** extract acronyms from the targets:
* "Basque ETA" --> "ETA"
*
* @param targets the list of targets
*/
private void extractAcronyms(ArrayList<String> targets) {
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String target = iter.next();
String[] targetTokens = target.split("\\s");
for (String t : targetTokens) {
if (t.matches("([A-Z]){3,}")) {
targets.add(t);
} else if (t.matches("([A-Z]\\.){2,}")) {
targets.add(t);
}
}
}
}
/** remove first NP in a sequence of NPs:
* "the film 'Star Wars'" --> "'Star Wars'"
*
* @param targets the list of targets
*/
private void cutFirstNpInNpSequence(ArrayList<String> targets) {
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String target = iter.next();
// tokenize and tag sentence
String[] targetTokens = OpenNLP.tokenize(target);
String[] posTags = OpenNLP.tagPos(targetTokens);
String[] chunkTags = OpenNLP.tagChunks(targetTokens, posTags);
String np = null;
int i = 0;
// find first NP
while ((i < targetTokens.length) && !"B-NP".equals(chunkTags[i])) i++;
// skip first NP
i++;
// find next NP
while (( i < targetTokens.length) && !"B-NP".equals(chunkTags[i])) i++;
// start NP
if (i < targetTokens.length) {
np = targetTokens[i];
i++;
}
// add rest of NP
while (i < targetTokens.length) {
np += " " + targetTokens[i];
i++;
}
if (np != null) targets.add(np);
}
}
/** take care of remaining brackets
*
* @param targets the list of targets
*/
private void postProcess(ArrayList<String> targets) {
HashSet<String> duplicateFreeTargets = new LinkedHashSet<String>(targets);
targets.clear();
for (Iterator<String> iter = duplicateFreeTargets.iterator(); iter.hasNext();) {
String target = iter.next().trim();
boolean add = true;
if (target.startsWith("(") && target.endsWith(")"))
target = target.substring(1, target.length() - 1).trim();
if (target.startsWith("(") != target.endsWith(")")) add = false;
			// own extension: cut leading and trailing apostrophes
while (target.startsWith("'")) target = target.substring(1).trim();
while (target.endsWith("'")) target = target.substring(0, (target.length() - 1)).trim();
			// own extension: cut leading single letters, but keep determiner "a"
while (target.matches("[b-z]\\s.++")) target = target.substring(2);
// own extension: filter one-char targets
if (target.length() < 2) add = false;
if (add) targets.add(target);
}
}
/**
* Increment the score of each result snippet for each word in it according
* to the number of top-100 web search engine snippets containing this
	 * particular word. This favors snippets that provide information that is
	 * given frequently and is thus likely to be more important with regard to
	 * the target.
*
* @param results array of <code>Result</code> objects
* @return extended array of <code>Result</code> objects
*/
@SuppressWarnings("unchecked")
public Result[] apply(Result[] results) {
// catch empty result
if (results.length == 0) return results;
// produce target variations
String target = results[0].getQuery().getOriginalQueryString();
System.out.println("WebTermImportanceFilter:\n processing target '" + target + "'");
HashMap<String, TermCounter> rawTermCounters = this.cacheLookup(target);
// query generation test
if (TEST_TARGET_GENERATION) {
String[] targets = this.getTargets(target);
			System.out.println("  generated web search Strings:");
for (String t : targets) System.out.println(" - " + t);
// query generation test only
return results;
// cache miss
} else if (rawTermCounters == null) {
String[] targets = this.getTargets(target);
			System.out.println("  web search Strings are");
for (String t : targets) System.out.println(" - " + t);
rawTermCounters = this.getTermCounters(targets);
this.cache(target, rawTermCounters);
}
// get target tokens
HashSet<String> rawTargetTerms = new HashSet<String>();
String[] targetTokens = OpenNLP.tokenize(target);
for (String tt : targetTokens)
if (Character.isLetterOrDigit(tt.charAt(0)))
rawTargetTerms.add(tt);
// stem terms, collect target terms
HashMap<String, TermCounter> termCounters = new HashMap<String, TermCounter>();//this.getTermCounters(targets);
HashSet<String> targetTerms = new HashSet<String>();
ArrayList<String> rawTerms = new ArrayList<String>(rawTermCounters.keySet());
for (String rawTerm : rawTerms) {
String stemmedTerm = SnowballStemmer.stem(rawTerm.toLowerCase());
if (!termCounters.containsKey(stemmedTerm))
termCounters.put(stemmedTerm, new TermCounter());
termCounters.get(stemmedTerm).increment(rawTermCounters.get(rawTerm).getValue());
if (rawTargetTerms.contains(rawTerm))
targetTerms.add(stemmedTerm);
}
// get overall recall (since 20070718)
int termCount = this.getCountSum(termCounters);
int termCountLog = ((termCount > 100) ? ((int) Math.log10(termCount)) : 2);
System.out.println("WebTermImportanceFilter: termCountLog is " + termCountLog);
// score results
ArrayList<Result> resultList = new ArrayList<Result>();
boolean goOn;
do {
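			// greedy select-and-discount loop (comment added for clarity): each round
			// re-scores the remaining candidates against the current term counters, keeps
			// the best candidate, and then shrinks the counters of its terms so that later
			// answers are rewarded for covering new, not yet mentioned, content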
goOn = false;
ArrayList<Result> rawResults = new ArrayList<Result>();
// score all results
for (Result r : results) {
if (r.getScore() != Float.NEGATIVE_INFINITY) {
// tokenize sentence
String[] sentence = NETagger.tokenize(r.getAnswer());
float importance = 0;
// scan sentence for terms from web result
for (int i = 0; i < sentence.length; i++) {
String term = sentence[i];
if ((term.length() > 1)/* && !StringUtils.isSubsetKeywords(term, r.getQuery().getAnalyzedQuestion().getQuestion()) && !FunctionWords.lookup(term)*/) {
term = SnowballStemmer.stem(term.toLowerCase());
TermCounter count = termCounters.get(term);
if (count != null) {
double tf; // 20070706
if (this.tfNormalizationMode == NO_NORMALIZATION) tf = 1;
else if (this.tfNormalizationMode == LOG_LENGTH_NORMALIZATION) {
tf = WordFrequencies.lookup(sentence[i].toLowerCase());
if (tf > Math.E) tf = Math.log(tf);
else tf = 1;
								} else if (this.tfNormalizationMode == LOG_10_LENGTH_NORMALIZATION) {
tf = WordFrequencies.lookup(sentence[i].toLowerCase());
if (tf > 10) tf = Math.log10(tf);
else tf = 1;
} else tf = 1;
importance += (count.getValue() / tf);
}
}
}
// don't throw out 0-scored results for combining approaches
if (this.isCombined || (importance > 0)) {
if (this.normalizationMode == NO_NORMALIZATION)
r.setScore(importance);
else if (this.normalizationMode == LINEAR_LENGTH_NORMALIZATION)
r.setScore(importance / sentence.length); // try normalized score
else if (this.normalizationMode == SQUARE_ROOT_LENGTH_NORMALIZATION)
r.setScore(importance / ((float) Math.sqrt(sentence.length))); // try normalized score
else if (this.normalizationMode == LOG_LENGTH_NORMALIZATION)
r.setScore(importance / (1 + ((float) Math.log(sentence.length)))); // try normalized score
else if (this.normalizationMode == LOG_10_LENGTH_NORMALIZATION)
r.setScore(importance / (1 + ((float) Math.log10(sentence.length)))); // try normalized score
rawResults.add(r);
}
}
}
if (rawResults.size() != 0) {
// find top result
Collections.sort(rawResults);
Collections.reverse(rawResults);
Result top = rawResults.remove(0);
resultList.add(top);
// decrement scores of top result terms
String[] sentence = NETagger.tokenize(top.getAnswer());
for (int i = 0; i < sentence.length; i++) {
String term = SnowballStemmer.stem(sentence[i].toLowerCase());
TermCounter count = termCounters.get(term);
if (count != null) {
// if (targetTerms.contains(term)) count.divideValue(2);
// else count.divideValue(5);
// if (targetTerms.contains(term)) count.divideValue(2);
// else count.divideValue(3);
// if (targetTerms.contains(term)) count.divideValue(2);
// else count.divideValue(2);
// 20070718
if (targetTerms.contains(term)) count.divideValue(2);
else count.divideValue(termCountLog);
if (count.getValue() == 0) termCounters.remove(term);
}
}
// prepare remaining results for next round
results = rawResults.toArray(new Result[rawResults.size()]);
goOn = true;
}
} while (goOn);
Collections.sort(resultList);
Collections.reverse(resultList);
// set position-dependent extra score for combining approaches
if (this.isCombined) {
float eScore = 100;
for (Result r : resultList) {
r.addExtraScore((this.getClass().getName() + this.normalizationMode), eScore);
eScore *= 0.9f;
}
}
return resultList.toArray(new Result[resultList.size()]);
}
// private static String lastTarget = null;
// private static String lastCacherClassName = null;
// private static HashMap<String, TermCounter> lastTargetTermCounters = null;
private static class CacheEntry {
String target;
HashMap<String, TermCounter> termCounters;
public CacheEntry(String target, HashMap<String, TermCounter> termCounters) {
this.target = target;
this.termCounters = termCounters;
}
}
private static HashMap<String, CacheEntry> cache = new HashMap<String, CacheEntry>();
private void cache(String target, HashMap<String, TermCounter> termCounters) {
String className = this.getClass().getName();
System.out.println("WebTermImportanceFilter: caching web lookup result for target '" + target + "' from class '" + className + "'");
CacheEntry ce = new CacheEntry(target, termCounters);
cache.put(className, ce);
// lastTarget = target;
// lastCacherClassName = className;
// lastTargetTermCounters = termCounters;
}
private HashMap<String, TermCounter> cacheLookup(String target) {
String className = this.getClass().getName();
System.out.println("WebTermImportanceFilter: doing cache lookup result for target '" + target + "', class '" + className + "'");
CacheEntry ce = cache.get(className);
if (ce == null) {
System.out.println(" --> cache miss, no entry for '" + className + "' so far");
return null;
} else if (target.equals(ce.target)) {
System.out.println(" --> cache hit");
return ce.termCounters;
} else {
System.out.println(" --> cache miss, last target for '" + className + "' is '" + ce.target + "'");
return null;
}
}
/** add all the term counters in source to target (perform a union of the key sets, summing up the counters)
* @param source
* @param target
*/
protected void addTermCounters(HashMap<String, TermCounter> source, HashMap<String, TermCounter> target) {
for (Iterator<String> keys = source.keySet().iterator(); keys.hasNext();) {
String key = keys.next();
int count = source.get(key).getValue();
if (!target.containsKey(key))
target.put(key, new TermCounter());
target.get(key).increment(count);
}
}
/** get the maximum count out of a set of counters
* @param counters
*/
protected int getMaxCount(HashMap<String, TermCounter> counters) {
int max = 0;
for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();)
max = Math.max(max, counters.get(keys.next()).getValue());
return max;
}
/** get the sum of a set of counters
* @param counters
*/
protected int getCountSum(HashMap<String, TermCounter> counters) {
int sum = 0;
for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();)
sum += counters.get(keys.next()).getValue();
return sum;
}
/** get the sum of a set of counters, each one minus the count in another set of counters
* @param counters
* @param compare
*/
protected int sumDiff(HashMap<String, TermCounter> counters, HashMap<String, TermCounter> compare) {
int diffSum = 0;
for (Iterator<String> keys = counters.keySet().iterator(); keys.hasNext();) {
String key = keys.next();
int count = counters.get(key).getValue();
int comp = (compare.containsKey(key) ? compare.get(key).getValue() : 0);
diffSum += Math.max((count - comp), 0);
}
return diffSum;
}
protected static boolean TEST_TARGET_GENERATION = false;
public static void main(String[] args) {
TEST_TARGET_GENERATION = true;
MsgPrinter.enableStatusMsgs(true);
MsgPrinter.enableErrorMsgs(true);
// create tokenizer
MsgPrinter.printStatusMsg("Creating tokenizer...");
if (!OpenNLP.createTokenizer("res/nlp/tokenizer/opennlp/EnglishTok.bin.gz"))
MsgPrinter.printErrorMsg("Could not create tokenizer.");
// LingPipe.createTokenizer();
// create sentence detector
// MsgPrinter.printStatusMsg("Creating sentence detector...");
// if (!OpenNLP.createSentenceDetector("res/nlp/sentencedetector/opennlp/EnglishSD.bin.gz"))
// MsgPrinter.printErrorMsg("Could not create sentence detector.");
// LingPipe.createSentenceDetector();
// create stemmer
MsgPrinter.printStatusMsg("Creating stemmer...");
SnowballStemmer.create();
// create part of speech tagger
MsgPrinter.printStatusMsg("Creating POS tagger...");
if (!OpenNLP.createPosTagger("res/nlp/postagger/opennlp/tag.bin.gz",
"res/nlp/postagger/opennlp/tagdict"))
MsgPrinter.printErrorMsg("Could not create OpenNLP POS tagger.");
// if (!StanfordPosTagger.init("res/nlp/postagger/stanford/" +
// "train-wsj-0-18.holder"))
// MsgPrinter.printErrorMsg("Could not create Stanford POS tagger.");
// create chunker
MsgPrinter.printStatusMsg("Creating chunker...");
if (!OpenNLP.createChunker("res/nlp/phrasechunker/opennlp/" +
"EnglishChunk.bin.gz"))
MsgPrinter.printErrorMsg("Could not create chunker.");
// create named entity taggers
MsgPrinter.printStatusMsg("Creating NE taggers...");
NETagger.loadListTaggers("res/nlp/netagger/lists/");
NETagger.loadRegExTaggers("res/nlp/netagger/patterns.lst");
MsgPrinter.printStatusMsg(" ...loading models");
// if (!NETagger.loadNameFinders("res/nlp/netagger/opennlp/"))
// MsgPrinter.printErrorMsg("Could not create OpenNLP NE tagger.");
if (!StanfordNeTagger.isInitialized() && !StanfordNeTagger.init())
MsgPrinter.printErrorMsg("Could not create Stanford NE tagger.");
MsgPrinter.printStatusMsg(" ...done");
WebTermImportanceFilter wtif = new TargetGeneratorTest(NO_NORMALIZATION);
TRECTarget[] targets = TREC13To16Parser.loadTargets(args[0]);
for (TRECTarget target : targets) {
String question = target.getTargetDesc();
// query generation
MsgPrinter.printGeneratingQueries();
String qn = QuestionNormalizer.normalize(question);
MsgPrinter.printNormalization(qn); // print normalized question string
Logger.logNormalization(qn); // log normalized question string
String[] kws = KeywordExtractor.getKeywords(qn);
AnalyzedQuestion aq = new AnalyzedQuestion(question);
aq.setKeywords(kws);
aq.setFactoid(false);
Query[] queries = new BagOfWordsG().generateQueries(aq);
for (int q = 0; q < queries.length; q++)
queries[q].setOriginalQueryString(question);
Result[] results = new Result[1];
results[0] = new Result("This would be the answer", queries[0]);
wtif.apply(results);
}
}
private static class TargetGeneratorTest extends WebTermImportanceFilter {
TargetGeneratorTest(int normalizationMode) {
super(normalizationMode, normalizationMode, false);
}
public HashMap<String, TermCounter> getTermCounters(String[] targets) {
return new HashMap<String, TermCounter>();
}
}
}
| vishnujayvel/QAGenerator | src/info/ephyra/answerselection/filters/WebTermImportanceFilter.java | Java | gpl-3.0 | 30,379 |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.classfile.attribute.annotation.target.visitor;
import proguard.classfile.*;
import proguard.classfile.attribute.CodeAttribute;
import proguard.classfile.attribute.annotation.*;
import proguard.classfile.attribute.annotation.target.*;
/**
* This interface specifies the methods for a visitor of <code>TargetInfo</code>
* objects.
*
* @author Eric Lafortune
*/
public interface TargetInfoVisitor
{
public void visitTypeParameterTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo);
public void visitTypeParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo);
public void visitSuperTypeTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, SuperTypeTargetInfo superTypeTargetInfo);
public void visitTypeParameterBoundTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);
public void visitTypeParameterBoundTargetInfo(Clazz clazz, Field field, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);
public void visitTypeParameterBoundTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo);
public void visitEmptyTargetInfo( Clazz clazz, Field field, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo);
public void visitEmptyTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo);
public void visitFormalParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, FormalParameterTargetInfo formalParameterTargetInfo);
public void visitThrowsTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, ThrowsTargetInfo throwsTargetInfo);
public void visitLocalVariableTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo);
public void visitCatchTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, CatchTargetInfo catchTargetInfo);
public void visitOffsetTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, OffsetTargetInfo offsetTargetInfo);
public void visitTypeArgumentTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, TypeArgumentTargetInfo typeArgumentTargetInfo);
}
| dslomov/bazel | third_party/java/proguard/proguard5.3.3/src/proguard/classfile/attribute/annotation/target/visitor/TargetInfoVisitor.java | Java | apache-2.0 | 3,983 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.IFile.Reader;
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.mapred.Merger.Segment;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.CryptoUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * <code>BackupStore</code> is a utility class that is used to support
 * the mark-reset functionality of the values iterator
*
* <p>It has two caches - a memory cache and a file cache where values are
* stored as they are iterated, after a mark. On reset, values are retrieved
* from these caches. Framework moves from the memory cache to the
* file cache when the memory cache becomes full.
*
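 * <p>Illustrative usage sketch (the method names are those of this class, the
 * surrounding <code>process</code> call is invented):
 * <pre>
 *   backupStore.mark();                      // remember the current position
 *   while (backupStore.hasNext()) {
 *     backupStore.next();
 *     process(backupStore.nextKey(), backupStore.nextValue());
 *   }
 *   backupStore.reset();                     // replay the values from the mark
 * </pre>
 *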
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class BackupStore<K,V> {
private static final Logger LOG =
LoggerFactory.getLogger(BackupStore.class.getName());
private static final int MAX_VINT_SIZE = 9;
private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE;
private final TaskAttemptID tid;
private MemoryCache memCache;
private FileCache fileCache;
List<Segment<K,V>> segmentList = new LinkedList<Segment<K,V>>();
private int readSegmentIndex = 0;
private int firstSegmentOffset = 0;
private int currentKVOffset = 0;
private int nextKVOffset = -1;
private DataInputBuffer currentKey = null;
private DataInputBuffer currentValue = new DataInputBuffer();
private DataInputBuffer currentDiskValue = new DataInputBuffer();
private boolean hasMore = false;
private boolean inReset = false;
private boolean clearMarkFlag = false;
private boolean lastSegmentEOF = false;
private Configuration conf;
public BackupStore(Configuration conf, TaskAttemptID taskid)
throws IOException {
final float bufferPercent =
conf.getFloat(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT, 0f);
if (bufferPercent > 1.0 || bufferPercent < 0.0) {
      throw new IOException(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT +
          " must be between 0.0 and 1.0, but was " + bufferPercent);
}
int maxSize = (int)Math.min(
Runtime.getRuntime().maxMemory() * bufferPercent, Integer.MAX_VALUE);
// Support an absolute size also.
int tmp = conf.getInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE, 0);
if (tmp > 0) {
maxSize = tmp;
}
memCache = new MemoryCache(maxSize);
fileCache = new FileCache(conf);
tid = taskid;
this.conf = conf;
LOG.info("Created a new BackupStore with a memory of " + maxSize);
}
/**
* Write the given K,V to the cache.
* Write to memcache if space is available, else write to the filecache
* @param key
* @param value
* @throws IOException
*/
public void write(DataInputBuffer key, DataInputBuffer value)
throws IOException {
assert (key != null && value != null);
if (fileCache.isActive()) {
fileCache.write(key, value);
return;
}
if (memCache.reserveSpace(key, value)) {
memCache.write(key, value);
} else {
fileCache.activate();
fileCache.write(key, value);
}
}
public void mark() throws IOException {
// We read one KV pair in advance in hasNext.
// If hasNext has read the next KV pair from a new segment, but the
// user has not called next() for that KV, then reset the readSegmentIndex
// to the previous segment
if (nextKVOffset == 0) {
assert (readSegmentIndex != 0);
assert (currentKVOffset != 0);
readSegmentIndex --;
}
// just drop segments before the current active segment
int i = 0;
Iterator<Segment<K,V>> itr = segmentList.iterator();
while (itr.hasNext()) {
Segment<K,V> s = itr.next();
if (i == readSegmentIndex) {
break;
}
s.close();
itr.remove();
i++;
LOG.debug("Dropping a segment");
}
// FirstSegmentOffset is the offset in the current segment from where we
// need to start reading on the next reset
firstSegmentOffset = currentKVOffset;
readSegmentIndex = 0;
LOG.debug("Setting the FirsSegmentOffset to " + currentKVOffset);
}
public void reset() throws IOException {
// Create a new segment for the previously written records only if we
// are not already in the reset mode
if (!inReset) {
if (fileCache.isActive) {
fileCache.createInDiskSegment();
} else {
memCache.createInMemorySegment();
}
}
inReset = true;
// Reset the segments to the correct position from where the next read
// should begin.
for (int i = 0; i < segmentList.size(); i++) {
Segment<K,V> s = segmentList.get(i);
if (s.inMemory()) {
int offset = (i == 0) ? firstSegmentOffset : 0;
s.getReader().reset(offset);
} else {
s.closeReader();
if (i == 0) {
s.reinitReader(firstSegmentOffset);
s.getReader().disableChecksumValidation();
}
}
}
currentKVOffset = firstSegmentOffset;
nextKVOffset = -1;
readSegmentIndex = 0;
hasMore = false;
lastSegmentEOF = false;
LOG.debug("Reset - First segment offset is " + firstSegmentOffset +
" Segment List Size is " + segmentList.size());
}
public boolean hasNext() throws IOException {
if (lastSegmentEOF) {
return false;
}
// We read the next KV from the cache to decide if there is any left.
// Since hasNext can be called several times before the actual call to
// next(), we use hasMore to avoid extra reads. hasMore is set to false
// when the user actually consumes this record in next()
if (hasMore) {
return true;
}
Segment<K,V> seg = segmentList.get(readSegmentIndex);
// Mark the current position. This would be set to currentKVOffset
// when the user consumes this record in next().
nextKVOffset = (int) seg.getActualPosition();
if (seg.nextRawKey()) {
currentKey = seg.getKey();
seg.getValue(currentValue);
hasMore = true;
return true;
} else {
if (!seg.inMemory()) {
seg.closeReader();
}
}
// If this is the last segment, mark the lastSegmentEOF flag and return
if (readSegmentIndex == segmentList.size() - 1) {
nextKVOffset = -1;
lastSegmentEOF = true;
return false;
}
nextKVOffset = 0;
readSegmentIndex ++;
Segment<K,V> nextSegment = segmentList.get(readSegmentIndex);
// We possibly are moving from a memory segment to a disk segment.
// Reset so that we do not corrupt the in-memory segment buffer.
// See HADOOP-5494
if (!nextSegment.inMemory()) {
currentValue.reset(currentDiskValue.getData(),
currentDiskValue.getLength());
nextSegment.init(null);
}
if (nextSegment.nextRawKey()) {
currentKey = nextSegment.getKey();
nextSegment.getValue(currentValue);
hasMore = true;
return true;
} else {
throw new IOException("New segment did not have even one K/V");
}
}
public void next() throws IOException {
if (!hasNext()) {
throw new NoSuchElementException("iterate past last value");
}
// Reset hasMore. See comment in hasNext()
hasMore = false;
currentKVOffset = nextKVOffset;
nextKVOffset = -1;
}
public DataInputBuffer nextValue() {
return currentValue;
}
public DataInputBuffer nextKey() {
return currentKey;
}
public void reinitialize() throws IOException {
if (segmentList.size() != 0) {
clearSegmentList();
}
memCache.reinitialize(true);
fileCache.reinitialize();
readSegmentIndex = firstSegmentOffset = 0;
currentKVOffset = 0;
nextKVOffset = -1;
hasMore = inReset = clearMarkFlag = false;
}
/**
   * This function is called by the ValuesIterator when a mark is called
* outside of a reset zone.
*/
public void exitResetMode() throws IOException {
inReset = false;
if (clearMarkFlag ) {
// If a flag was set to clear mark, do the reinit now.
// See clearMark()
reinitialize();
return;
}
if (!fileCache.isActive) {
memCache.reinitialize(false);
}
}
/** For writing the first key and value bytes directly from the
* value iterators, pass the current underlying output stream
* @param length The length of the impending write
*/
public DataOutputStream getOutputStream(int length) throws IOException {
if (memCache.reserveSpace(length)) {
return memCache.dataOut;
} else {
fileCache.activate();
return fileCache.writer.getOutputStream();
}
}
/** This method is called by the valueIterators after writing the first
* key and value bytes to the BackupStore
* @param length
*/
public void updateCounters(int length) {
if (fileCache.isActive) {
fileCache.writer.updateCountersForExternalAppend(length);
} else {
memCache.usedSize += length;
}
}
public void clearMark() throws IOException {
if (inReset) {
// If we are in the reset mode, we just mark a flag and come out
// The actual re initialization would be done when we exit the reset
// mode
clearMarkFlag = true;
} else {
reinitialize();
}
}
private void clearSegmentList() throws IOException {
for (Segment<K,V> segment: segmentList) {
long len = segment.getLength();
segment.close();
if (segment.inMemory()) {
memCache.unreserve(len);
}
}
segmentList.clear();
}
class MemoryCache {
private DataOutputBuffer dataOut;
private int blockSize;
private int usedSize;
private final BackupRamManager ramManager;
// Memory cache is made up of blocks.
private int defaultBlockSize = 1024 * 1024;
public MemoryCache(int maxSize) {
ramManager = new BackupRamManager(maxSize);
if (maxSize < defaultBlockSize) {
defaultBlockSize = maxSize;
}
}
public void unreserve(long len) {
ramManager.unreserve((int)len);
}
/**
* Re-initialize the memory cache.
*
* @param clearAll If true, re-initialize the ramManager also.
*/
void reinitialize(boolean clearAll) {
if (clearAll) {
ramManager.reinitialize();
}
int allocatedSize = createNewMemoryBlock(defaultBlockSize,
defaultBlockSize);
assert(allocatedSize == defaultBlockSize || allocatedSize == 0);
LOG.debug("Created a new mem block of " + allocatedSize);
}
private int createNewMemoryBlock(int requestedSize, int minSize) {
int allocatedSize = ramManager.reserve(requestedSize, minSize);
usedSize = 0;
if (allocatedSize == 0) {
dataOut = null;
blockSize = 0;
} else {
dataOut = new DataOutputBuffer(allocatedSize);
blockSize = allocatedSize;
}
return allocatedSize;
}
/**
     * This method determines if there is enough space left in the
     * memory cache to write the requested length plus space for the
     * trailing EOF markers.
     * @param length the number of bytes about to be written
* @return true if enough space is available
*/
boolean reserveSpace(int length) throws IOException {
int availableSize = blockSize - usedSize;
if (availableSize >= length + EOF_MARKER_SIZE) {
return true;
}
// Not enough available. Close this block
assert (!inReset);
createInMemorySegment();
// Create a new block
int tmp = Math.max(length + EOF_MARKER_SIZE, defaultBlockSize);
availableSize = createNewMemoryBlock(tmp,
(length + EOF_MARKER_SIZE));
      return availableSize != 0;
}
boolean reserveSpace(DataInputBuffer key, DataInputBuffer value)
throws IOException {
int keyLength = key.getLength() - key.getPosition();
int valueLength = value.getLength() - value.getPosition();
int requestedSize = keyLength + valueLength +
WritableUtils.getVIntSize(keyLength) +
WritableUtils.getVIntSize(valueLength);
return reserveSpace(requestedSize);
}
/**
* Write the key and value to the cache in the IFile format
* @param key
* @param value
* @throws IOException
*/
public void write(DataInputBuffer key, DataInputBuffer value)
throws IOException {
int keyLength = key.getLength() - key.getPosition();
int valueLength = value.getLength() - value.getPosition();
WritableUtils.writeVInt(dataOut, keyLength);
WritableUtils.writeVInt(dataOut, valueLength);
dataOut.write(key.getData(), key.getPosition(), keyLength);
dataOut.write(value.getData(), value.getPosition(), valueLength);
usedSize += keyLength + valueLength +
WritableUtils.getVIntSize(keyLength) +
WritableUtils.getVIntSize(valueLength);
LOG.debug("ID: " + segmentList.size() + " WRITE TO MEM");
}
/**
* This method creates a memory segment from the existing buffer
* @throws IOException
*/
void createInMemorySegment () throws IOException {
// If nothing was written in this block because the record size
// was greater than the allocated block size, just return.
if (usedSize == 0) {
ramManager.unreserve(blockSize);
return;
}
      // reserveSpace would have ensured that there is enough space
      // left for the EOF markers.
assert ((blockSize - usedSize) >= EOF_MARKER_SIZE);
WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
usedSize += EOF_MARKER_SIZE;
ramManager.unreserve(blockSize - usedSize);
Reader<K, V> reader =
new org.apache.hadoop.mapreduce.task.reduce.InMemoryReader<K, V>(null,
(org.apache.hadoop.mapred.TaskAttemptID) tid,
dataOut.getData(), 0, usedSize, conf);
Segment<K, V> segment = new Segment<K, V>(reader, false);
segmentList.add(segment);
LOG.debug("Added Memory Segment to List. List Size is " +
segmentList.size());
}
}
class FileCache {
private LocalDirAllocator lDirAlloc;
private final Configuration conf;
private final FileSystem fs;
private boolean isActive = false;
private Path file = null;
private IFile.Writer<K,V> writer = null;
private int spillNumber = 0;
public FileCache(Configuration conf)
throws IOException {
this.conf = conf;
this.fs = FileSystem.getLocal(conf);
this.lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR);
}
void write(DataInputBuffer key, DataInputBuffer value)
throws IOException {
if (writer == null) {
// If spillNumber is 0, we should have called activate and not
// come here at all
assert (spillNumber != 0);
writer = createSpillFile();
}
writer.append(key, value);
LOG.debug("ID: " + segmentList.size() + " WRITE TO DISK");
}
void reinitialize() {
spillNumber = 0;
writer = null;
isActive = false;
}
void activate() throws IOException {
isActive = true;
writer = createSpillFile();
}
void createInDiskSegment() throws IOException {
assert (writer != null);
writer.close();
Segment<K,V> s = new Segment<K, V>(conf, fs, file, null, true);
writer = null;
segmentList.add(s);
LOG.debug("Disk Segment added to List. Size is " + segmentList.size());
}
boolean isActive() { return isActive; }
private Writer<K,V> createSpillFile() throws IOException {
Path tmp =
new Path(MRJobConfig.OUTPUT + "/backup_" + tid.getId() + "_"
+ (spillNumber++) + ".out");
LOG.info("Created file: " + tmp);
file = lDirAlloc.getLocalPathForWrite(tmp.toUri().getPath(),
-1, conf);
FSDataOutputStream out = fs.create(file);
out = CryptoUtils.wrapIfNecessary(conf, out);
return new Writer<K, V>(conf, out, null, null, null, null, true);
}
}
static class BackupRamManager implements RamManager {
private int availableSize = 0;
private final int maxSize;
public BackupRamManager(int size) {
availableSize = maxSize = size;
}
public boolean reserve(int requestedSize, InputStream in) {
// Not used
LOG.warn("Reserve(int, InputStream) not supported by BackupRamManager");
return false;
}
int reserve(int requestedSize) {
if (availableSize == 0) {
return 0;
}
int reservedSize = Math.min(requestedSize, availableSize);
availableSize -= reservedSize;
LOG.debug("Reserving: " + reservedSize + " Requested: " + requestedSize);
return reservedSize;
}
int reserve(int requestedSize, int minSize) {
if (availableSize < minSize) {
LOG.debug("No space available. Available: " + availableSize +
" MinSize: " + minSize);
return 0;
} else {
return reserve(requestedSize);
}
}
public void unreserve(int requestedSize) {
availableSize += requestedSize;
LOG.debug("Unreserving: " + requestedSize +
". Available: " + availableSize);
}
void reinitialize() {
availableSize = maxSize;
}
}
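  // Editorial note (not part of the original file): a worked example of the
  // reserve/unreserve bookkeeping above, assuming a BackupRamManager built
  // with maxSize = 10:
  //   reserve(8, 4)  -> available (10) >= minSize (4), so 8 is granted; available drops to 2
  //   reserve(8, 4)  -> available (2) < minSize (4), so 0 is granted and the caller must spill to disk
  //   unreserve(8)   -> available returns to 10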
}
| dennishuo/hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java | Java | apache-2.0 | 18,966 |
package io.cattle.platform.networking.host.dao.impl;
import static io.cattle.platform.core.model.tables.HostTable.*;
import static io.cattle.platform.core.model.tables.HostVnetMapTable.*;
import static io.cattle.platform.core.model.tables.SubnetVnetMapTable.*;
import static io.cattle.platform.core.model.tables.VnetTable.*;
import java.util.List;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.HostVnetMap;
import io.cattle.platform.core.model.Network;
import io.cattle.platform.core.model.Subnet;
import io.cattle.platform.core.model.SubnetVnetMap;
import io.cattle.platform.core.model.Vnet;
import io.cattle.platform.core.model.tables.records.VnetRecord;
import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao;
import io.cattle.platform.networking.host.contants.HostOnlyConstants;
import io.cattle.platform.networking.host.dao.HostOnlyDao;
import io.cattle.platform.object.ObjectManager;
import javax.inject.Inject;
import org.jooq.Record;
public class HostOnlyDaoImpl extends AbstractJooqDao implements HostOnlyDao {
ObjectManager objectManager;
@Override
public Vnet getVnetForHost(Network network, Host host) {
Long physicalHostId = host.getPhysicalHostId();
Record record = null;
if ( physicalHostId == null ) {
record = create()
.select(VNET.fields())
.from(VNET)
.join(HOST_VNET_MAP)
.on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID))
.where(VNET.NETWORK_ID.eq(network.getId())
.and(HOST_VNET_MAP.HOST_ID.eq(host.getId()))
.and(HOST_VNET_MAP.REMOVED.isNull()))
.fetchAny();
} else {
record = create()
.select(VNET.fields())
.from(VNET)
.join(HOST_VNET_MAP)
.on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID))
.join(HOST)
.on(HOST_VNET_MAP.HOST_ID.eq(HOST.ID))
.where(VNET.NETWORK_ID.eq(network.getId())
.and(HOST.PHYSICAL_HOST_ID.eq(physicalHostId))
.and(HOST_VNET_MAP.REMOVED.isNull()))
.fetchAny();
}
return record == null ? null : record.into(VnetRecord.class);
}
@Override
public Vnet createVnetForHost(Network network, Host host, Subnet subnet, String uri) {
if ( uri == null ) {
uri = HostOnlyConstants.DEFAULT_HOST_SUBNET_URI;
}
Vnet vnet = objectManager.create(Vnet.class,
VNET.URI, uri,
VNET.ACCOUNT_ID, network.getAccountId(),
VNET.NETWORK_ID, network.getId());
objectManager.create(HostVnetMap.class,
HOST_VNET_MAP.VNET_ID, vnet.getId(),
HOST_VNET_MAP.HOST_ID, host.getId());
if ( subnet != null ) {
objectManager.create(SubnetVnetMap.class,
SUBNET_VNET_MAP.VNET_ID, vnet.getId(),
SUBNET_VNET_MAP.SUBNET_ID, subnet.getId());
}
return vnet;
}
@Override
public HostVnetMap mapVnetToHost(Vnet vnet, Host host) {
List<HostVnetMap> maps = objectManager.find(HostVnetMap.class,
HOST_VNET_MAP.VNET_ID, vnet.getId(),
HOST_VNET_MAP.HOST_ID, host.getId());
if ( maps.size() > 0 ) {
return maps.get(0);
}
return objectManager.create(HostVnetMap.class,
HOST_VNET_MAP.VNET_ID, vnet.getId(),
HOST_VNET_MAP.HOST_ID, host.getId());
}
public ObjectManager getObjectManager() {
return objectManager;
}
@Inject
public void setObjectManager(ObjectManager objectManager) {
this.objectManager = objectManager;
}
}
| stresler/cattle | code/implementation/host-only-network/src/main/java/io/cattle/platform/networking/host/dao/impl/HostOnlyDaoImpl.java | Java | apache-2.0 | 3,937 |
/*
* Copyright (C) 2015 Bilibili
* Copyright (C) 2015 Zhang Rui <bbcallen@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tv.danmaku.ijk.media.player.misc;
import android.annotation.TargetApi;
import android.os.Build;
import android.text.TextUtils;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import tv.danmaku.ijk.media.player.IjkMediaMeta;
public class IjkMediaFormat implements IMediaFormat {
// Common
public static final String KEY_IJK_CODEC_LONG_NAME_UI = "ijk-codec-long-name-ui";
public static final String KEY_IJK_CODEC_NAME_UI = "ijk-codec-name-ui";
public static final String KEY_IJK_BIT_RATE_UI = "ijk-bit-rate-ui";
// Video
public static final String KEY_IJK_CODEC_PROFILE_LEVEL_UI = "ijk-profile-level-ui";
public static final String KEY_IJK_CODEC_PIXEL_FORMAT_UI = "ijk-pixel-format-ui";
public static final String KEY_IJK_RESOLUTION_UI = "ijk-resolution-ui";
public static final String KEY_IJK_FRAME_RATE_UI = "ijk-frame-rate-ui";
// Audio
public static final String KEY_IJK_SAMPLE_RATE_UI = "ijk-sample-rate-ui";
public static final String KEY_IJK_CHANNEL_UI = "ijk-channel-ui";
// Codec
public static final String CODEC_NAME_H264 = "h264";
public final IjkMediaMeta.IjkStreamMeta mMediaFormat;
public IjkMediaFormat(IjkMediaMeta.IjkStreamMeta streamMeta) {
mMediaFormat = streamMeta;
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@Override
public int getInteger(String name) {
if (mMediaFormat == null)
return 0;
return mMediaFormat.getInt(name);
}
@Override
public String getString(String name) {
if (mMediaFormat == null)
return null;
if (sFormatterMap.containsKey(name)) {
Formatter formatter = sFormatterMap.get(name);
return formatter.format(this);
}
return mMediaFormat.getString(name);
}
//-------------------------
// Formatter
//-------------------------
private static abstract class Formatter {
public String format(IjkMediaFormat mediaFormat) {
String value = doFormat(mediaFormat);
if (TextUtils.isEmpty(value))
return getDefaultString();
return value;
}
protected abstract String doFormat(IjkMediaFormat mediaFormat);
@SuppressWarnings("SameReturnValue")
protected String getDefaultString() {
return "N/A";
}
}
private static final Map<String, Formatter> sFormatterMap = new HashMap<String, Formatter>();
{
sFormatterMap.put(KEY_IJK_CODEC_LONG_NAME_UI, new Formatter() {
@Override
public String doFormat(IjkMediaFormat mediaFormat) {
return mMediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_LONG_NAME);
}
});
sFormatterMap.put(KEY_IJK_CODEC_NAME_UI, new Formatter() {
@Override
public String doFormat(IjkMediaFormat mediaFormat) {
return mMediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
}
});
sFormatterMap.put(KEY_IJK_BIT_RATE_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int bitRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_BITRATE);
if (bitRate <= 0) {
return null;
} else if (bitRate < 1000) {
return String.format(Locale.US, "%d bit/s", bitRate);
} else {
return String.format(Locale.US, "%d kb/s", bitRate / 1000);
}
}
});
sFormatterMap.put(KEY_IJK_CODEC_PROFILE_LEVEL_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int profileIndex = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_PROFILE_ID);
String profile;
switch (profileIndex) {
case IjkMediaMeta.FF_PROFILE_H264_BASELINE:
profile = "Baseline";
break;
case IjkMediaMeta.FF_PROFILE_H264_CONSTRAINED_BASELINE:
profile = "Constrained Baseline";
break;
case IjkMediaMeta.FF_PROFILE_H264_MAIN:
profile = "Main";
break;
case IjkMediaMeta.FF_PROFILE_H264_EXTENDED:
profile = "Extended";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH:
profile = "High";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_10:
profile = "High 10";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_10_INTRA:
profile = "High 10 Intra";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_422:
profile = "High 4:2:2";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_422_INTRA:
profile = "High 4:2:2 Intra";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_444:
profile = "High 4:4:4";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_PREDICTIVE:
profile = "High 4:4:4 Predictive";
break;
case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_INTRA:
profile = "High 4:4:4 Intra";
break;
case IjkMediaMeta.FF_PROFILE_H264_CAVLC_444:
profile = "CAVLC 4:4:4";
break;
default:
return null;
}
StringBuilder sb = new StringBuilder();
sb.append(profile);
String codecName = mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
if (!TextUtils.isEmpty(codecName) && codecName.equalsIgnoreCase(CODEC_NAME_H264)) {
int level = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_LEVEL);
if (level < 10)
return sb.toString();
sb.append(" Profile Level ");
sb.append((level / 10) % 10);
if ((level % 10) != 0) {
sb.append(".");
sb.append(level % 10);
}
}
return sb.toString();
}
});
sFormatterMap.put(KEY_IJK_CODEC_PIXEL_FORMAT_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
return mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_PIXEL_FORMAT);
}
});
sFormatterMap.put(KEY_IJK_RESOLUTION_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int width = mediaFormat.getInteger(KEY_WIDTH);
int height = mediaFormat.getInteger(KEY_HEIGHT);
int sarNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_NUM);
int sarDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_DEN);
if (width <= 0 || height <= 0) {
return null;
} else if (sarNum <= 0 || sarDen <= 0) {
return String.format(Locale.US, "%d x %d", width, height);
} else {
return String.format(Locale.US, "%d x %d [SAR %d:%d]", width,
height, sarNum, sarDen);
}
}
});
sFormatterMap.put(KEY_IJK_FRAME_RATE_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int fpsNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_NUM);
int fpsDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_DEN);
if (fpsNum <= 0 || fpsDen <= 0) {
return null;
} else {
return String.valueOf(((float) (fpsNum)) / fpsDen);
}
}
});
sFormatterMap.put(KEY_IJK_SAMPLE_RATE_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int sampleRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAMPLE_RATE);
if (sampleRate <= 0) {
return null;
} else {
return String.format(Locale.US, "%d Hz", sampleRate);
}
}
});
sFormatterMap.put(KEY_IJK_CHANNEL_UI, new Formatter() {
@Override
protected String doFormat(IjkMediaFormat mediaFormat) {
int channelLayout = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CHANNEL_LAYOUT);
if (channelLayout <= 0) {
return null;
} else {
if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_MONO) {
return "mono";
} else if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_STEREO) {
return "stereo";
} else {
return String.format(Locale.US, "%x", channelLayout);
}
}
}
});
}
}
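// Editorial usage sketch (not part of the original file): a caller that already has an
// IjkMediaMeta.IjkStreamMeta in hand (how it is obtained is outside this class) would
// typically wrap it and query the pre-formatted strings, e.g.:
//
//   IjkMediaFormat format = new IjkMediaFormat(streamMeta);
//   format.getString(IjkMediaFormat.KEY_IJK_RESOLUTION_UI);   // e.g. "1920 x 1080 [SAR 1:1]"
//   format.getString(IjkMediaFormat.KEY_IJK_BIT_RATE_UI);     // e.g. "2500 kb/s"
//   format.getString(IjkMediaFormat.KEY_IJK_SAMPLE_RATE_UI);  // "N/A" when the stream has no audio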
| Kerr1Gan/ShareBox | ijkplayer-java/src/main/java/tv/danmaku/ijk/media/player/misc/IjkMediaFormat.java | Java | apache-2.0 | 10,341 |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client;
import junit.framework.TestCase;
import org.junit.Assert;
import com.vaadin.client.componentlocator.LocatorUtil;
/*
 * Test LocatorUtil.isUIElement() & isNotificationElement() methods
*/
public class LocatorUtilTest extends TestCase {
public void testIsUI1() {
boolean isUI = LocatorUtil.isUIElement("com.vaadin.ui.UI");
Assert.assertTrue(isUI);
}
public void testIsUI2() {
boolean isUI = LocatorUtil.isUIElement("/com.vaadin.ui.UI");
Assert.assertTrue(isUI);
}
public void testIsUI3() {
boolean isUI = LocatorUtil
.isUIElement("//com.vaadin.ui.UI[RandomString");
Assert.assertTrue(isUI);
}
public void testIsUI4() {
boolean isUI = LocatorUtil.isUIElement("//com.vaadin.ui.UI[0]");
Assert.assertTrue(isUI);
}
public void testIsNotification1() {
boolean isUI = LocatorUtil
.isNotificationElement("com.vaadin.ui.VNotification");
Assert.assertTrue(isUI);
}
public void testIsNotification2() {
boolean isUI = LocatorUtil
.isNotificationElement("com.vaadin.ui.Notification");
Assert.assertTrue(isUI);
}
public void testIsNotification3() {
boolean isUI = LocatorUtil
.isNotificationElement("/com.vaadin.ui.VNotification[");
Assert.assertTrue(isUI);
}
public void testIsNotification4() {
boolean isUI = LocatorUtil
.isNotificationElement("//com.vaadin.ui.VNotification[0]");
Assert.assertTrue(isUI);
}
}
| udayinfy/vaadin | client/tests/src/com/vaadin/client/LocatorUtilTest.java | Java | apache-2.0 | 2,206 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestMode;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
public class DirectSpellcheckerSettings {
// NB: If this changes, make sure to change the default in TermBuilderSuggester
public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
public static float DEFAULT_ACCURACY = 0.5f;
public static SortBy DEFAULT_SORT = SortBy.SCORE;
// NB: If this changes, make sure to change the default in TermBuilderSuggester
public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
public static int DEFAULT_MAX_INSPECTIONS = 5;
public static float DEFAULT_MAX_TERM_FREQ = 0.01f;
public static int DEFAULT_PREFIX_LENGTH = 1;
public static int DEFAULT_MIN_WORD_LENGTH = 4;
public static float DEFAULT_MIN_DOC_FREQ = 0f;
private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE;
private float accuracy = DEFAULT_ACCURACY;
private SortBy sort = DEFAULT_SORT;
private StringDistance stringDistance = DEFAULT_STRING_DISTANCE;
private int maxEdits = DEFAULT_MAX_EDITS;
private int maxInspections = DEFAULT_MAX_INSPECTIONS;
private float maxTermFreq = DEFAULT_MAX_TERM_FREQ;
private int prefixLength = DEFAULT_PREFIX_LENGTH;
private int minWordLength = DEFAULT_MIN_WORD_LENGTH;
private float minDocFreq = DEFAULT_MIN_DOC_FREQ;
public SuggestMode suggestMode() {
return suggestMode;
}
public void suggestMode(SuggestMode suggestMode) {
this.suggestMode = suggestMode;
}
public float accuracy() {
return accuracy;
}
public void accuracy(float accuracy) {
this.accuracy = accuracy;
}
public SortBy sort() {
return sort;
}
public void sort(SortBy sort) {
this.sort = sort;
}
public StringDistance stringDistance() {
return stringDistance;
}
public void stringDistance(StringDistance distance) {
this.stringDistance = distance;
}
public int maxEdits() {
return maxEdits;
}
public void maxEdits(int maxEdits) {
this.maxEdits = maxEdits;
}
public int maxInspections() {
return maxInspections;
}
public void maxInspections(int maxInspections) {
this.maxInspections = maxInspections;
}
public float maxTermFreq() {
return maxTermFreq;
}
public void maxTermFreq(float maxTermFreq) {
this.maxTermFreq = maxTermFreq;
}
public int prefixLength() {
return prefixLength;
}
public void prefixLength(int prefixLength) {
this.prefixLength = prefixLength;
}
public int minWordLength() {
return minWordLength;
}
public void minWordLength(int minWordLength) {
this.minWordLength = minWordLength;
}
public float minDocFreq() {
return minDocFreq;
}
public void minDocFreq(float minDocFreq) {
this.minDocFreq = minDocFreq;
}
@Override
public String toString() {
return "[" +
"suggestMode=" + suggestMode +
",sort=" + sort +
",stringDistance=" + stringDistance +
",accuracy=" + accuracy +
",maxEdits=" + maxEdits +
",maxInspections=" + maxInspections +
",maxTermFreq=" + maxTermFreq +
",prefixLength=" + prefixLength +
",minWordLength=" + minWordLength +
",minDocFreq=" + minDocFreq +
"]";
}
}
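// Editorial usage sketch (not part of the original file): configures a settings
// instance away from its defaults and prints the summary via the toString() above.
// The chosen values are illustrative assumptions, not recommendations.
class DirectSpellcheckerSettingsUsageSketch {
    public static void main(String[] args) {
        DirectSpellcheckerSettings settings = new DirectSpellcheckerSettings();
        settings.accuracy(0.7f);                                // require closer matches than the 0.5 default
        settings.maxEdits(1);                                   // allow a single edit instead of the Levenshtein maximum
        settings.suggestMode(SuggestMode.SUGGEST_MORE_POPULAR); // only suggest terms that are more frequent
        System.out.println(settings);
    }
}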
| camilojd/elasticsearch | core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java | Java | apache-2.0 | 4,660 |
//========================================================================
//
//File: $RCSfile: AddToLayerAction.java,v $
//Version: $Revision: 1.4 $
//Modified: $Date: 2013/01/10 23:05:58 $
//
//Copyright (c) 2005-2014 Mentor Graphics Corporation. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package org.xtuml.bp.ui.graphics.actions;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.jface.action.Action;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.ui.canvas.Connector_c;
import org.xtuml.bp.ui.canvas.GraphicalElement_c;
import org.xtuml.bp.ui.canvas.Graphicalelementinlayer_c;
import org.xtuml.bp.ui.canvas.Layer_c;
import org.xtuml.bp.ui.canvas.Model_c;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.ui.canvas.Shape_c;
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import org.xtuml.bp.ui.graphics.parts.ConnectorEditPart;
import org.xtuml.bp.ui.graphics.parts.ShapeEditPart;
public class AddToLayerAction extends Action {
private String layerName;
private Model_c model;
public AddToLayerAction(String layerName, Model_c model) {
this.layerName = layerName;
this.model = model;
}
@Override
public void run() {
Layer_c layer = Layer_c.getOneGD_LAYOnR34(model,
new ClassQueryInterface_c() {
@Override
public boolean evaluate(Object candidate) {
return ((Layer_c) candidate).getLayer_name().equals(
layerName);
}
});
if (layer != null) {
Transaction transaction = null;
TransactionManager manager = TransactionManager.getSingleton();
try {
transaction = manager.startTransaction(
"Add element(s) to layer", Ooaofgraphics
.getDefaultInstance());
List<GraphicalEditPart> selection = new ArrayList<GraphicalEditPart>();
GraphicalViewer viewer = GraphicalEditor.getEditor(model)
.getGraphicalViewer();
for (Object selected : viewer.getSelectedEditParts()) {
selection.add((GraphicalEditPart) selected);
}
for (GraphicalEditPart part : selection) {
if (part instanceof ShapeEditPart
|| part instanceof ConnectorEditPart) {
GraphicalElement_c elem = null;
Object partModel = part.getModel();
if (partModel instanceof Connector_c) {
elem = GraphicalElement_c
.getOneGD_GEOnR2((Connector_c) partModel);
} else {
elem = GraphicalElement_c
.getOneGD_GEOnR2((Shape_c) partModel);
}
if (elem != null) {
// if this element already exists in the layer
// skip, the tool allows this when at least one
// selected element is not part of the layer
Layer_c[] participatingLayers = Layer_c
.getManyGD_LAYsOnR35(Graphicalelementinlayer_c
.getManyGD_GLAYsOnR35(elem));
for(int i = 0; i < participatingLayers.length; i++) {
if(participatingLayers[i] == layer) {
continue;
}
}
if (part instanceof ShapeEditPart) {
ShapeEditPart shapePart = (ShapeEditPart) part;
participatingLayers = shapePart
.getInheritedLayers();
for (int i = 0; i < participatingLayers.length; i++) {
if (participatingLayers[i] == layer) {
continue;
}
}
}
if (part instanceof ConnectorEditPart) {
ConnectorEditPart conPart = (ConnectorEditPart) part;
participatingLayers = conPart
.getInheritedLayers();
for (int i = 0; i < participatingLayers.length; i++) {
if (participatingLayers[i] == layer) {
continue;
}
}
}
layer.Addelementtolayer(elem.getElementid());
}
if(!layer.getVisible()) {
// see if the part also belongs to any
// visible layers, otherwise de-select
Layer_c[] existingLayers = Layer_c
.getManyGD_LAYsOnR35(Graphicalelementinlayer_c
.getManyGD_GLAYsOnR35(elem));
boolean participatesInVisibleLayer = false;
for(int i = 0; i < existingLayers.length; i++) {
if(existingLayers[i].getVisible()) {
participatesInVisibleLayer = true;
break;
}
}
if(!participatesInVisibleLayer) {
viewer.deselect(part);
}
}
}
}
manager.endTransaction(transaction);
} catch (Exception e) {
if (transaction != null) {
manager.cancelTransaction(transaction, e);
}
CorePlugin.logError("Unable to add element to layer.", e);
}
}
}
}
| lwriemen/bridgepoint | src/org.xtuml.bp.ui.graphics/src/org/xtuml/bp/ui/graphics/actions/AddToLayerAction.java | Java | apache-2.0 | 5,561 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
* <p>
* Contains the output of DescribeSpotInstanceRequests.
* </p>
*/
public class DescribeSpotInstanceRequestsResult implements Serializable, Cloneable {
/**
* One or more Spot Instance requests.
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequests;
/**
* One or more Spot Instance requests.
*
* @return One or more Spot Instance requests.
*/
public java.util.List<SpotInstanceRequest> getSpotInstanceRequests() {
if (spotInstanceRequests == null) {
spotInstanceRequests = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>();
spotInstanceRequests.setAutoConstruct(true);
}
return spotInstanceRequests;
}
/**
* One or more Spot Instance requests.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*/
public void setSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
if (spotInstanceRequests == null) {
this.spotInstanceRequests = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
spotInstanceRequestsCopy.addAll(spotInstanceRequests);
this.spotInstanceRequests = spotInstanceRequestsCopy;
}
/**
* One or more Spot Instance requests.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setSpotInstanceRequests(java.util.Collection)} or
* {@link #withSpotInstanceRequests(java.util.Collection)} if you want to
* override the existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(SpotInstanceRequest... spotInstanceRequests) {
if (getSpotInstanceRequests() == null) setSpotInstanceRequests(new java.util.ArrayList<SpotInstanceRequest>(spotInstanceRequests.length));
for (SpotInstanceRequest value : spotInstanceRequests) {
getSpotInstanceRequests().add(value);
}
return this;
}
/**
* One or more Spot Instance requests.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
if (spotInstanceRequests == null) {
this.spotInstanceRequests = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
spotInstanceRequestsCopy.addAll(spotInstanceRequests);
this.spotInstanceRequests = spotInstanceRequestsCopy;
}
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSpotInstanceRequests() != null) sb.append("SpotInstanceRequests: " + getSpotInstanceRequests() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSpotInstanceRequests() == null) ? 0 : getSpotInstanceRequests().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof DescribeSpotInstanceRequestsResult == false) return false;
DescribeSpotInstanceRequestsResult other = (DescribeSpotInstanceRequestsResult)obj;
if (other.getSpotInstanceRequests() == null ^ this.getSpotInstanceRequests() == null) return false;
if (other.getSpotInstanceRequests() != null && other.getSpotInstanceRequests().equals(this.getSpotInstanceRequests()) == false) return false;
return true;
}
@Override
public DescribeSpotInstanceRequestsResult clone() {
try {
return (DescribeSpotInstanceRequestsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
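// Editorial usage sketch (not part of the generated SDK source): shows how a caller
// might assemble and inspect this result object. The empty SpotInstanceRequest is an
// illustrative assumption; real instances come back from DescribeSpotInstanceRequests.
class DescribeSpotInstanceRequestsResultUsageSketch {
    public static void main(String[] args) {
        DescribeSpotInstanceRequestsResult result = new DescribeSpotInstanceRequestsResult()
            .withSpotInstanceRequests(new SpotInstanceRequest());
        // The varargs withSpotInstanceRequests(...) appends to the auto-construct list.
        System.out.println(result.getSpotInstanceRequests().size()); // prints 1
        System.out.println(result);                                  // uses the generated toString()
    }
}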
| sheofir/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/DescribeSpotInstanceRequestsResult.java | Java | apache-2.0 | 6,035 |
package org.asteriskjava.manager.event;
public class DongleCENDEvent extends ManagerEvent
{
private static final long serialVersionUID = 3257845467831284784L;
private String device;
private String endstatus;
private String cccause;
private String duration;
private String callidx;
public DongleCENDEvent(Object source)
{
super(source);
}
public String getDevice() {
return this.device;
}
public void setDevice(String device) {
this.device = device;
}
public String getCallidx() {
return callidx;
}
public void setCallidx(String callidx) {
this.callidx = callidx;
}
public String getCccause() {
return cccause;
}
public void setCccause(String cccause) {
this.cccause = cccause;
}
public String getDuration() {
return duration;
}
public void setDuration(String duration) {
this.duration = duration;
}
public String getEndstatus() {
return endstatus;
}
public void setEndstatus(String endstatus) {
this.endstatus = endstatus;
}
} | seanbright/asterisk-java | src/main/java/org/asteriskjava/manager/event/DongleCENDEvent.java | Java | apache-2.0 | 1,118 |
package org.apache.lucene.util.junitcompat;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Properties;
import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
/**
* @see SystemPropertiesRestoreRule
* @see SystemPropertiesInvariantRule
*/
public class TestSystemPropertiesInvariantRule extends WithNestedTests {
public static final String PROP_KEY1 = "new-property-1";
public static final String VALUE1 = "new-value-1";
public TestSystemPropertiesInvariantRule() {
super(true);
}
public static class Base extends WithNestedTests.AbstractNestedTest {
public void testEmpty() {}
}
public static class InBeforeClass extends Base {
@BeforeClass
public static void beforeClass() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
public static class InAfterClass extends Base {
@AfterClass
public static void afterClass() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
public static class InTestMethod extends Base {
public void testMethod1() {
if (System.getProperty(PROP_KEY1) != null) {
throw new RuntimeException("Shouldn't be here.");
}
System.setProperty(PROP_KEY1, VALUE1);
}
public void testMethod2() {
testMethod1();
}
}
public static class NonStringProperties extends Base {
public void testMethod1() {
if (System.getProperties().get(PROP_KEY1) != null) {
throw new RuntimeException("Will pass.");
}
Properties properties = System.getProperties();
properties.put(PROP_KEY1, new Object());
Assert.assertTrue(System.getProperties().get(PROP_KEY1) != null);
}
public void testMethod2() {
testMethod1();
}
@AfterClass
public static void cleanup() {
System.getProperties().remove(PROP_KEY1);
}
}
public static class IgnoredProperty {
@Rule
public TestRule invariant = new SystemPropertiesInvariantRule(PROP_KEY1);
@Test
public void testMethod1() {
System.setProperty(PROP_KEY1, VALUE1);
}
}
@Before
@After
public void cleanup() {
System.clearProperty(PROP_KEY1);
}
@Test
public void testRuleInvariantBeforeClass() {
Result runClasses = JUnitCore.runClasses(InBeforeClass.class);
Assert.assertEquals(1, runClasses.getFailureCount());
Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
.contains(PROP_KEY1));
Assert.assertNull(System.getProperty(PROP_KEY1));
}
@Test
public void testRuleInvariantAfterClass() {
Result runClasses = JUnitCore.runClasses(InAfterClass.class);
Assert.assertEquals(1, runClasses.getFailureCount());
Assert.assertTrue(runClasses.getFailures().get(0).getMessage()
.contains(PROP_KEY1));
Assert.assertNull(System.getProperty(PROP_KEY1));
}
@Test
public void testRuleInvariantInTestMethod() {
Result runClasses = JUnitCore.runClasses(InTestMethod.class);
Assert.assertEquals(2, runClasses.getFailureCount());
for (Failure f : runClasses.getFailures()) {
Assert.assertTrue(f.getMessage().contains(PROP_KEY1));
}
Assert.assertNull(System.getProperty(PROP_KEY1));
}
@Test
public void testNonStringProperties() {
Result runClasses = JUnitCore.runClasses(NonStringProperties.class);
Assert.assertEquals(1, runClasses.getFailureCount());
Assert.assertTrue(runClasses.getFailures().get(0).getMessage().contains("Will pass"));
Assert.assertEquals(3, runClasses.getRunCount());
}
@Test
public void testIgnoredProperty() {
System.clearProperty(PROP_KEY1);
try {
Result runClasses = JUnitCore.runClasses(IgnoredProperty.class);
Assert.assertEquals(0, runClasses.getFailureCount());
Assert.assertEquals(VALUE1, System.getProperty(PROP_KEY1));
} finally {
System.clearProperty(PROP_KEY1);
}
}
}
| smartan/lucene | src/test/java/org/apache/lucene/util/junitcompat/TestSystemPropertiesInvariantRule.java | Java | apache-2.0 | 4,895 |
package com.puppycrawl.tools.checkstyle.checks.whitespace.emptylineseparator;
public class InputEmptyLineSeparatorPrePreviousLineEmptiness {
}
| AkshitaKukreja30/checkstyle | src/test/resources/com/puppycrawl/tools/checkstyle/checks/whitespace/emptylineseparator/InputEmptyLineSeparatorPrePreviousLineEmptiness.java | Java | lgpl-2.1 | 146 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.upgrades;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ESRestTestCase;
public abstract class AbstractRollingTestCase extends ESRestTestCase {
protected enum ClusterType {
OLD,
MIXED,
UPGRADED;
public static ClusterType parse(String value) {
switch (value) {
case "old_cluster":
return OLD;
case "mixed_cluster":
return MIXED;
case "upgraded_cluster":
return UPGRADED;
default:
throw new AssertionError("unknown cluster type: " + value);
}
}
}
protected static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.suite"));
@Override
protected final boolean preserveIndicesUponCompletion() {
return true;
}
@Override
protected final boolean preserveReposUponCompletion() {
return true;
}
@Override
protected final Settings restClientSettings() {
return Settings.builder().put(super.restClientSettings())
// increase the timeout here to 90 seconds to handle long waits for a green
// cluster health. the waits for green need to be longer than a minute to
// account for delayed shards
.put(ESRestTestCase.CLIENT_RETRY_TIMEOUT, "90s")
.put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s")
.build();
}
}
| gfyoung/elasticsearch | qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractRollingTestCase.java | Java | apache-2.0 | 2,348 |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.engine.es.beans;
/**
* Used to store a primitive value into the shared state ES document.
*
* @author eric.wittmann@redhat.com
*/
public class PrimitiveBean {
private String value;
private String type;
/**
* Constructor.
*/
public PrimitiveBean() {
}
/**
* @return the value
*/
public String getValue() {
return value;
}
/**
* @param value the value to set
*/
public void setValue(String value) {
this.value = value;
}
/**
* @return the type
*/
public String getType() {
return type;
}
/**
* @param type the type to set
*/
public void setType(String type) {
this.type = type;
}
}
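// Editorial usage sketch (not part of the original file): round-trips a primitive
// value through the bean. The value/type encoding shown here is an assumption about
// how callers use it, not a documented contract.
class PrimitiveBeanUsageSketch {
    public static void main(String[] args) {
        PrimitiveBean bean = new PrimitiveBean();
        bean.setValue("42");
        bean.setType(Integer.class.getName());
        // A reader would typically dispatch on getType() to decode getValue().
        if (Integer.class.getName().equals(bean.getType())) {
            System.out.println(Integer.valueOf(bean.getValue()) + 1); // prints 43
        }
    }
}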
| jasonchaffee/apiman | gateway/engine/es/src/main/java/io/apiman/gateway/engine/es/beans/PrimitiveBean.java | Java | apache-2.0 | 1,370 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.operators;
import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.test.util.TestBaseUtils;
import org.apache.flink.util.Collector;
import java.util.ArrayList;
import java.util.List;
/** Integration tests for {@link MapPartitionFunction}. */
@SuppressWarnings("serial")
public class MapPartitionITCase extends JavaProgramTestBase {
private static final String IN =
"1 1\n2 2\n2 8\n4 4\n4 4\n6 6\n7 7\n8 8\n"
+ "1 1\n2 2\n2 2\n4 4\n4 4\n6 3\n5 9\n8 8\n1 1\n2 2\n2 2\n3 0\n4 4\n"
+ "5 9\n7 7\n8 8\n1 1\n9 1\n5 9\n4 4\n4 4\n6 6\n7 7\n8 8\n";
private static final String RESULT =
"1 11\n2 12\n4 14\n4 14\n1 11\n2 12\n2 12\n4 14\n4 14\n3 16\n1 11\n2 12\n2 12\n0 13\n4 14\n1 11\n4 14\n4 14\n";
private List<Tuple2<String, String>> input = new ArrayList<>();
private List<Tuple2<String, Integer>> expected = new ArrayList<>();
private List<Tuple2<String, Integer>> result = new ArrayList<>();
@Override
protected void preSubmit() throws Exception {
// create input
for (String s : IN.split("\n")) {
String[] fields = s.split(" ");
input.add(new Tuple2<String, String>(fields[0], fields[1]));
}
// create expected
for (String s : RESULT.split("\n")) {
String[] fields = s.split(" ");
expected.add(new Tuple2<String, Integer>(fields[0], Integer.parseInt(fields[1])));
}
}
@Override
protected void postSubmit() {
compareResultCollections(
expected, result, new TestBaseUtils.TupleComparator<Tuple2<String, Integer>>());
}
@Override
protected void testProgram() throws Exception {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSet<Tuple2<String, String>> data = env.fromCollection(input);
data.mapPartition(new TestMapPartition())
.output(new LocalCollectionOutputFormat<Tuple2<String, Integer>>(result));
env.execute();
}
private static class TestMapPartition
implements MapPartitionFunction<Tuple2<String, String>, Tuple2<String, Integer>> {
@Override
public void mapPartition(
Iterable<Tuple2<String, String>> values, Collector<Tuple2<String, Integer>> out) {
for (Tuple2<String, String> value : values) {
String keyString = value.f0;
String valueString = value.f1;
int keyInt = Integer.parseInt(keyString);
int valueInt = Integer.parseInt(valueString);
if (keyInt + valueInt < 10) {
out.collect(new Tuple2<String, Integer>(valueString, keyInt + 10));
}
}
}
}
}
| kl0u/flink | flink-tests/src/test/java/org/apache/flink/test/operators/MapPartitionITCase.java | Java | apache-2.0 | 3,931 |
/*
* Copyright (C) 2010-2101 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.otter.node.etl.common.task;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.I0Itec.zkclient.exception.ZkInterruptedException;
import org.apache.commons.lang.ClassUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.otter.node.common.config.ConfigClientService;
import com.alibaba.otter.node.etl.common.jmx.StageAggregationCollector;
import com.alibaba.otter.node.etl.common.pipe.impl.RowDataPipeDelegate;
import com.alibaba.otter.shared.arbitrate.ArbitrateEventService;
import com.alibaba.otter.shared.arbitrate.model.TerminEventData;
import com.alibaba.otter.shared.arbitrate.model.TerminEventData.TerminType;
import com.alibaba.otter.shared.common.model.config.pipeline.Pipeline;
/**
 * Parent thread for the mainstem, select, extract, transform and load tasks.
*
 * @author xiaoqing.zhouxq 2011-8-23 10:38:14 AM
*/
public abstract class GlobalTask extends Thread {
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
protected volatile boolean running = true;
protected Pipeline pipeline;
protected Long pipelineId;
protected ArbitrateEventService arbitrateEventService;
protected RowDataPipeDelegate rowDataPipeDelegate;
protected ExecutorService executorService;
protected ConfigClientService configClientService;
protected StageAggregationCollector stageAggregationCollector;
protected Map<Long, Future> pendingFuture;
public GlobalTask(Pipeline pipeline){
this(pipeline.getId());
this.pipeline = pipeline;
}
public GlobalTask(Long pipelineId){
this.pipelineId = pipelineId;
setName(createTaskName(pipelineId, ClassUtils.getShortClassName(this.getClass())));
pendingFuture = new HashMap<Long, Future>();
}
public void shutdown() {
running = false;
interrupt();
List<Future> cancelFutures = new ArrayList<Future>();
for (Map.Entry<Long, Future> entry : pendingFuture.entrySet()) {
if (!entry.getValue().isDone()) {
logger.warn("WARN ## Task future processId[{}] canceled!", entry.getKey());
cancelFutures.add(entry.getValue());
}
}
for (Future future : cancelFutures) {
future.cancel(true);
}
pendingFuture.clear();
}
protected void sendRollbackTermin(long pipelineId, Throwable exception) {
sendRollbackTermin(pipelineId, ExceptionUtils.getFullStackTrace(exception));
}
protected void sendRollbackTermin(long pipelineId, String message) {
TerminEventData errorEventData = new TerminEventData();
errorEventData.setPipelineId(pipelineId);
errorEventData.setType(TerminType.ROLLBACK);
errorEventData.setCode("setl");
errorEventData.setDesc(message);
arbitrateEventService.terminEvent().single(errorEventData);
        // After sending the alarm, sleep for a while before continuing with the remaining work
try {
Thread.sleep(3000 + RandomUtils.nextInt(3000));
} catch (InterruptedException e) {
}
}
/**
     * Automatically handles the case where the data is null by resending the data
*/
protected void processMissData(long pipelineId, String message) {
TerminEventData errorEventData = new TerminEventData();
errorEventData.setPipelineId(pipelineId);
errorEventData.setType(TerminType.RESTART);
errorEventData.setCode("setl");
errorEventData.setDesc(message);
arbitrateEventService.terminEvent().single(errorEventData);
}
protected String createTaskName(long pipelineId, String taskName) {
return new StringBuilder().append("pipelineId = ").append(pipelineId).append(",taskName = ").append(taskName).toString();
}
protected boolean isProfiling() {
return stageAggregationCollector.isProfiling();
}
protected boolean isInterrupt(Throwable e) {
if (!running) {
return true;
}
if (e instanceof InterruptedException || e instanceof ZkInterruptedException) {
return true;
}
if (ExceptionUtils.getRootCause(e) instanceof InterruptedException) {
return true;
}
return false;
}
public Collection<Long> getPendingProcess() {
List<Long> result = new ArrayList<Long>(pendingFuture.keySet());
Collections.sort(result);
return result;
}
// ====================== setter / getter =========================
public void setArbitrateEventService(ArbitrateEventService arbitrateEventService) {
this.arbitrateEventService = arbitrateEventService;
}
public void setRowDataPipeDelegate(RowDataPipeDelegate rowDataPipeDelegate) {
this.rowDataPipeDelegate = rowDataPipeDelegate;
}
public void setExecutorService(ExecutorService executorService) {
this.executorService = executorService;
}
public void setConfigClientService(ConfigClientService configClientService) {
this.configClientService = configClientService;
}
public void setStageAggregationCollector(StageAggregationCollector stageAggregationCollector) {
this.stageAggregationCollector = stageAggregationCollector;
}
}
| alibaba/otter | node/etl/src/main/java/com/alibaba/otter/node/etl/common/task/GlobalTask.java | Java | apache-2.0 | 6,295 |
package com.thinkaurelius.titan.hadoop.compat.h1;
import com.thinkaurelius.titan.graphdb.configuration.TitanConstants;
import com.thinkaurelius.titan.hadoop.config.job.JobClasspathConfigurer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import com.thinkaurelius.titan.hadoop.HadoopGraph;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompat;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompiler;
public class Hadoop1Compat implements HadoopCompat {
static final String CFG_SPECULATIVE_MAPS = "mapred.map.tasks.speculative.execution";
static final String CFG_SPECULATIVE_REDUCES = "mapred.reduce.tasks.speculative.execution";
static final String CFG_JOB_JAR = "mapred.jar";
@Override
public HadoopCompiler newCompiler(HadoopGraph g) {
return new Hadoop1Compiler(g);
}
@Override
public TaskAttemptContext newTask(Configuration c, TaskAttemptID t) {
return new TaskAttemptContext(c, t);
}
@Override
public String getSpeculativeMapConfigKey() {
return CFG_SPECULATIVE_MAPS;
}
@Override
public String getSpeculativeReduceConfigKey() {
return CFG_SPECULATIVE_REDUCES;
}
@Override
public String getMapredJarConfigKey() {
return CFG_JOB_JAR;
}
@Override
public void incrementContextCounter(TaskInputOutputContext context, Enum<?> counter, long incr) {
context.getCounter(counter).increment(incr);
}
@Override
public Configuration getContextConfiguration(TaskAttemptContext context) {
return context.getConfiguration();
}
@Override
public long getCounter(MapReduceDriver counters, Enum<?> e) {
return counters.getCounters().findCounter(e).getValue();
}
@Override
public JobClasspathConfigurer newMapredJarConfigurer(String mapredJarPath) {
return new MapredJarConfigurer(mapredJarPath);
}
@Override
public JobClasspathConfigurer newDistCacheConfigurer() {
return new DistCacheConfigurer("titan-hadoop-core-" + TitanConstants.VERSION + ".jar");
}
@Override
public Configuration getJobContextConfiguration(JobContext context) {
return context.getConfiguration();
}
@Override
public Configuration newImmutableConfiguration(Configuration base) {
return new ImmutableConfiguration(base);
}
}
| evanv/titan | titan-hadoop-parent/titan-hadoop-1/src/main/java/com/thinkaurelius/titan/hadoop/compat/h1/Hadoop1Compat.java | Java | apache-2.0 | 2,628 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
/** Base class for exceptions occurring in the {@link ExecutionGraph}. */
public class ExecutionGraphException extends Exception {
private static final long serialVersionUID = -8253451032797220657L;
public ExecutionGraphException(String message) {
super(message);
}
public ExecutionGraphException(String message, Throwable cause) {
super(message, cause);
}
public ExecutionGraphException(Throwable cause) {
super(cause);
}
}
| tillrohrmann/flink | flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionGraphException.java | Java | apache-2.0 | 1,338 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.messages.job.metrics;
/** Headers for aggregating job metrics. */
public class AggregatedJobMetricsHeaders
extends AbstractAggregatedMetricsHeaders<AggregatedJobMetricsParameters> {
private static final AggregatedJobMetricsHeaders INSTANCE = new AggregatedJobMetricsHeaders();
private AggregatedJobMetricsHeaders() {}
@Override
public String getTargetRestEndpointURL() {
return "/jobs/metrics";
}
@Override
public AggregatedJobMetricsParameters getUnresolvedMessageParameters() {
return new AggregatedJobMetricsParameters();
}
public static AggregatedJobMetricsHeaders getInstance() {
return INSTANCE;
}
@Override
public String getDescription() {
return "Provides access to aggregated job metrics.";
}
}
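// Editorial usage sketch (not part of the original file): the singleton headers object
// is typically consulted when wiring or documenting the REST endpoint.
class AggregatedJobMetricsHeadersUsageSketch {
    public static void main(String[] args) {
        AggregatedJobMetricsHeaders headers = AggregatedJobMetricsHeaders.getInstance();
        System.out.println(headers.getTargetRestEndpointURL()); // prints /jobs/metrics
        System.out.println(headers.getDescription());
    }
}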
| apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/metrics/AggregatedJobMetricsHeaders.java | Java | apache-2.0 | 1,648 |
/*
* Copyright (C) 2010-2101 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.otter.node.etl.conflict.exception;
import org.apache.commons.lang.exception.NestableRuntimeException;
/**
 * @author jianghang 2012-4-12 02:59:12 PM
* @version 4.0.2
*/
public class ConflictException extends NestableRuntimeException {
private static final long serialVersionUID = -7288830284122672209L;
private String errorCode;
private String errorDesc;
    public ConflictException(String errorCode){
        super(errorCode);
        this.errorCode = errorCode;
    }
    public ConflictException(String errorCode, Throwable cause){
        super(errorCode, cause);
        this.errorCode = errorCode;
    }
    public ConflictException(String errorCode, String errorDesc){
        super(errorCode + ":" + errorDesc);
        this.errorCode = errorCode;
        this.errorDesc = errorDesc;
    }
    public ConflictException(String errorCode, String errorDesc, Throwable cause){
        super(errorCode + ":" + errorDesc, cause);
        this.errorCode = errorCode;
        this.errorDesc = errorDesc;
    }
public ConflictException(Throwable cause){
super(cause);
}
public String getErrorCode() {
return errorCode;
}
public String getErrorDesc() {
return errorDesc;
}
@Override
public Throwable fillInStackTrace() {
return this;
}
}
| wangcan2014/otter | node/etl/src/main/java/com/alibaba/otter/node/etl/conflict/exception/ConflictException.java | Java | apache-2.0 | 1,785 |
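/*
 * Usage sketch (invented, not part of the otter sources above): raising and
 * inspecting a ConflictException. The error code and description are made up.
 */
import com.alibaba.otter.node.etl.conflict.exception.ConflictException;

public class ConflictExceptionUsageSketch {
    public static void main(String[] args) {
        try {
            throw new ConflictException("CONFLICT-0001", "row version mismatch");
        } catch (ConflictException e) {
            // fillInStackTrace() is overridden to return 'this', so constructing the
            // exception is cheap; only the code and description are of interest here.
            System.out.println(e.getErrorCode() + " : " + e.getErrorDesc());
        }
    }
}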
package org.csstudio.swt.xygraph.util;
import org.csstudio.swt.xygraph.figures.XYGraph;
import org.eclipse.draw2d.FigureUtilities;
import org.eclipse.draw2d.SWTGraphics;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Transform;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;
public class SingleSourceHelperImpl extends SingleSourceHelper {
@Override
protected Cursor createInternalCursor(Display display, ImageData imageData,
int width, int height, int style) {
return new Cursor(display, imageData, width, height);
}
@Override
protected Image createInternalVerticalTextImage(String text, Font font,
RGB color, boolean upToDown) {
final Dimension titleSize = FigureUtilities.getTextExtents(text, font);
final int w = titleSize.height;
final int h = titleSize.width + 1;
Image image = new Image(Display.getCurrent(), w, h);
final GC gc = new GC(image);
final Color titleColor = new Color(Display.getCurrent(), color);
RGB transparentRGB = new RGB(240, 240, 240);
gc.setBackground(XYGraphMediaFactory.getInstance().getColor(
transparentRGB));
gc.fillRectangle(image.getBounds());
gc.setForeground(titleColor);
gc.setFont(font);
final Transform tr = new Transform(Display.getCurrent());
if (!upToDown) {
tr.translate(0, h);
tr.rotate(-90);
gc.setTransform(tr);
} else {
tr.translate(w, 0);
tr.rotate(90);
gc.setTransform(tr);
}
gc.drawText(text, 0, 0);
tr.dispose();
gc.dispose();
final ImageData imageData = image.getImageData();
image.dispose();
titleColor.dispose();
imageData.transparentPixel = imageData.palette.getPixel(transparentRGB);
image = new Image(Display.getCurrent(), imageData);
return image;
}
@Override
protected Image getInternalXYGraphSnapShot(XYGraph xyGraph) {
Rectangle bounds = xyGraph.getBounds();
Image image = new Image(null, bounds.width + 6, bounds.height + 6);
GC gc = new GC(image);
SWTGraphics graphics = new SWTGraphics(gc);
graphics.translate(-bounds.x + 3, -bounds.y + 3);
graphics.setForegroundColor(xyGraph.getForegroundColor());
graphics.setBackgroundColor(xyGraph.getBackgroundColor());
xyGraph.paint(graphics);
gc.dispose();
return image;
}
@Override
protected String getInternalImageSavePath() {
FileDialog dialog = new FileDialog(Display.getDefault().getShells()[0],
SWT.SAVE);
dialog.setFilterNames(new String[] { "PNG Files", "All Files (*.*)" });
dialog.setFilterExtensions(new String[] { "*.png", "*.*" }); // Windows
String path = dialog.open();
return path;
}
}
| jhshin9/scouter | scouter.client/src/org/csstudio/swt/xygraph/util/SingleSourceHelperImpl.java | Java | apache-2.0 | 3,045 |
/*
* Copyright (c) 2003, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.java2d.opengl;
import java.awt.AlphaComposite;
import java.awt.GraphicsEnvironment;
import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.image.ColorModel;
import java.awt.image.Raster;
import sun.awt.SunHints;
import sun.awt.image.PixelConverter;
import sun.java2d.pipe.hw.AccelSurface;
import sun.java2d.SunGraphics2D;
import sun.java2d.SurfaceData;
import sun.java2d.SurfaceDataProxy;
import sun.java2d.loops.CompositeType;
import sun.java2d.loops.GraphicsPrimitive;
import sun.java2d.loops.MaskFill;
import sun.java2d.loops.SurfaceType;
import sun.java2d.pipe.ParallelogramPipe;
import sun.java2d.pipe.PixelToParallelogramConverter;
import sun.java2d.pipe.RenderBuffer;
import sun.java2d.pipe.TextPipe;
import static sun.java2d.pipe.BufferedOpCodes.*;
import static sun.java2d.opengl.OGLContext.OGLContextCaps.*;
/**
* This class describes an OpenGL "surface", that is, a region of pixels
* managed via OpenGL. An OGLSurfaceData can be tagged with one of three
* different SurfaceType objects for the purpose of registering loops, etc.
* This diagram shows the hierarchy of OGL SurfaceTypes:
*
* Any
* / \
* OpenGLSurface OpenGLTexture
* |
* OpenGLSurfaceRTT
*
* OpenGLSurface
* This kind of surface can be rendered to using OpenGL APIs. It is also
* possible to copy an OpenGLSurface to another OpenGLSurface (or to itself).
* This is typically accomplished by calling MakeContextCurrent(dstSD, srcSD)
* and then calling glCopyPixels() (although there are other techniques to
* achieve the same goal).
*
* OpenGLTexture
* This kind of surface cannot be rendered to using OpenGL (in the same sense
* as in OpenGLSurface). However, it is possible to upload a region of pixels
* to an OpenGLTexture object via glTexSubImage2D(). One can also copy a
* surface of type OpenGLTexture to an OpenGLSurface by binding the texture
* to a quad and then rendering it to the destination surface (this process
* is known as "texture mapping").
*
* OpenGLSurfaceRTT
* This kind of surface can be thought of as a sort of hybrid between
* OpenGLSurface and OpenGLTexture, in that one can render to this kind of
* surface as if it were of type OpenGLSurface, but the process of copying
* this kind of surface to another is more like an OpenGLTexture. (Note that
* "RTT" stands for "render-to-texture".)
*
* In addition to these SurfaceType variants, we have also defined some
* constants that describe in more detail the type of underlying OpenGL
* surface. This table helps explain the relationships between those
* "type" constants and their corresponding SurfaceType:
*
* OGL Type Corresponding SurfaceType
* -------- -------------------------
* WINDOW OpenGLSurface
* PBUFFER OpenGLSurface
* TEXTURE OpenGLTexture
* FLIP_BACKBUFFER OpenGLSurface
* FBOBJECT OpenGLSurfaceRTT
*/
public abstract class OGLSurfaceData extends SurfaceData
implements AccelSurface {
/**
* OGL-specific surface types
*
* @see sun.java2d.pipe.hw.AccelSurface
*/
public static final int PBUFFER = RT_PLAIN;
public static final int FBOBJECT = RT_TEXTURE;
/**
* Pixel formats
*/
public static final int PF_INT_ARGB = 0;
public static final int PF_INT_ARGB_PRE = 1;
public static final int PF_INT_RGB = 2;
public static final int PF_INT_RGBX = 3;
public static final int PF_INT_BGR = 4;
public static final int PF_INT_BGRX = 5;
public static final int PF_USHORT_565_RGB = 6;
public static final int PF_USHORT_555_RGB = 7;
public static final int PF_USHORT_555_RGBX = 8;
public static final int PF_BYTE_GRAY = 9;
public static final int PF_USHORT_GRAY = 10;
public static final int PF_3BYTE_BGR = 11;
/**
* SurfaceTypes
*/
private static final String DESC_OPENGL_SURFACE = "OpenGL Surface";
private static final String DESC_OPENGL_SURFACE_RTT =
"OpenGL Surface (render-to-texture)";
private static final String DESC_OPENGL_TEXTURE = "OpenGL Texture";
static final SurfaceType OpenGLSurface =
SurfaceType.Any.deriveSubType(DESC_OPENGL_SURFACE,
PixelConverter.ArgbPre.instance);
static final SurfaceType OpenGLSurfaceRTT =
OpenGLSurface.deriveSubType(DESC_OPENGL_SURFACE_RTT);
static final SurfaceType OpenGLTexture =
SurfaceType.Any.deriveSubType(DESC_OPENGL_TEXTURE);
/** This will be true if the fbobject system property has been enabled. */
private static boolean isFBObjectEnabled;
/** This will be true if the lcdshader system property has been enabled.*/
private static boolean isLCDShaderEnabled;
/** This will be true if the biopshader system property has been enabled.*/
private static boolean isBIOpShaderEnabled;
/** This will be true if the gradshader system property has been enabled.*/
private static boolean isGradShaderEnabled;
private OGLGraphicsConfig graphicsConfig;
protected int type;
// these fields are set from the native code when the surface is
// initialized
private int nativeWidth, nativeHeight;
protected static OGLRenderer oglRenderPipe;
protected static PixelToParallelogramConverter oglTxRenderPipe;
protected static ParallelogramPipe oglAAPgramPipe;
protected static OGLTextRenderer oglTextPipe;
protected static OGLDrawImage oglImagePipe;
protected native boolean initTexture(long pData,
boolean isOpaque, boolean texNonPow2,
boolean texRect,
int width, int height);
protected native boolean initFBObject(long pData,
boolean isOpaque, boolean texNonPow2,
boolean texRect,
int width, int height);
protected native boolean initFlipBackbuffer(long pData);
protected abstract boolean initPbuffer(long pData, long pConfigInfo,
boolean isOpaque,
int width, int height);
private native int getTextureTarget(long pData);
private native int getTextureID(long pData);
static {
if (!GraphicsEnvironment.isHeadless()) {
// fbobject currently enabled by default; use "false" to disable
String fbo = (String)java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction(
"sun.java2d.opengl.fbobject"));
isFBObjectEnabled = !"false".equals(fbo);
// lcdshader currently enabled by default; use "false" to disable
String lcd = (String)java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction(
"sun.java2d.opengl.lcdshader"));
isLCDShaderEnabled = !"false".equals(lcd);
// biopshader currently enabled by default; use "false" to disable
String biop = (String)java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction(
"sun.java2d.opengl.biopshader"));
isBIOpShaderEnabled = !"false".equals(biop);
// gradshader currently enabled by default; use "false" to disable
String grad = (String)java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction(
"sun.java2d.opengl.gradshader"));
isGradShaderEnabled = !"false".equals(grad);
OGLRenderQueue rq = OGLRenderQueue.getInstance();
oglImagePipe = new OGLDrawImage();
oglTextPipe = new OGLTextRenderer(rq);
oglRenderPipe = new OGLRenderer(rq);
if (GraphicsPrimitive.tracingEnabled()) {
oglTextPipe = oglTextPipe.traceWrap();
//The wrapped oglRenderPipe will wrap the AA pipe as well...
//oglAAPgramPipe = oglRenderPipe.traceWrap();
}
oglAAPgramPipe = oglRenderPipe.getAAParallelogramPipe();
oglTxRenderPipe =
new PixelToParallelogramConverter(oglRenderPipe,
oglRenderPipe,
1.0, 0.25, true);
OGLBlitLoops.register();
OGLMaskFill.register();
OGLMaskBlit.register();
}
}
protected OGLSurfaceData(OGLGraphicsConfig gc,
ColorModel cm, int type)
{
super(getCustomSurfaceType(type), cm);
this.graphicsConfig = gc;
this.type = type;
setBlitProxyKey(gc.getProxyKey());
}
@Override
public SurfaceDataProxy makeProxyFor(SurfaceData srcData) {
return OGLSurfaceDataProxy.createProxy(srcData, graphicsConfig);
}
/**
* Returns the appropriate SurfaceType corresponding to the given OpenGL
* surface type constant (e.g. TEXTURE -> OpenGLTexture).
*/
private static SurfaceType getCustomSurfaceType(int oglType) {
switch (oglType) {
case TEXTURE:
return OpenGLTexture;
case FBOBJECT:
return OpenGLSurfaceRTT;
case PBUFFER:
default:
return OpenGLSurface;
}
}
/**
* Note: This should only be called from the QFT under the AWT lock.
* This method is kept separate from the initSurface() method below just
* to keep the code a bit cleaner.
*/
private void initSurfaceNow(int width, int height) {
boolean isOpaque = (getTransparency() == Transparency.OPAQUE);
boolean success = false;
switch (type) {
case PBUFFER:
success = initPbuffer(getNativeOps(),
graphicsConfig.getNativeConfigInfo(),
isOpaque,
width, height);
break;
case TEXTURE:
success = initTexture(getNativeOps(),
isOpaque, isTexNonPow2Available(),
isTexRectAvailable(),
width, height);
break;
case FBOBJECT:
success = initFBObject(getNativeOps(),
isOpaque, isTexNonPow2Available(),
isTexRectAvailable(),
width, height);
break;
case FLIP_BACKBUFFER:
success = initFlipBackbuffer(getNativeOps());
break;
default:
break;
}
if (!success) {
throw new OutOfMemoryError("can't create offscreen surface");
}
}
/**
* Initializes the appropriate OpenGL offscreen surface based on the value
* of the type parameter. If the surface creation fails for any reason,
* an OutOfMemoryError will be thrown.
*/
protected void initSurface(final int width, final int height) {
OGLRenderQueue rq = OGLRenderQueue.getInstance();
rq.lock();
try {
switch (type) {
case TEXTURE:
case PBUFFER:
case FBOBJECT:
// need to make sure the context is current before
// creating the texture (or pbuffer, or fbobject)
OGLContext.setScratchSurface(graphicsConfig);
break;
default:
break;
}
rq.flushAndInvokeNow(new Runnable() {
public void run() {
initSurfaceNow(width, height);
}
});
} finally {
rq.unlock();
}
}
/**
* Returns the OGLContext for the GraphicsConfig associated with this
* surface.
*/
public final OGLContext getContext() {
return graphicsConfig.getContext();
}
/**
* Returns the OGLGraphicsConfig associated with this surface.
*/
final OGLGraphicsConfig getOGLGraphicsConfig() {
return graphicsConfig;
}
/**
* Returns one of the surface type constants defined above.
*/
public final int getType() {
return type;
}
/**
* If this surface is backed by a texture object, returns the target
* for that texture (either GL_TEXTURE_2D or GL_TEXTURE_RECTANGLE_ARB).
* Otherwise, this method will return zero.
*/
public final int getTextureTarget() {
return getTextureTarget(getNativeOps());
}
/**
* If this surface is backed by a texture object, returns the texture ID
* for that texture.
* Otherwise, this method will return zero.
*/
public final int getTextureID() {
return getTextureID(getNativeOps());
}
/**
* Returns native resource of specified {@code resType} associated with
* this surface.
*
* Specifically, for {@code OGLSurfaceData} this method returns the
* the following:
* <pre>
* TEXTURE - texture id
* </pre>
*
* Note: the resource returned by this method is only valid on the rendering
* thread.
*
* @return native resource of specified type or 0L if
* such resource doesn't exist or can not be retrieved.
* @see sun.java2d.pipe.hw.AccelSurface#getNativeResource
*/
public long getNativeResource(int resType) {
if (resType == TEXTURE) {
return getTextureID();
}
return 0L;
}
public Raster getRaster(int x, int y, int w, int h) {
throw new InternalError("not implemented yet");
}
/**
* For now, we can only render LCD text if:
* - the fragment shader extension is available, and
* - blending is disabled, and
* - the source color is opaque
* - and the destination is opaque
*
* Eventually, we could enhance the native OGL text rendering code
* and remove the above restrictions, but that would require significantly
* more code just to support a few uncommon cases.
*/
public boolean canRenderLCDText(SunGraphics2D sg2d) {
return
graphicsConfig.isCapPresent(CAPS_EXT_LCD_SHADER) &&
sg2d.compositeState <= SunGraphics2D.COMP_ISCOPY &&
sg2d.paintState <= SunGraphics2D.PAINT_OPAQUECOLOR &&
sg2d.surfaceData.getTransparency() == Transparency.OPAQUE;
}
public void validatePipe(SunGraphics2D sg2d) {
TextPipe textpipe;
boolean validated = false;
// OGLTextRenderer handles both AA and non-AA text, but
// only works with the following modes:
// (Note: For LCD text we only enter this code path if
// canRenderLCDText() has already validated that the mode is
// CompositeType.SrcNoEa (opaque color), which will be subsumed
// by the CompositeType.SrcNoEa (any color) test below.)
if (/* CompositeType.SrcNoEa (any color) */
(sg2d.compositeState <= sg2d.COMP_ISCOPY &&
sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) ||
/* CompositeType.SrcOver (any color) */
(sg2d.compositeState == sg2d.COMP_ALPHA &&
sg2d.paintState <= sg2d.PAINT_ALPHACOLOR &&
(((AlphaComposite)sg2d.composite).getRule() ==
AlphaComposite.SRC_OVER)) ||
/* CompositeType.Xor (any color) */
(sg2d.compositeState == sg2d.COMP_XOR &&
sg2d.paintState <= sg2d.PAINT_ALPHACOLOR))
{
textpipe = oglTextPipe;
} else {
// do this to initialize textpipe correctly; we will attempt
// to override the non-text pipes below
super.validatePipe(sg2d);
textpipe = sg2d.textpipe;
validated = true;
}
PixelToParallelogramConverter txPipe = null;
OGLRenderer nonTxPipe = null;
if (sg2d.antialiasHint != SunHints.INTVAL_ANTIALIAS_ON) {
if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
if (sg2d.compositeState <= sg2d.COMP_XOR) {
txPipe = oglTxRenderPipe;
nonTxPipe = oglRenderPipe;
}
} else if (sg2d.compositeState <= sg2d.COMP_ALPHA) {
if (OGLPaints.isValid(sg2d)) {
txPipe = oglTxRenderPipe;
nonTxPipe = oglRenderPipe;
}
// custom paints handled by super.validatePipe() below
}
} else {
if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
if (graphicsConfig.isCapPresent(CAPS_PS30) &&
(sg2d.imageComp == CompositeType.SrcOverNoEa ||
sg2d.imageComp == CompositeType.SrcOver))
{
if (!validated) {
super.validatePipe(sg2d);
validated = true;
}
PixelToParallelogramConverter aaConverter =
new PixelToParallelogramConverter(sg2d.shapepipe,
oglAAPgramPipe,
1.0/8.0, 0.499,
false);
sg2d.drawpipe = aaConverter;
sg2d.fillpipe = aaConverter;
sg2d.shapepipe = aaConverter;
} else if (sg2d.compositeState == sg2d.COMP_XOR) {
// install the solid pipes when AA and XOR are both enabled
txPipe = oglTxRenderPipe;
nonTxPipe = oglRenderPipe;
}
}
// other cases handled by super.validatePipe() below
}
if (txPipe != null) {
if (sg2d.transformState >= sg2d.TRANSFORM_TRANSLATESCALE) {
sg2d.drawpipe = txPipe;
sg2d.fillpipe = txPipe;
} else if (sg2d.strokeState != sg2d.STROKE_THIN) {
sg2d.drawpipe = txPipe;
sg2d.fillpipe = nonTxPipe;
} else {
sg2d.drawpipe = nonTxPipe;
sg2d.fillpipe = nonTxPipe;
}
// Note that we use the transforming pipe here because it
// will examine the shape and possibly perform an optimized
// operation if it can be simplified. The simplifications
// will be valid for all STROKE and TRANSFORM types.
sg2d.shapepipe = txPipe;
} else {
if (!validated) {
super.validatePipe(sg2d);
}
}
// install the text pipe based on our earlier decision
sg2d.textpipe = textpipe;
// always override the image pipe with the specialized OGL pipe
sg2d.imagepipe = oglImagePipe;
}
@Override
protected MaskFill getMaskFill(SunGraphics2D sg2d) {
if (sg2d.paintState > sg2d.PAINT_ALPHACOLOR) {
/*
* We can only accelerate non-Color MaskFill operations if
* all of the following conditions hold true:
* - there is an implementation for the given paintState
* - the current Paint can be accelerated for this destination
* - multitexturing is available (since we need to modulate
* the alpha mask texture with the paint texture)
*
* In all other cases, we return null, in which case the
* validation code will choose a more general software-based loop.
*/
if (!OGLPaints.isValid(sg2d) ||
!graphicsConfig.isCapPresent(CAPS_MULTITEXTURE))
{
return null;
}
}
return super.getMaskFill(sg2d);
}
public boolean copyArea(SunGraphics2D sg2d,
int x, int y, int w, int h, int dx, int dy)
{
if (sg2d.transformState < sg2d.TRANSFORM_TRANSLATESCALE &&
sg2d.compositeState < sg2d.COMP_XOR)
{
x += sg2d.transX;
y += sg2d.transY;
oglRenderPipe.copyArea(sg2d, x, y, w, h, dx, dy);
return true;
}
return false;
}
public void flush() {
invalidate();
OGLRenderQueue rq = OGLRenderQueue.getInstance();
rq.lock();
try {
// make sure we have a current context before
// disposing the native resources (e.g. texture object)
OGLContext.setScratchSurface(graphicsConfig);
RenderBuffer buf = rq.getBuffer();
rq.ensureCapacityAndAlignment(12, 4);
buf.putInt(FLUSH_SURFACE);
buf.putLong(getNativeOps());
// this call is expected to complete synchronously, so flush now
rq.flushNow();
} finally {
rq.unlock();
}
}
/**
* Disposes the native resources associated with the given OGLSurfaceData
* (referenced by the pData parameter). This method is invoked from
* the native Dispose() method from the Disposer thread when the
* Java-level OGLSurfaceData object is about to go away. Note that we
* also pass a reference to the native GLX/WGLGraphicsConfigInfo
* (pConfigInfo) for the purposes of making a context current.
*/
static void dispose(long pData, long pConfigInfo) {
OGLRenderQueue rq = OGLRenderQueue.getInstance();
rq.lock();
try {
// make sure we have a current context before
// disposing the native resources (e.g. texture object)
OGLContext.setScratchSurface(pConfigInfo);
RenderBuffer buf = rq.getBuffer();
rq.ensureCapacityAndAlignment(12, 4);
buf.putInt(DISPOSE_SURFACE);
buf.putLong(pData);
// this call is expected to complete synchronously, so flush now
rq.flushNow();
} finally {
rq.unlock();
}
}
static void swapBuffers(long window) {
OGLRenderQueue rq = OGLRenderQueue.getInstance();
rq.lock();
try {
RenderBuffer buf = rq.getBuffer();
rq.ensureCapacityAndAlignment(12, 4);
buf.putInt(SWAP_BUFFERS);
buf.putLong(window);
rq.flushNow();
} finally {
rq.unlock();
}
}
/**
* Returns true if OpenGL textures can have non-power-of-two dimensions
* when using the basic GL_TEXTURE_2D target.
*/
boolean isTexNonPow2Available() {
return graphicsConfig.isCapPresent(CAPS_TEXNONPOW2);
}
/**
* Returns true if OpenGL textures can have non-power-of-two dimensions
* when using the GL_TEXTURE_RECTANGLE_ARB target (only available when the
* GL_ARB_texture_rectangle extension is present).
*/
boolean isTexRectAvailable() {
return graphicsConfig.isCapPresent(CAPS_EXT_TEXRECT);
}
public Rectangle getNativeBounds() {
OGLRenderQueue rq = OGLRenderQueue.getInstance();
rq.lock();
try {
return new Rectangle(nativeWidth, nativeHeight);
} finally {
rq.unlock();
}
}
}
| rokn/Count_Words_2015 | testing/openjdk/jdk/src/share/classes/sun/java2d/opengl/OGLSurfaceData.java | Java | mit | 24,976 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
if (context.hasScriptFields() == false) {
return;
}
hits = hits.clone(); // don't modify the incoming hits
Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));
int lastReaderId = -1;
FieldScript[] leafScripts = null;
List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();
final IndexReader reader = context.searcher().getIndexReader();
for (SearchHit hit : hits) {
int readerId = ReaderUtil.subIndex(hit.docId(), reader.leaves());
LeafReaderContext leafReaderContext = reader.leaves().get(readerId);
if (readerId != lastReaderId) {
leafScripts = createLeafScripts(leafReaderContext, scriptFields);
lastReaderId = readerId;
}
int docId = hit.docId() - leafReaderContext.docBase;
for (int i = 0; i < leafScripts.length; i++) {
leafScripts[i].setDocument(docId);
final Object value;
try {
value = leafScripts[i].execute();
CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsFetchSubPhase leaf script " + i);
} catch (RuntimeException e) {
if (scriptFields.get(i).ignoreException()) {
continue;
}
throw e;
}
if (hit.fieldsOrNull() == null) {
hit.fields(new HashMap<>(2));
}
String scriptFieldName = scriptFields.get(i).name();
DocumentField hitField = hit.getFields().get(scriptFieldName);
if (hitField == null) {
final List<Object> values;
if (value instanceof Collection) {
values = new ArrayList<>((Collection<?>) value);
} else {
values = Collections.singletonList(value);
}
hitField = new DocumentField(scriptFieldName, values);
hit.getFields().put(scriptFieldName, hitField);
}
}
}
}
private FieldScript[] createLeafScripts(LeafReaderContext context,
List<ScriptFieldsContext.ScriptField> scriptFields) {
FieldScript[] scripts = new FieldScript[scriptFields.size()];
for (int i = 0; i < scripts.length; i++) {
try {
scripts[i] = scriptFields.get(i).script().newInstance(context);
} catch (IOException e1) {
throw new IllegalStateException("Failed to load script " + scriptFields.get(i).name(), e1);
}
}
return scripts;
}
}
| coding0011/elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java | Java | apache-2.0 | 4,508 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.resolve.ast.builder.strategy;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.light.LightMethodBuilder;
import com.intellij.psi.impl.light.LightPsiClassBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderAnnotationContributor;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderHelperLightPsiClass;
import org.jetbrains.plugins.groovy.transformations.TransformationContext;
import java.util.Objects;
import static org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil.createType;
public class DefaultBuilderStrategySupport extends BuilderAnnotationContributor {
public static final String DEFAULT_STRATEGY_NAME = "DefaultStrategy";
@Override
public void applyTransformation(@NotNull TransformationContext context) {
new DefaultBuilderStrategyHandler(context).doProcess();
}
private static class DefaultBuilderStrategyHandler {
private final @NotNull TransformationContext myContext;
private final @NotNull GrTypeDefinition myContainingClass;
private DefaultBuilderStrategyHandler(@NotNull TransformationContext context) {
myContext = context;
myContainingClass = context.getCodeClass();
}
public void doProcess() {
processTypeDefinition();
processMethods();
}
private void processTypeDefinition() {
final PsiAnnotation builderAnno = PsiImplUtil.getAnnotation(myContainingClass, BUILDER_FQN);
if (!isApplicable(builderAnno, DEFAULT_STRATEGY_NAME)) return;
boolean includeSuper = isIncludeSuperProperties(builderAnno);
final PsiClass builderClass = createBuilderClass(builderAnno, getFields(myContext, includeSuper));
myContext.addMethod(createBuilderMethod(builderClass, builderAnno));
myContext.addInnerClass(builderClass);
}
@NotNull
private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation, @NotNull PsiVariable[] setters) {
return createBuilderClass(annotation, setters, null);
}
@NotNull
private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation,
@NotNull PsiVariable[] setters,
@Nullable PsiType builtType) {
final LightPsiClassBuilder builderClass = new BuilderHelperLightPsiClass(
myContainingClass, getBuilderClassName(annotation, myContainingClass)
);
for (PsiVariable field : setters) {
LightMethodBuilder setter = createFieldSetter(builderClass, field, annotation);
builderClass.addMethod(setter);
}
final LightMethodBuilder buildMethod = createBuildMethod(
annotation, builtType == null ? createType(myContainingClass) : builtType
);
return builderClass.addMethod(buildMethod);
}
@NotNull
private LightMethodBuilder createBuilderMethod(@NotNull PsiClass builderClass, @NotNull PsiAnnotation annotation) {
final LightMethodBuilder builderMethod = new LightMethodBuilder(myContext.getManager(), getBuilderMethodName(annotation));
builderMethod.addModifier(PsiModifier.STATIC);
builderMethod.setOriginInfo(ORIGIN_INFO);
builderMethod.setNavigationElement(annotation);
builderMethod.setMethodReturnType(createType(builderClass));
return builderMethod;
}
private void processMethods() {
for (GrMethod method : myContext.getCodeClass().getCodeMethods()) {
processMethod(method);
}
}
private void processMethod(@NotNull GrMethod method) {
final PsiAnnotation annotation = PsiImplUtil.getAnnotation(method, BUILDER_FQN);
if (!isApplicable(annotation, DEFAULT_STRATEGY_NAME)) return;
if (method.isConstructor()) {
processConstructor(method, annotation);
}
else if (method.hasModifierProperty(PsiModifier.STATIC)) {
processFactoryMethod(method, annotation);
}
}
private void processConstructor(@NotNull GrMethod method, PsiAnnotation annotation) {
PsiClass builderClass = createBuilderClass(annotation, method.getParameters());
myContext.addMethod(createBuilderMethod(builderClass, annotation));
myContext.addInnerClass(builderClass);
}
private void processFactoryMethod(@NotNull GrMethod method, PsiAnnotation annotation) {
PsiClass builderClass = createBuilderClass(annotation, method.getParameters(), method.getReturnType());
myContext.addMethod(createBuilderMethod(builderClass, annotation));
myContext.addInnerClass(builderClass);
}
@NotNull
private static String getBuilderMethodName(@NotNull PsiAnnotation annotation) {
final String builderMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderMethodName");
return StringUtil.isEmpty(builderMethodName) ? "builder" : builderMethodName;
}
}
@NotNull
public static String getBuilderClassName(@NotNull PsiAnnotation annotation, @NotNull GrTypeDefinition clazz) {
final String builderClassName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderClassName");
return builderClassName == null ? String.format("%s%s", clazz.getName(), "Builder") : builderClassName;
}
@NotNull
public static LightMethodBuilder createBuildMethod(@NotNull PsiAnnotation annotation, @NotNull PsiType builtType) {
final LightMethodBuilder buildMethod = new LightMethodBuilder(annotation.getManager(), getBuildMethodName(annotation));
buildMethod.setOriginInfo(ORIGIN_INFO);
buildMethod.setMethodReturnType(builtType);
return buildMethod;
}
@NotNull
public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
@NotNull PsiVariable field,
@NotNull PsiAnnotation annotation) {
String name = Objects.requireNonNull(field.getName());
return createFieldSetter(builderClass, name, field.getType(), annotation, field);
}
@NotNull
public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
@NotNull String name,
@NotNull PsiType type,
@NotNull PsiAnnotation annotation,
@NotNull PsiElement navigationElement) {
final LightMethodBuilder fieldSetter = new LightMethodBuilder(builderClass.getManager(), getFieldMethodName(annotation, name));
fieldSetter.addModifier(PsiModifier.PUBLIC);
fieldSetter.addParameter(name, type);
fieldSetter.setContainingClass(builderClass);
fieldSetter.setMethodReturnType(JavaPsiFacade.getElementFactory(builderClass.getProject()).createType(builderClass));
fieldSetter.setNavigationElement(navigationElement);
fieldSetter.setOriginInfo(ORIGIN_INFO);
return fieldSetter;
}
@NotNull
public static String getFieldMethodName(@NotNull PsiAnnotation annotation, @NotNull String fieldName) {
final String prefix = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "prefix");
return StringUtil.isEmpty(prefix) ? fieldName : String.format("%s%s", prefix, StringUtil.capitalize(fieldName));
}
@NotNull
private static String getBuildMethodName(@NotNull PsiAnnotation annotation) {
final String buildMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "buildMethodName");
return StringUtil.isEmpty(buildMethodName) ? "build" : buildMethodName;
}
}
| goodwinnk/intellij-community | plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/resolve/ast/builder/strategy/DefaultBuilderStrategySupport.java | Java | apache-2.0 | 8,689 |
/*
* Copyright 2004,2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.registry.ws.client.test.security;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.ResourceImpl;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
public class ContinuousOperations extends SecurityTestSetup {
public ContinuousOperations(String text) {
super(text);
}
    public void testContinuousDelete() throws Exception {
int iterations = 100;
for (int i = 0; i < iterations; i++) {
Resource res1 = registry.newResource();
byte[] r1content = RegistryUtils.encodeString("R2 content");
res1.setContent(r1content);
String path = "/con-delete/test/" + i + 1;
registry.put(path, res1);
Resource resource1 = registry.get(path);
assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource1.getContent()),
RegistryUtils.decodeBytes((byte[]) res1.getContent()));
registry.delete(path);
boolean value = false;
if (registry.resourceExists(path)) {
value = true;
}
assertFalse("Resoruce not found at the path", value);
res1.discard();
resource1.discard();
Thread.sleep(100);
}
}
public void testContinuousUpdate() throws Exception {
int iterations = 100;
for (int i = 0; i < iterations; i++) {
Resource res1 = registry.newResource();
byte[] r1content = RegistryUtils.encodeString("R2 content");
res1.setContent(r1content);
String path = "/con-delete/test-update/" + i + 1;
registry.put(path, res1);
Resource resource1 = registry.get(path);
assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource1.getContent()),
RegistryUtils.decodeBytes((byte[]) res1.getContent()));
Resource resource = new ResourceImpl();
byte[] r1content1 = RegistryUtils.encodeString("R2 content updated");
resource.setContent(r1content1);
resource.setProperty("abc", "abc");
registry.put(path, resource);
Resource resource2 = registry.get(path);
assertEquals("File content is not matching", RegistryUtils.decodeBytes((byte[]) resource.getContent()),
RegistryUtils.decodeBytes((byte[]) resource2.getContent()));
resource.discard();
res1.discard();
resource1.discard();
resource2.discard();
Thread.sleep(100);
}
}
}
| thusithathilina/carbon-registry | components/registry/org.wso2.carbon.registry.ws.client/src/main/ws-test/org/wso2/carbon/registry/ws/client/test/security/ContinuousOperations.java | Java | apache-2.0 | 3,380 |
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.jooq;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
/**
* {@link ImportAutoConfiguration Auto-configuration imports} for typical jOOQ tests. Most
* tests should consider using {@link JooqTest @JooqTest} rather than using this
* annotation directly.
*
* @author Michael Simons
* @since 2.0.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@ImportAutoConfiguration
public @interface AutoConfigureJooq {
}
| Buzzardo/spring-boot | spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jooq/AutoConfigureJooq.java | Java | apache-2.0 | 1,395 |
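/*
 * Hypothetical test sketch: adding jOOQ support to a JDBC test slice with the
 * annotation above. The test class, method and field are invented, and an embedded
 * database plus JUnit 5 are assumed to be on the classpath.
 */
import org.jooq.DSLContext;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.jdbc.JdbcTest;
import org.springframework.boot.test.autoconfigure.jooq.AutoConfigureJooq;

@JdbcTest
@AutoConfigureJooq
class JooqWithJdbcTestSketch {

    @Autowired
    private DSLContext dsl;

    @Test
    void dslContextIsAutoConfigured() {
        // @JdbcTest alone does not import jOOQ; @AutoConfigureJooq adds the DSLContext bean.
        Assertions.assertNotNull(dsl);
    }
}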
/*
*******************************************************************************
* Copyright (C) 2002-2012, International Business Machines Corporation and *
* others. All Rights Reserved. *
*******************************************************************************
*/
package com.ibm.icu.dev.util;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import com.ibm.icu.text.UnicodeSet;
import com.ibm.icu.text.UnicodeSetIterator;
public abstract class Visitor {
public void doAt(Object item) {
if (item instanceof Collection) {
doAt((Collection) item);
} else if (item instanceof Map) {
doAt((Map) item);
} else if (item instanceof Object[]) {
doAt((Object[]) item);
} else if (item instanceof UnicodeSet) {
doAt((UnicodeSet) item);
} else {
doSimpleAt(item);
}
}
public int count(Object item) {
if (item instanceof Collection) {
return ((Collection) item).size();
} else if (item instanceof Map) {
return ((Map) item).size();
} else if (item instanceof Object[]) {
return ((Object[]) item).length;
} else if (item instanceof UnicodeSet) {
return ((UnicodeSet) item).size();
} else {
return 1;
}
}
// the default implementation boxing
public void doAt(int o) {
doSimpleAt(new Integer(o));
}
public void doAt(double o) {
doSimpleAt(new Double(o));
}
public void doAt(char o) {
doSimpleAt(new Character(o));
}
// for subclassing
protected void doAt (Collection c) {
if (c.size() == 0) doBefore(c, null);
Iterator it = c.iterator();
boolean first = true;
Object last = null;
while (it.hasNext()) {
Object item = it.next();
if (first) {
doBefore(c, item);
first = false;
} else {
doBetween(c, last, item);
}
doAt(last=item);
}
doAfter(c, last);
}
protected void doAt (Map c) {
doAt(c.entrySet());
}
protected void doAt (UnicodeSet c) {
if (c.size() == 0) doBefore(c, null);
UnicodeSetIterator it = new UnicodeSetIterator(c);
boolean first = true;
Object last = null;
Object item;
CodePointRange cpr0 = new CodePointRange();
CodePointRange cpr1 = new CodePointRange();
CodePointRange cpr;
while(it.nextRange()) {
if (it.codepoint == UnicodeSetIterator.IS_STRING) {
item = it.string;
} else {
cpr = last == cpr0 ? cpr1 : cpr0; // make sure we don't override last
cpr.codepoint = it.codepoint;
cpr.codepointEnd = it.codepointEnd;
item = cpr;
}
            if (first) {
                doBefore(c, item);
                first = false;
} else {
doBetween(c, last, item);
}
doAt(last = item);
}
doAfter(c, last);
}
protected void doAt (Object[] c) {
doBefore(c, c.length == 0 ? null : c[0]);
Object last = null;
for (int i = 0; i < c.length; ++i) {
if (i != 0) doBetween(c, last, c[i]);
doAt(last = c[i]);
}
doAfter(c, last);
}
public static class CodePointRange{
public int codepoint, codepointEnd;
}
// ===== MUST BE OVERRIDEN =====
abstract protected void doBefore(Object container, Object item);
abstract protected void doBetween(Object container, Object lastItem, Object nextItem);
abstract protected void doAfter(Object container, Object item);
abstract protected void doSimpleAt(Object o);
}
| nightauer/quickdic-dictionary.dictionary | jars/icu4j-52_1/main/tests/framework/src/com/ibm/icu/dev/util/Visitor.java | Java | apache-2.0 | 4,008 |
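/*
 * Minimal concrete subclass sketch (not part of ICU): renders any supported
 * container as a bracketed string by implementing the four abstract hooks.
 * The class name and output format are invented for illustration.
 */
import com.ibm.icu.dev.util.Visitor;

public class RenderingVisitorSketch extends Visitor {
    private final StringBuilder out = new StringBuilder();

    @Override
    protected void doBefore(Object container, Object item) {
        out.append('[');          // opening bracket once per container
    }

    @Override
    protected void doBetween(Object container, Object lastItem, Object nextItem) {
        out.append(", ");         // separator between two items
    }

    @Override
    protected void doAfter(Object container, Object item) {
        out.append(']');          // closing bracket once per container
    }

    @Override
    protected void doSimpleAt(Object o) {
        out.append(o);            // leaf values are appended as-is
    }

    public String render(Object item) {
        out.setLength(0);
        doAt(item);               // dispatches on Collection, Map, Object[] or UnicodeSet
        return out.toString();
    }

    public static void main(String[] args) {
        // Prints "[a, b, c]" for a simple list; nested containers nest the brackets.
        System.out.println(new RenderingVisitorSketch()
                .render(java.util.Arrays.asList("a", "b", "c")));
    }
}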
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web;
import org.springframework.beans.factory.annotation.Value;
/**
* Configuration properties for web error handling.
*
* @author Michael Stummvoll
* @author Stephane Nicoll
* @author Vedran Pavic
* @since 1.3.0
*/
public class ErrorProperties {
/**
* Path of the error controller.
*/
@Value("${error.path:/error}")
private String path = "/error";
/**
* Include the "exception" attribute.
*/
private boolean includeException;
/**
* When to include a "stacktrace" attribute.
*/
private IncludeStacktrace includeStacktrace = IncludeStacktrace.NEVER;
public String getPath() {
return this.path;
}
public void setPath(String path) {
this.path = path;
}
public boolean isIncludeException() {
return this.includeException;
}
public void setIncludeException(boolean includeException) {
this.includeException = includeException;
}
public IncludeStacktrace getIncludeStacktrace() {
return this.includeStacktrace;
}
public void setIncludeStacktrace(IncludeStacktrace includeStacktrace) {
this.includeStacktrace = includeStacktrace;
}
/**
* Include Stacktrace attribute options.
*/
public enum IncludeStacktrace {
/**
* Never add stacktrace information.
*/
NEVER,
/**
* Always add stacktrace information.
*/
ALWAYS,
/**
* Add stacktrace information when the "trace" request parameter is "true".
*/
ON_TRACE_PARAM
}
}
| bbrouwer/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/ErrorProperties.java | Java | apache-2.0 | 2,080 |
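/*
 * Minimal sketch (not taken from Spring Boot itself) of consulting ErrorProperties
 * to decide whether a stack trace should be attached to an error response. The
 * surrounding class and method names are invented.
 */
import org.springframework.boot.autoconfigure.web.ErrorProperties;
import org.springframework.boot.autoconfigure.web.ErrorProperties.IncludeStacktrace;

public class ErrorTraceDecisionSketch {

    private final ErrorProperties errorProperties = new ErrorProperties();

    public boolean includeStackTrace(boolean traceRequestParameterSet) {
        IncludeStacktrace include = errorProperties.getIncludeStacktrace();
        if (include == IncludeStacktrace.ALWAYS) {
            return true;                       // always attach the stack trace
        }
        // ON_TRACE_PARAM only attaches it when the request asked for it; NEVER falls through to false.
        return include == IncludeStacktrace.ON_TRACE_PARAM && traceRequestParameterSet;
    }
}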
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2007 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.mockito.asm.util;
import org.mockito.asm.AnnotationVisitor;
import org.mockito.asm.Attribute;
import org.mockito.asm.FieldVisitor;
/**
* A {@link FieldVisitor} that checks that its methods are properly used.
*/
public class CheckFieldAdapter implements FieldVisitor {
private final FieldVisitor fv;
private boolean end;
public CheckFieldAdapter(final FieldVisitor fv) {
this.fv = fv;
}
public AnnotationVisitor visitAnnotation(
final String desc,
final boolean visible)
{
checkEnd();
CheckMethodAdapter.checkDesc(desc, false);
return new CheckAnnotationAdapter(fv.visitAnnotation(desc, visible));
}
public void visitAttribute(final Attribute attr) {
checkEnd();
if (attr == null) {
throw new IllegalArgumentException("Invalid attribute (must not be null)");
}
fv.visitAttribute(attr);
}
public void visitEnd() {
checkEnd();
end = true;
fv.visitEnd();
}
private void checkEnd() {
if (end) {
throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
}
}
}
| wxcandy/Mahjong | org/mockito/asm/util/CheckFieldAdapter.java | Java | mit | 2,946 |
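/*
 * Wiring sketch (invented): placing CheckFieldAdapter in front of the FieldVisitor
 * returned by a ClassWriter. Assumes the repackaged ASM core classes (ClassWriter,
 * Opcodes) are available next to the util package shown above.
 */
import org.mockito.asm.ClassWriter;
import org.mockito.asm.FieldVisitor;
import org.mockito.asm.Opcodes;
import org.mockito.asm.util.CheckFieldAdapter;

public class CheckFieldAdapterSketch {
    public static void main(String[] args) {
        ClassWriter cw = new ClassWriter(0);
        cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "Demo", null, "java/lang/Object", null);

        FieldVisitor raw = cw.visitField(Opcodes.ACC_PRIVATE, "count", "I", null, null);
        FieldVisitor checked = new CheckFieldAdapter(raw);

        // The adapter validates the annotation descriptor and the call ordering,
        // then delegates each call to the underlying visitor.
        checked.visitAnnotation("Ljava/lang/Deprecated;", true).visitEnd();
        checked.visitEnd();

        cw.visitEnd();
        byte[] bytes = cw.toByteArray();   // the generated class, with the checked field
        System.out.println(bytes.length);
    }
}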
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.loader;
public class Constants {
public static final String Package = "org.apache.catalina.loader";
}
| plumer/codana | tomcat_files/7.0.61/Constants (2).java | Java | mit | 945 |
/*
* Copyright 2009 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.logging;
/**
* Defines the interface for creating Log implementations.
*
* @author Jeff Butler
*
*/
public interface AbstractLogFactory {
Log getLog(Class<?> aClass);
}
| NanYoMy/mybatis-generator | src/main/java/org/mybatis/generator/logging/AbstractLogFactory.java | Java | mit | 836 |
/**
* Copyright (C) 2011 JTalks.org Team
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jtalks.jcommune.plugin.api.web.validation.validators;
import org.jtalks.jcommune.plugin.api.service.PluginBbCodeService;
import org.jtalks.jcommune.plugin.api.web.validation.annotations.BbCodeAwareSize;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
/**
* Extends default @Size annotation to ignore BB codes in string.
* As for now, applicable to string values only.
*
* @author Evgeniy Naumenko
*/
public class BbCodeAwareSizeValidator implements ConstraintValidator<BbCodeAwareSize, String>, ApplicationContextAware {
public static final String NEW_LINE_HTML = "<br/>";
    public static final String QUOTE_HTML = "&quot;";
public static final String EMPTY_LIST_BB_REGEXP = "\\[list\\][\n\r\\s]*(\\[\\*\\][\n\r\\s]*)*\\[\\/list\\]";
private int min;
private int max;
private ApplicationContext context;
private PluginBbCodeService bbCodeService;
@Autowired
public BbCodeAwareSizeValidator(PluginBbCodeService bbCodeService) {
this.bbCodeService = bbCodeService;
}
/**
* {@inheritDoc}
*/
@Override
public void initialize(BbCodeAwareSize constraintAnnotation) {
this.min = constraintAnnotation.min();
this.max = constraintAnnotation.max();
}
/**
     * The database stores both bb codes and the symbols visible to users.
     * The post length including bb codes can't be greater than the max value.
* {@inheritDoc}
*/
@Override
public boolean isValid(String value, ConstraintValidatorContext context) {
if (value != null) {
String emptyListRemoved = removeEmptyListBb(value);
String trimed = removeBBCodes(emptyListRemoved).trim();
int plainTextLength = getDisplayedLength(trimed);
return plainTextLength >= min && value.length() <= max;
}
return false;
}
/**
* Removes all BB codes from the text given, simply cutting
* out all [...]-style tags found
*
* @param source text to cleanup
* @return plain text without BB tags
*/
private String removeBBCodes(String source) {
return getBBCodeService().stripBBCodes(source);
}
@Override
public void setApplicationContext(ApplicationContext ac) throws BeansException {
this.context = ac;
}
private PluginBbCodeService getBBCodeService() {
if (bbCodeService == null) {
bbCodeService = this.context.getBean(PluginBbCodeService.class);
}
return bbCodeService;
}
/**
     * Calculates the length of the string as it will be displayed.
     * Needed because method <b>removeBBCodes</b> leaves "&quot;" and "<br/>" symbols.
     * @param s String to calculate length.
     * @return Length of the string as displayed.
*/
private int getDisplayedLength(String s) {
return s.replaceAll(QUOTE_HTML, "\"").replaceAll(NEW_LINE_HTML, "\n\r").length();
}
/**
     * Removes all empty lists from the text. Needed because <b>removeBBCodes</b> deletes
     * the bb codes for lists but does not delete the bb codes for list elements.
* @param text Text to remove empty lists.
* @return Text without empty lists.
*/
private String removeEmptyListBb(String text) {
return text.replaceAll(EMPTY_LIST_BB_REGEXP, "");
}
}
| Noctrunal/jcommune | jcommune-plugin-api/src/main/java/org/jtalks/jcommune/plugin/api/web/validation/validators/BbCodeAwareSizeValidator.java | Java | lgpl-2.1 | 4,375 |
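/*
 * Hypothetical DTO sketch showing the annotation this validator backs. The attribute
 * names follow the min()/max() accessors read in initialize(); the field, limits and
 * class name are invented.
 */
import org.jtalks.jcommune.plugin.api.web.validation.annotations.BbCodeAwareSize;

public class PostDtoSketch {

    // The validator strips BB codes and empty [list] blocks before checking min,
    // while the raw value (including BB codes) must still fit under max.
    @BbCodeAwareSize(min = 2, max = 20000)
    private String bodyText;

    public String getBodyText() {
        return bodyText;
    }

    public void setBodyText(String bodyText) {
        this.bodyText = bodyText;
    }
}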
/*
* Copyright 2011 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.Lists;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.List;
/**
* <p>Compiler pass that converts all calls to:
* goog.object.create(key1, val1, key2, val2, ...) where all of the keys
* are literals into object literals.</p>
*
* @author agrieve@google.com (Andrew Grieve)
*/
final class ClosureOptimizePrimitives implements CompilerPass {
/** Reference to the JS compiler */
private final AbstractCompiler compiler;
/**
* Identifies all calls to goog.object.create.
*/
private class FindObjectCreateCalls extends AbstractPostOrderCallback {
List<Node> callNodes = Lists.newArrayList();
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (n.isCall()) {
String fnName = n.getFirstChild().getQualifiedName();
if ("goog$object$create".equals(fnName) ||
"goog.object.create".equals(fnName)) {
callNodes.add(n);
}
}
}
}
/**
* @param compiler The AbstractCompiler
*/
ClosureOptimizePrimitives(AbstractCompiler compiler) {
this.compiler = compiler;
}
@Override
public void process(Node externs, Node root) {
FindObjectCreateCalls pass = new FindObjectCreateCalls();
NodeTraversal.traverse(compiler, root, pass);
processObjectCreateCalls(pass.callNodes);
}
/**
* Converts all of the given call nodes to object literals that are safe to
* do so.
*/
private void processObjectCreateCalls(List<Node> callNodes) {
for (Node callNode : callNodes) {
Node curParam = callNode.getFirstChild().getNext();
if (canOptimizeObjectCreate(curParam)) {
Node objNode = IR.objectlit().srcref(callNode);
while (curParam != null) {
Node keyNode = curParam;
Node valueNode = curParam.getNext();
curParam = valueNode.getNext();
callNode.removeChild(keyNode);
callNode.removeChild(valueNode);
if (!keyNode.isString()) {
keyNode = IR.string(NodeUtil.getStringValue(keyNode))
.srcref(keyNode);
}
keyNode.setType(Token.STRING_KEY);
keyNode.setQuotedString();
objNode.addChildToBack(IR.propdef(keyNode, valueNode));
}
callNode.getParent().replaceChild(callNode, objNode);
compiler.reportCodeChange();
}
}
}
/**
* Returns whether the given call to goog.object.create can be converted to an
* object literal.
*/
private boolean canOptimizeObjectCreate(Node firstParam) {
Node curParam = firstParam;
while (curParam != null) {
// All keys must be strings or numbers.
if (!curParam.isString() && !curParam.isNumber()) {
return false;
}
curParam = curParam.getNext();
// Check for an odd number of parameters.
if (curParam == null) {
return false;
}
curParam = curParam.getNext();
}
return true;
}
}
| jhiswin/idiil-closure-compiler | src/com/google/javascript/jscomp/ClosureOptimizePrimitives.java | Java | apache-2.0 | 3,783 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.common.Storage;
/**
*
* CreateEditsLog
* Synopsis: CreateEditsLog -f numFiles StartingBlockId numBlocksPerFile
* [-r replicafactor] [-d editsLogDirectory]
* Default replication factor is 1
* Default edits log directory is /tmp/EditsLogOut
*
* Create a name node's edits log in /tmp/EditsLogOut.
* The file /tmp/EditsLogOut/current/edits can be copied to a name node's
 * dfs.namenode.name.dir/current directory and the name node can be started as usual.
*
* The files are created in /createdViaInjectingInEditsLog
 * The file names contain the starting and ending blockIds; hence one can
 * create multiple edits logs with this command using non-overlapping
* block ids and feed the files to a single name node.
*
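 * Example (illustrative only): "CreateEditsLog -f 1000 0 3 -r 3 -d /tmp/myEdits"
 * creates 1000 files of 3 blocks each, starting at block id 0, with
 * replication factor 3, under /tmp/myEdits.
 *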
 * See also {@link DataNodeCluster} for injecting a set of matching
* blocks created with this command into a set of simulated data nodes.
*
*/
public class CreateEditsLog {
static final String BASE_PATH = "/createdViaInjectingInEditsLog";
static final String EDITS_DIR = "/tmp/EditsLogOut";
static String edits_dir = EDITS_DIR;
static final public long BLOCK_GENERATION_STAMP =
GenerationStamp.LAST_RESERVED_STAMP;
static void addFiles(FSEditLog editLog, int numFiles, short replication,
int blocksPerFile, long startingBlockId, long blockSize,
FileNameGenerator nameGenerator) {
PermissionStatus p = new PermissionStatus("joeDoe", "people",
new FsPermission((short)0777));
INodeId inodeId = new INodeId();
INodeDirectory dirInode = new INodeDirectory(inodeId.nextValue(), null, p,
0L);
editLog.logMkDir(BASE_PATH, dirInode);
BlockInfo[] blocks = new BlockInfo[blocksPerFile];
for (int iB = 0; iB < blocksPerFile; ++iB) {
blocks[iB] =
new BlockInfo(new Block(0, blockSize, BLOCK_GENERATION_STAMP),
replication);
}
long currentBlockId = startingBlockId;
long bidAtSync = startingBlockId;
for (int iF = 0; iF < numFiles; iF++) {
for (int iB = 0; iB < blocksPerFile; ++iB) {
blocks[iB].setBlockId(currentBlockId++);
}
final INodeFile inode = new INodeFile(inodeId.nextValue(), null,
p, 0L, 0L, blocks, replication, blockSize, (byte)0);
inode.toUnderConstruction("", "");
// Append path to filename with information about blockIDs
String path = "_" + iF + "_B" + blocks[0].getBlockId() +
"_to_B" + blocks[blocksPerFile-1].getBlockId() + "_";
String filePath = nameGenerator.getNextFileName("");
filePath = filePath + path;
// Log the new sub directory in edits
if ((iF % nameGenerator.getFilesPerDirectory()) == 0) {
String currentDir = nameGenerator.getCurrentDir();
dirInode = new INodeDirectory(inodeId.nextValue(), null, p, 0L);
editLog.logMkDir(currentDir, dirInode);
}
INodeFile fileUc = new INodeFile(inodeId.nextValue(), null,
p, 0L, 0L, BlockInfo.EMPTY_ARRAY, replication, blockSize, (byte)0);
fileUc.toUnderConstruction("", "");
editLog.logOpenFile(filePath, fileUc, false, false);
editLog.logCloseFile(filePath, inode);
if (currentBlockId - bidAtSync >= 2000) { // sync every 2K blocks
editLog.logSync();
bidAtSync = currentBlockId;
}
}
System.out.println("Created edits log in directory " + edits_dir);
System.out.println(" containing " +
numFiles + " File-Creates, each file with " + blocksPerFile + " blocks");
System.out.println(" blocks range: " +
startingBlockId + " to " + (currentBlockId-1));
}
static final String usage = "Usage: createditlogs " +
" -f numFiles startingBlockIds NumBlocksPerFile [-r replicafactor] " +
"[-d editsLogDirectory]\n" +
" Default replication factor is 1\n" +
" Default edits log direcory is " + EDITS_DIR + "\n";
static void printUsageExit() {
System.out.println(usage);
System.exit(-1);
}
static void printUsageExit(String err) {
System.out.println(err);
printUsageExit();
}
/**
* @param args arguments
* @throws IOException
*/
public static void main(String[] args) throws IOException {
long startingBlockId = 1;
int numFiles = 0;
short replication = 1;
int numBlocksPerFile = 0;
long blockSize = 10;
if (args.length == 0) {
printUsageExit();
}
for (int i = 0; i < args.length; i++) { // parse command line
if (args[i].equals("-h"))
printUsageExit();
if (args[i].equals("-f")) {
if (i + 3 >= args.length || args[i+1].startsWith("-") ||
args[i+2].startsWith("-") || args[i+3].startsWith("-")) {
printUsageExit(
"Missing num files, starting block and/or number of blocks");
}
numFiles = Integer.parseInt(args[++i]);
startingBlockId = Integer.parseInt(args[++i]);
numBlocksPerFile = Integer.parseInt(args[++i]);
if (numFiles <=0 || numBlocksPerFile <= 0) {
printUsageExit("numFiles and numBlocksPerFile most be greater than 0");
}
} else if (args[i].equals("-l")) {
if (i + 1 >= args.length) {
printUsageExit(
"Missing block length");
}
blockSize = Long.parseLong(args[++i]);
} else if (args[i].equals("-r") || args[i+1].startsWith("-")) {
if (i + 1 >= args.length) {
printUsageExit(
"Missing replication factor");
}
replication = Short.parseShort(args[++i]);
} else if (args[i].equals("-d")) {
if (i + 1 >= args.length || args[i+1].startsWith("-")) {
printUsageExit("Missing edits logs directory");
}
edits_dir = args[++i];
} else {
printUsageExit();
}
}
File editsLogDir = new File(edits_dir);
File subStructureDir = new File(edits_dir + "/" +
Storage.STORAGE_DIR_CURRENT);
if ( !editsLogDir.exists() ) {
if ( !editsLogDir.mkdir()) {
System.out.println("cannot create " + edits_dir);
System.exit(-1);
}
}
if ( !subStructureDir.exists() ) {
if ( !subStructureDir.mkdir()) {
System.out.println("cannot create subdirs of " + edits_dir);
System.exit(-1);
}
}
FileNameGenerator nameGenerator = new FileNameGenerator(BASE_PATH, 100);
FSEditLog editLog = FSImageTestUtil.createStandaloneEditLog(editsLogDir);
editLog.openForWrite();
addFiles(editLog, numFiles, replication, numBlocksPerFile, startingBlockId,
blockSize, nameGenerator);
editLog.logSync();
editLog.close();
}
}
| ZhangXFeng/hadoop | src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java | Java | apache-2.0 | 8,019 |
package org.wso2.carbon.stratos.common.util;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.stratos.common.config.CloudServiceConfig;
import org.wso2.carbon.stratos.common.config.CloudServicesDescConfig;
import org.wso2.carbon.stratos.common.config.PermissionConfig;
import org.wso2.carbon.stratos.common.constants.StratosConstants;
import org.wso2.carbon.stratos.common.internal.CloudCommonServiceComponent;
import org.wso2.carbon.registry.core.Collection;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
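/**
 * Utility methods for activating and deactivating cloud services for a tenant and for
 * querying their activation state. The activation flag is stored in the super tenant's
 * governance registry, while the corresponding permission collections are created in
 * (or removed from) the tenant's configuration registry.
 */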
public class CloudServicesUtil {
private static final Log log = LogFactory.getLog(CloudServicesUtil.class);
// TODO protect using Java security
public static void activateAllServices(CloudServicesDescConfig cloudServicesDesc, int tenantId) throws Exception {
java.util.Collection<CloudServiceConfig> cloudServiceConfigList =
cloudServicesDesc.getCloudServiceConfigs().
values();
if (cloudServiceConfigList != null) {
for (CloudServiceConfig cloudServiceConfig : cloudServiceConfigList) {
if (cloudServiceConfig.isDefaultActive()) {
String cloudServiceName = cloudServiceConfig.getName();
try {
if (!CloudServicesUtil.isCloudServiceActive(cloudServiceName, tenantId)) {
CloudServicesUtil.setCloudServiceActive(true,
cloudServiceName,
tenantId,
cloudServicesDesc.getCloudServiceConfigs().
get(cloudServiceName));
}
} catch (Exception e) {
String msg = "Error in activating the cloud service at the tenant" +
"creation. tenant id: " + tenantId + ", service name: " +
cloudServiceName;
log.error(msg, e);
throw new UserStoreException(msg, e);
}
}
}
}
}
public static void activateOriginalAndCompulsoryServices(CloudServicesDescConfig cloudServicesDesc,
String originalService,
int tenantId) throws Exception {
Map<String, CloudServiceConfig> cloudServiceConfigs =
cloudServicesDesc.getCloudServiceConfigs();
if (CloudServicesUtil.isServiceNameValid(cloudServicesDesc, originalService)) {
if (!CloudServicesUtil.isCloudServiceActive(originalService, tenantId)) {
CloudServicesUtil.setCloudServiceActive(true, originalService, tenantId,
cloudServiceConfigs.get(originalService));
log.info("Successfully activated the " + originalService + " for the tenant " +
tenantId);
}
// register the compulsory services
if (!CloudServicesUtil.isCloudServiceActive(StratosConstants.CLOUD_IDENTITY_SERVICE,
tenantId)) {
CloudServicesUtil.setCloudServiceActive(true,
StratosConstants.CLOUD_IDENTITY_SERVICE,
tenantId,
cloudServiceConfigs.get(StratosConstants.CLOUD_IDENTITY_SERVICE));
}
if (!CloudServicesUtil.isCloudServiceActive(StratosConstants.CLOUD_GOVERNANCE_SERVICE,
tenantId)) {
CloudServicesUtil.setCloudServiceActive(true,
StratosConstants.CLOUD_GOVERNANCE_SERVICE,
tenantId,
cloudServiceConfigs.get(StratosConstants.CLOUD_GOVERNANCE_SERVICE));
}
} else {
log.warn("Unable to activate the " + originalService + " for the tenant " + tenantId);
}
}
public static void setCloudServiceActive(boolean active,
String cloudServiceName,
int tenantId, CloudServiceConfig cloudServiceConfig)
throws Exception {
if (cloudServiceConfig.getLabel() == null) {
            // for non-labelled services, we are not setting/unsetting the
            // service-active flag
return;
}
UserRegistry govRegistry =
CloudCommonServiceComponent.getGovernanceSystemRegistry(
MultitenantConstants.SUPER_TENANT_ID);
UserRegistry configRegistry = CloudCommonServiceComponent.getConfigSystemRegistry(tenantId);
String cloudServiceInfoPath = StratosConstants.CLOUD_SERVICE_INFO_STORE_PATH +
RegistryConstants.PATH_SEPARATOR + tenantId +
RegistryConstants.PATH_SEPARATOR + cloudServiceName;
Resource cloudServiceInfoResource;
if (govRegistry.resourceExists(cloudServiceInfoPath)) {
cloudServiceInfoResource = govRegistry.get(cloudServiceInfoPath);
} else {
cloudServiceInfoResource = govRegistry.newCollection();
}
cloudServiceInfoResource.setProperty(StratosConstants.CLOUD_SERVICE_IS_ACTIVE_PROP_KEY,
active ? "true" : "false");
govRegistry.put(cloudServiceInfoPath, cloudServiceInfoResource);
// then we will copy the permissions
List<PermissionConfig> permissionConfigs = cloudServiceConfig.getPermissionConfigs();
for (PermissionConfig permissionConfig : permissionConfigs) {
String path = permissionConfig.getPath();
String name = permissionConfig.getName();
if (active) {
if (!configRegistry.resourceExists(path)) {
Collection collection = configRegistry.newCollection();
collection.setProperty(StratosConstants.DISPLAY_NAME, name);
configRegistry.put(path, collection);
}
} else {
if (configRegistry.resourceExists(path)) {
configRegistry.delete(path);
}
}
}
}
public static boolean isCloudServiceActive(String cloudServiceName,
int tenantId) throws Exception {
UserRegistry govRegistry = CloudCommonServiceComponent.getGovernanceSystemRegistry(
MultitenantConstants.SUPER_TENANT_ID);
return isCloudServiceActive(cloudServiceName, tenantId, govRegistry);
}
public static boolean isCloudServiceActive(String cloudServiceName,
int tenantId, UserRegistry govRegistry)
throws Exception {
// The cloud manager is always active
if (StratosConstants.CLOUD_MANAGER_SERVICE.equals(cloudServiceName)) {
return true;
}
String cloudServiceInfoPath = StratosConstants.CLOUD_SERVICE_INFO_STORE_PATH +
RegistryConstants.PATH_SEPARATOR + tenantId +
RegistryConstants.PATH_SEPARATOR + cloudServiceName;
Resource cloudServiceInfoResource;
if (govRegistry.resourceExists(cloudServiceInfoPath)) {
cloudServiceInfoResource = govRegistry.get(cloudServiceInfoPath);
String isActiveStr =
cloudServiceInfoResource.getProperty(
StratosConstants.CLOUD_SERVICE_IS_ACTIVE_PROP_KEY);
return "true".equals(isActiveStr);
}
return false;
}
public static boolean isServiceNameValid(CloudServicesDescConfig cloudServicesDesc,
String cloudServiceName) {
if(cloudServiceName == null) {
return false;
}
java.util.Collection<CloudServiceConfig> cloudServiceConfigList =
cloudServicesDesc.getCloudServiceConfigs().values();
if (cloudServiceName.equals(StratosConstants.CLOUD_MANAGER_SERVICE)) {
return false;
}
for (CloudServiceConfig cloudServiceConfig : cloudServiceConfigList) {
if (cloudServiceConfig.getName().equals(cloudServiceName)) {
return true;
}
}
return false;
}
}
| madhawa-gunasekara/carbon-commons | components/tenant-mgt-common/org.wso2.carbon.tenant.common/src/main/java/org/wso2/carbon/stratos/common/util/CloudServicesUtil.java | Java | apache-2.0 | 9,738 |
package autotest.afe;
import java.util.ArrayList;
import java.util.List;
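/**
 * Keeps track of a group of check boxes. New boxes are created through the bound
 * {@link Display}, using the current number of registered boxes as the index, and must
 * be registered with {@code add()}; the checked subset can then be queried and all
 * boxes can be enabled, disabled or reset together.
 */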
public class CheckBoxPanel {
public static interface Display {
public ICheckBox generateCheckBox(int index);
}
private List<ICheckBox> checkBoxes = new ArrayList<ICheckBox>();
private Display display;
public void bindDisplay(Display display) {
this.display = display;
}
public ICheckBox generateCheckBox() {
return display.generateCheckBox(checkBoxes.size());
}
public void add(ICheckBox checkBox) {
checkBoxes.add(checkBox);
}
public List<ICheckBox> getChecked() {
List<ICheckBox> result = new ArrayList<ICheckBox>();
for(ICheckBox checkBox : checkBoxes) {
if (checkBox.getValue()) {
result.add(checkBox);
}
}
return result;
}
public void setEnabled(boolean enabled) {
for(ICheckBox thisBox : checkBoxes) {
thisBox.setEnabled(enabled);
}
}
public void reset() {
for (ICheckBox thisBox : checkBoxes) {
thisBox.setValue(false);
}
}
}
| spcui/autotest | frontend/client/src/autotest/afe/CheckBoxPanel.java | Java | gpl-2.0 | 1,139 |
package autotest.tko;
import autotest.common.Utils;
public abstract class LabelField extends ParameterizedField {
@Override
public String getSqlCondition(String value) {
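        // The JSON null sentinel selects rows where this label is absent (id IS NULL);
        // any other value selects rows where the label is present.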
String condition = " IS NOT NULL";
if (value.equals(Utils.JSON_NULL)) {
condition = " IS NULL";
}
return getFilteringName() + condition;
}
@Override
public String getFilteringName() {
return getQuotedSqlName() + ".id";
}
}
| nacc/autotest | frontend/client/src/autotest/tko/LabelField.java | Java | gpl-2.0 | 472 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight.functionTypeComments;
import com.jetbrains.python.codeInsight.functionTypeComments.psi.PyFunctionTypeAnnotation;
import com.jetbrains.python.codeInsight.functionTypeComments.psi.PyParameterTypeList;
import com.jetbrains.python.psi.PyElementType;
/**
* @author Mikhail Golubev
*/
public interface PyFunctionTypeAnnotationElementTypes {
PyElementType FUNCTION_SIGNATURE = new PyElementType("FUNCTION_SIGNATURE", PyFunctionTypeAnnotation.class);
PyElementType PARAMETER_TYPE_LIST = new PyElementType("PARAMETER_TYPE_LIST", PyParameterTypeList.class);
}
| asedunov/intellij-community | python/src/com/jetbrains/python/codeInsight/functionTypeComments/PyFunctionTypeAnnotationElementTypes.java | Java | apache-2.0 | 1,200 |
/**
* Implementation of Multi-User Chat (XEP-0045).
*/
package org.jivesoftware.openfire.muc.spi; | zuoyebushiwo/openfire-my-study | src/java/org/jivesoftware/openfire/muc/spi/package-info.java | Java | apache-2.0 | 99 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.byteCode.ByteCodeBlock;
import com.facebook.presto.byteCode.ByteCodeNode;
import com.facebook.presto.byteCode.ClassDefinition;
import com.facebook.presto.byteCode.MethodDefinition;
import com.facebook.presto.byteCode.Parameter;
import com.facebook.presto.byteCode.ParameterizedType;
import com.facebook.presto.byteCode.Scope;
import com.facebook.presto.byteCode.Variable;
import com.facebook.presto.byteCode.control.ForLoop;
import com.facebook.presto.byteCode.control.IfStatement;
import com.facebook.presto.byteCode.instruction.LabelNode;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.operator.PageProcessor;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.relational.CallExpression;
import com.facebook.presto.sql.relational.ConstantExpression;
import com.facebook.presto.sql.relational.Expressions;
import com.facebook.presto.sql.relational.InputReferenceExpression;
import com.facebook.presto.sql.relational.RowExpression;
import com.facebook.presto.sql.relational.RowExpressionVisitor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Primitives;
import io.airlift.slice.Slice;
import java.util.List;
import java.util.TreeSet;
import static com.facebook.presto.byteCode.Access.PUBLIC;
import static com.facebook.presto.byteCode.Access.a;
import static com.facebook.presto.byteCode.OpCode.NOP;
import static com.facebook.presto.byteCode.Parameter.arg;
import static com.facebook.presto.byteCode.ParameterizedType.type;
import static com.facebook.presto.sql.gen.ByteCodeUtils.generateWrite;
import static com.facebook.presto.sql.gen.ByteCodeUtils.loadConstant;
import static java.lang.String.format;
import static java.util.Collections.nCopies;
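/**
 * Generates the body of a {@link PageProcessor}: a {@code process} method that iterates
 * over the requested position range, a {@code filter} method compiled from the filter
 * expression, and one {@code project_N} method per projection that writes its value
 * into the page builder.
 */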
public class PageProcessorCompiler
implements BodyCompiler<PageProcessor>
{
private final Metadata metadata;
public PageProcessorCompiler(Metadata metadata)
{
this.metadata = metadata;
}
@Override
public void generateMethods(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter, List<RowExpression> projections)
{
generateProcessMethod(classDefinition, filter, projections);
generateFilterMethod(classDefinition, callSiteBinder, filter);
for (int i = 0; i < projections.size(); i++) {
generateProjectMethod(classDefinition, callSiteBinder, "project_" + i, projections.get(i));
}
}
private void generateProcessMethod(ClassDefinition classDefinition, RowExpression filter, List<RowExpression> projections)
{
Parameter session = arg("session", ConnectorSession.class);
Parameter page = arg("page", Page.class);
Parameter start = arg("start", int.class);
Parameter end = arg("end", int.class);
Parameter pageBuilder = arg("pageBuilder", PageBuilder.class);
MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), "process", type(int.class), session, page, start, end, pageBuilder);
Scope scope = method.getScope();
Variable thisVariable = method.getThis();
Variable position = scope.declareVariable(int.class, "position");
method.getBody()
.comment("int position = start;")
.getVariable(start)
.putVariable(position);
List<Integer> allInputChannels = getInputChannels(Iterables.concat(projections, ImmutableList.of(filter)));
for (int channel : allInputChannels) {
Variable blockVariable = scope.declareVariable(Block.class, "block_" + channel);
method.getBody()
.comment("Block %s = page.getBlock(%s);", blockVariable.getName(), channel)
.getVariable(page)
.push(channel)
.invokeVirtual(Page.class, "getBlock", Block.class, int.class)
.putVariable(blockVariable);
}
//
        // for loop body
//
LabelNode done = new LabelNode("done");
ByteCodeBlock loopBody = new ByteCodeBlock();
ForLoop loop = new ForLoop()
.initialize(NOP)
.condition(new ByteCodeBlock()
.comment("position < end")
.getVariable(position)
.getVariable(end)
.invokeStatic(CompilerOperations.class, "lessThan", boolean.class, int.class, int.class)
)
.update(new ByteCodeBlock()
.comment("position++")
.incrementVariable(position, (byte) 1))
.body(loopBody);
loopBody.comment("if (pageBuilder.isFull()) break;")
.getVariable(pageBuilder)
.invokeVirtual(PageBuilder.class, "isFull", boolean.class)
.ifTrueGoto(done);
        // if (filter(session, blocks..., position))
IfStatement filterBlock = new IfStatement();
filterBlock.condition()
.append(thisVariable)
.getVariable(session)
.append(pushBlockVariables(scope, getInputChannels(filter)))
.getVariable(position)
.invokeVirtual(classDefinition.getType(),
"filter",
type(boolean.class),
ImmutableList.<ParameterizedType>builder()
.add(type(ConnectorSession.class))
.addAll(nCopies(getInputChannels(filter).size(), type(Block.class)))
.add(type(int.class))
.build());
filterBlock.ifTrue()
.append(pageBuilder)
.invokeVirtual(PageBuilder.class, "declarePosition", void.class);
for (int projectionIndex = 0; projectionIndex < projections.size(); projectionIndex++) {
List<Integer> inputChannels = getInputChannels(projections.get(projectionIndex));
filterBlock.ifTrue()
.append(thisVariable)
.append(session)
.append(pushBlockVariables(scope, inputChannels))
.getVariable(position);
filterBlock.ifTrue()
.comment("pageBuilder.getBlockBuilder(%d)", projectionIndex)
.append(pageBuilder)
.push(projectionIndex)
.invokeVirtual(PageBuilder.class, "getBlockBuilder", BlockBuilder.class, int.class);
filterBlock.ifTrue()
.comment("project_%d(session, block_%s, position, blockBuilder)", projectionIndex, inputChannels)
.invokeVirtual(classDefinition.getType(),
"project_" + projectionIndex,
type(void.class),
ImmutableList.<ParameterizedType>builder()
.add(type(ConnectorSession.class))
.addAll(nCopies(inputChannels.size(), type(Block.class)))
.add(type(int.class))
.add(type(BlockBuilder.class))
.build());
}
loopBody.append(filterBlock);
method.getBody()
.append(loop)
.visitLabel(done)
.comment("return position;")
.getVariable(position)
.retInt();
}
private void generateFilterMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter)
{
Parameter session = arg("session", ConnectorSession.class);
List<Parameter> blocks = toBlockParameters(getInputChannels(filter));
Parameter position = arg("position", int.class);
MethodDefinition method = classDefinition.declareMethod(
a(PUBLIC),
"filter",
type(boolean.class),
ImmutableList.<Parameter>builder()
.add(session)
.addAll(blocks)
.add(position)
.build());
method.comment("Filter: %s", filter.toString());
Scope scope = method.getScope();
Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(
callSiteBinder,
fieldReferenceCompiler(callSiteBinder, position, wasNullVariable),
metadata.getFunctionRegistry());
ByteCodeNode body = filter.accept(visitor, scope);
LabelNode end = new LabelNode("end");
method
.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false)
.append(body)
.getVariable(wasNullVariable)
.ifFalseGoto(end)
.pop(boolean.class)
.push(false)
.visitLabel(end)
.retBoolean();
}
private void generateProjectMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, String methodName, RowExpression projection)
{
Parameter session = arg("session", ConnectorSession.class);
List<Parameter> inputs = toBlockParameters(getInputChannels(projection));
Parameter position = arg("position", int.class);
Parameter output = arg("output", BlockBuilder.class);
MethodDefinition method = classDefinition.declareMethod(
a(PUBLIC),
methodName,
type(void.class),
ImmutableList.<Parameter>builder()
.add(session)
.addAll(inputs)
.add(position)
.add(output)
.build());
method.comment("Projection: %s", projection.toString());
Scope scope = method.getScope();
Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
ByteCodeBlock body = method.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false);
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(callSiteBinder, fieldReferenceCompiler(callSiteBinder, position, wasNullVariable), metadata.getFunctionRegistry());
body.getVariable(output)
.comment("evaluate projection: " + projection.toString())
.append(projection.accept(visitor, scope))
.append(generateWrite(callSiteBinder, scope, wasNullVariable, projection.getType()))
.ret();
}
private static List<Integer> getInputChannels(Iterable<RowExpression> expressions)
{
TreeSet<Integer> channels = new TreeSet<>();
for (RowExpression expression : Expressions.subExpressions(expressions)) {
if (expression instanceof InputReferenceExpression) {
channels.add(((InputReferenceExpression) expression).getField());
}
}
return ImmutableList.copyOf(channels);
}
private static List<Integer> getInputChannels(RowExpression expression)
{
return getInputChannels(ImmutableList.of(expression));
}
private static List<Parameter> toBlockParameters(List<Integer> inputChannels)
{
ImmutableList.Builder<Parameter> parameters = ImmutableList.builder();
for (int channel : inputChannels) {
parameters.add(arg("block_" + channel, Block.class));
}
return parameters.build();
}
private static ByteCodeNode pushBlockVariables(Scope scope, List<Integer> inputs)
{
ByteCodeBlock block = new ByteCodeBlock();
for (int channel : inputs) {
block.append(scope.getVariable("block_" + channel));
}
return block;
}
private RowExpressionVisitor<Scope, ByteCodeNode> fieldReferenceCompiler(final CallSiteBinder callSiteBinder, final Variable positionVariable, final Variable wasNullVariable)
{
return new RowExpressionVisitor<Scope, ByteCodeNode>()
{
@Override
public ByteCodeNode visitInputReference(InputReferenceExpression node, Scope scope)
{
int field = node.getField();
Type type = node.getType();
Variable block = scope.getVariable("block_" + field);
Class<?> javaType = type.getJavaType();
if (!javaType.isPrimitive() && javaType != Slice.class) {
javaType = Object.class;
}
IfStatement ifStatement = new IfStatement();
ifStatement.condition()
.setDescription(format("block_%d.get%s()", field, type))
.append(block)
.getVariable(positionVariable)
.invokeInterface(Block.class, "isNull", boolean.class, int.class);
ifStatement.ifTrue()
.putVariable(wasNullVariable, true)
.pushJavaDefault(javaType);
String methodName = "get" + Primitives.wrap(javaType).getSimpleName();
ifStatement.ifFalse()
.append(loadConstant(callSiteBinder.bind(type, Type.class)))
.append(block)
.getVariable(positionVariable)
.invokeInterface(Type.class, methodName, javaType, Block.class, int.class);
return ifStatement;
}
@Override
public ByteCodeNode visitCall(CallExpression call, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
@Override
public ByteCodeNode visitConstant(ConstantExpression literal, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
};
}
}
| deciament/presto | presto-main/src/main/java/com/facebook/presto/sql/gen/PageProcessorCompiler.java | Java | apache-2.0 | 15,098 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.inspections.quickfix;
import com.intellij.codeInsight.intention.LowPriorityAction;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ex.InspectionProfileModifiableModelKt;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.inspections.unresolvedReference.PyUnresolvedReferencesInspection;
import org.jetbrains.annotations.NotNull;
/**
* @author yole
*/
public class AddIgnoredIdentifierQuickFix implements LocalQuickFix, LowPriorityAction {
public static final String END_WILDCARD = ".*";
@NotNull private final QualifiedName myIdentifier;
private final boolean myIgnoreAllAttributes;
public AddIgnoredIdentifierQuickFix(@NotNull QualifiedName identifier, boolean ignoreAllAttributes) {
myIdentifier = identifier;
myIgnoreAllAttributes = ignoreAllAttributes;
}
@NotNull
@Override
public String getName() {
if (myIgnoreAllAttributes) {
return "Mark all unresolved attributes of '" + myIdentifier + "' as ignored";
}
else {
return "Ignore unresolved reference '" + myIdentifier + "'";
}
}
@NotNull
@Override
public String getFamilyName() {
return "Ignore unresolved reference";
}
@Override
public boolean startInWriteAction() {
return false;
}
@Override
public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
final PsiElement context = descriptor.getPsiElement();
InspectionProfileModifiableModelKt.modifyAndCommitProjectProfile(project, model -> {
PyUnresolvedReferencesInspection inspection =
(PyUnresolvedReferencesInspection)model.getUnwrappedTool(PyUnresolvedReferencesInspection.class.getSimpleName(), context);
String name = myIdentifier.toString();
if (myIgnoreAllAttributes) {
name += END_WILDCARD;
}
assert inspection != null;
if (!inspection.ignoredIdentifiers.contains(name)) {
inspection.ignoredIdentifiers.add(name);
}
});
}
}
| jk1/intellij-community | python/src/com/jetbrains/python/inspections/quickfix/AddIgnoredIdentifierQuickFix.java | Java | apache-2.0 | 2,763 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang.jvm.actions;
public interface CreateConstructorRequest extends CreateExecutableRequest {
}
| smmribeiro/intellij-community | java/java-analysis-api/src/com/intellij/lang/jvm/actions/CreateConstructorRequest.java | Java | apache-2.0 | 259 |
package com.baidu.disconf.web.service.user.vo;
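/**
 * Value object describing a visitor: numeric id, display name and role.
 */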
public class VisitorVo {
private Long id;
private String name;
private String role;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getRole() {
return role;
}
public void setRole(String role) {
this.role = role;
}
@Override
public String toString() {
return "VisitorVo [id=" + id + ", name=" + name + ", role=" + role + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((role == null) ? 0 : role.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
VisitorVo other = (VisitorVo) obj;
if (id == null) {
if (other.id != null) {
return false;
}
} else if (!id.equals(other.id)) {
return false;
}
if (name == null) {
if (other.name != null) {
return false;
}
} else if (!name.equals(other.name)) {
return false;
}
if (role == null) {
if (other.role != null) {
return false;
}
} else if (!role.equals(other.role)) {
return false;
}
return true;
}
}
| markyao/disconf | disconf-web/src/main/java/com/baidu/disconf/web/service/user/vo/VisitorVo.java | Java | apache-2.0 | 1,901 |
package org.opencv.samples.puzzle15;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import android.util.Log;
/**
 * This class is a controller for the puzzle game.
 * It converts the image from the camera into the shuffled image.
*/
public class Puzzle15Processor {
private static final int GRID_SIZE = 4;
private static final int GRID_AREA = GRID_SIZE * GRID_SIZE;
private static final int GRID_EMPTY_INDEX = GRID_AREA - 1;
private static final String TAG = "Puzzle15Processor";
private static final Scalar GRID_EMPTY_COLOR = new Scalar(0x33, 0x33, 0x33, 0xFF);
private int[] mIndexes;
private int[] mTextWidths;
private int[] mTextHeights;
private Mat mRgba15;
private Mat[] mCells15;
private boolean mShowTileNumbers = true;
public Puzzle15Processor() {
mTextWidths = new int[GRID_AREA];
mTextHeights = new int[GRID_AREA];
mIndexes = new int [GRID_AREA];
for (int i = 0; i < GRID_AREA; i++)
mIndexes[i] = i;
}
    /* this method is intended to prepare the processor for a new game */
public synchronized void prepareNewGame() {
do {
shuffle(mIndexes);
} while (!isPuzzleSolvable());
}
    /* This method lets the processor know the size of the frames that
     * will be delivered via puzzleFrame.
     * If frames of a different size are delivered, the result is unpredictable.
*/
public synchronized void prepareGameSize(int width, int height) {
mRgba15 = new Mat(height, width, CvType.CV_8UC4);
mCells15 = new Mat[GRID_AREA];
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE);
}
}
for (int i = 0; i < GRID_AREA; i++) {
Size s = Imgproc.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
}
    /* this method is to be called from the outside. it processes the frame and shuffles
     * the tiles as specified by the mIndexes array
*/
public synchronized Mat puzzleFrame(Mat inputPicture) {
Mat[] cells = new Mat[GRID_AREA];
int rows = inputPicture.rows();
int cols = inputPicture.cols();
rows = rows - rows%4;
cols = cols - cols%4;
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
cells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
}
}
// copy shuffled tiles
for (int i = 0; i < GRID_AREA; i++) {
int idx = mIndexes[i];
if (idx == GRID_EMPTY_INDEX)
mCells15[i].setTo(GRID_EMPTY_COLOR);
else {
cells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
Imgproc.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2,
(rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
}
}
}
for (int i = 0; i < GRID_AREA; i++)
cells[i].release();
drawGrid(cols, rows, mRgba15);
return mRgba15;
}
public void toggleTileNumbers() {
mShowTileNumbers = !mShowTileNumbers;
}
public void deliverTouchEvent(int x, int y) {
int rows = mRgba15.rows();
int cols = mRgba15.cols();
int row = (int) Math.floor(y * GRID_SIZE / rows);
int col = (int) Math.floor(x * GRID_SIZE / cols);
if (row < 0 || row >= GRID_SIZE || col < 0 || col >= GRID_SIZE) {
Log.e(TAG, "It is not expected to get touch event outside of picture");
return ;
}
int idx = row * GRID_SIZE + col;
int idxtoswap = -1;
// left
if (idxtoswap < 0 && col > 0)
if (mIndexes[idx - 1] == GRID_EMPTY_INDEX)
idxtoswap = idx - 1;
// right
if (idxtoswap < 0 && col < GRID_SIZE - 1)
if (mIndexes[idx + 1] == GRID_EMPTY_INDEX)
idxtoswap = idx + 1;
// top
if (idxtoswap < 0 && row > 0)
if (mIndexes[idx - GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx - GRID_SIZE;
// bottom
if (idxtoswap < 0 && row < GRID_SIZE - 1)
if (mIndexes[idx + GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx + GRID_SIZE;
// swap
if (idxtoswap >= 0) {
synchronized (this) {
int touched = mIndexes[idx];
mIndexes[idx] = mIndexes[idxtoswap];
mIndexes[idxtoswap] = touched;
}
}
}
private void drawGrid(int cols, int rows, Mat drawMat) {
for (int i = 1; i < GRID_SIZE; i++) {
Imgproc.line(drawMat, new Point(0, i * rows / GRID_SIZE), new Point(cols, i * rows / GRID_SIZE), new Scalar(0, 255, 0, 255), 3);
Imgproc.line(drawMat, new Point(i * cols / GRID_SIZE, 0), new Point(i * cols / GRID_SIZE, rows), new Scalar(0, 255, 0, 255), 3);
}
}
private static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) {
int temp = array[i - 1];
int randIx = (int) (Math.random() * i);
array[i - 1] = array[randIx];
array[randIx] = temp;
}
}
private boolean isPuzzleSolvable() {
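        // 15-puzzle solvability check: the number of inversions plus the 1-based row
        // of the empty tile must be even for the shuffle to be solvable.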
int sum = 0;
for (int i = 0; i < GRID_AREA; i++) {
if (mIndexes[i] == GRID_EMPTY_INDEX)
sum += (i / GRID_SIZE) + 1;
else {
int smaller = 0;
for (int j = i + 1; j < GRID_AREA; j++) {
if (mIndexes[j] < mIndexes[i])
smaller++;
}
sum += smaller;
}
}
return sum % 2 == 0;
}
}
| apavlenko/opencv | samples/android/15-puzzle/src/org/opencv/samples/puzzle15/Puzzle15Processor.java | Java | bsd-3-clause | 6,624 |
/* Copyright (c) 2001-2009, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb_voltpatches.lib;
/**
* This should be used as the datatype for parameters and instance variables
 * instead of HsqlArrayList or HsqlLinkedList to allow interchangeable use of the
* two.
*
* @author dnordahl@users
* @version 1.7.2
* @since 1.7.2
*/
public interface HsqlList extends Collection {
void add(int index, Object element);
boolean add(Object element);
Object get(int index);
Object remove(int index);
Object set(int index, Object element);
boolean isEmpty();
int size();
Iterator iterator();
}
| kumarrus/voltdb | src/hsqldb19b3/org/hsqldb_voltpatches/lib/HsqlList.java | Java | agpl-3.0 | 2,168 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.maven.slingstart.run;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
/**
* Stops the running launchpad instances.
*
*/
@Mojo(
name = "stop",
defaultPhase = LifecyclePhase.POST_INTEGRATION_TEST,
threadSafe = true
)
public class StopMojo extends StartMojo {
@Override
public void execute() throws MojoExecutionException {
if (this.skipLaunchpad) {
this.getLog().info("Executing of the stop-multiple launchpad mojo is disabled by configuration.");
return;
}
// read configurations
final Properties launchpadConfigProps = new Properties();
Reader reader = null;
try {
reader = new FileReader(this.systemPropertiesFile);
launchpadConfigProps.load(reader);
} catch ( final IOException ioe) {
throw new MojoExecutionException("Unable to read launchpad runner configuration properties.", ioe);
} finally {
IOUtils.closeQuietly(reader);
}
final int instances = Integer.valueOf(launchpadConfigProps.getProperty("launchpad.instances"));
final List<ProcessDescription> configurations = new ArrayList<ProcessDescription>();
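        // Look up each stored instance id; instances without a run configuration
        // registered in the ProcessDescriptionProvider are skipped with a warning.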
for(int i=1;i<=instances;i++) {
final String id = launchpadConfigProps.getProperty("launchpad.instance.id." + String.valueOf(i));
final ProcessDescription config = ProcessDescriptionProvider.getInstance().getRunConfiguration(id);
if ( config == null ) {
getLog().warn("No launchpad configuration found for instance " + id);
} else {
configurations.add(config);
}
}
if (configurations.size() > 0) {
getLog().info(new StringBuilder("Stopping ").append(configurations.size()).append(" Launchpad instances").toString());
for (final ProcessDescription cfg : configurations) {
try {
LauncherCallable.stop(this.getLog(), cfg);
ProcessDescriptionProvider.getInstance().removeRunConfiguration(cfg.getId());
} catch (Exception e) {
throw new MojoExecutionException("Could not stop launchpad " + cfg.getId(), e);
}
}
} else {
getLog().warn("No stored configuration file was found at " + this.systemPropertiesFile + " - no Launchapd will be stopped");
}
}
}
| dulvac/sling | tooling/maven/slingstart-maven-plugin/src/main/java/org/apache/sling/maven/slingstart/run/StopMojo.java | Java | apache-2.0 | 3,583 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ml.action;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig;
import org.elasticsearch.xpack.core.ml.job.config.PerPartitionCategorizationConfig;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
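/**
 * Internal action whose {@link Request} carries configuration updates for a running job's
 * task: model plot config, per-partition categorization config, per-detector updates, an
 * {@link MlFilter}, and a flag indicating whether scheduled events should be updated.
 */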
public class UpdateProcessAction extends ActionType<UpdateProcessAction.Response> {
public static final UpdateProcessAction INSTANCE = new UpdateProcessAction();
public static final String NAME = "cluster:internal/xpack/ml/job/update/process";
private UpdateProcessAction() {
super(NAME, UpdateProcessAction.Response::new);
}
public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable {
private final boolean isUpdated;
public Response() {
super(null, null);
this.isUpdated = true;
}
public Response(StreamInput in) throws IOException {
super(in);
isUpdated = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(isUpdated);
}
public boolean isUpdated() {
return isUpdated;
}
@Override
public RestStatus status() {
return RestStatus.ACCEPTED;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("updated", isUpdated);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hashCode(isUpdated);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Response other = (Response) obj;
return this.isUpdated == other.isUpdated;
}
}
public static class Request extends JobTaskRequest<Request> {
private ModelPlotConfig modelPlotConfig;
private PerPartitionCategorizationConfig perPartitionCategorizationConfig;
private List<JobUpdate.DetectorUpdate> detectorUpdates;
private MlFilter filter;
private boolean updateScheduledEvents = false;
public Request(StreamInput in) throws IOException {
super(in);
modelPlotConfig = in.readOptionalWriteable(ModelPlotConfig::new);
perPartitionCategorizationConfig = in.readOptionalWriteable(PerPartitionCategorizationConfig::new);
if (in.readBoolean()) {
detectorUpdates = in.readList(JobUpdate.DetectorUpdate::new);
}
filter = in.readOptionalWriteable(MlFilter::new);
updateScheduledEvents = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalWriteable(modelPlotConfig);
out.writeOptionalWriteable(perPartitionCategorizationConfig);
boolean hasDetectorUpdates = detectorUpdates != null;
out.writeBoolean(hasDetectorUpdates);
if (hasDetectorUpdates) {
out.writeList(detectorUpdates);
}
out.writeOptionalWriteable(filter);
out.writeBoolean(updateScheduledEvents);
}
public Request(
String jobId,
ModelPlotConfig modelPlotConfig,
PerPartitionCategorizationConfig perPartitionCategorizationConfig,
List<JobUpdate.DetectorUpdate> detectorUpdates,
MlFilter filter,
boolean updateScheduledEvents
) {
super(jobId);
this.modelPlotConfig = modelPlotConfig;
this.perPartitionCategorizationConfig = perPartitionCategorizationConfig;
this.detectorUpdates = detectorUpdates;
this.filter = filter;
this.updateScheduledEvents = updateScheduledEvents;
}
public ModelPlotConfig getModelPlotConfig() {
return modelPlotConfig;
}
public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() {
return perPartitionCategorizationConfig;
}
public List<JobUpdate.DetectorUpdate> getDetectorUpdates() {
return detectorUpdates;
}
public MlFilter getFilter() {
return filter;
}
public boolean isUpdateScheduledEvents() {
return updateScheduledEvents;
}
@Override
public int hashCode() {
return Objects.hash(
getJobId(),
modelPlotConfig,
perPartitionCategorizationConfig,
detectorUpdates,
filter,
updateScheduledEvents
);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Request other = (Request) obj;
return Objects.equals(getJobId(), other.getJobId())
&& Objects.equals(modelPlotConfig, other.modelPlotConfig)
&& Objects.equals(perPartitionCategorizationConfig, other.perPartitionCategorizationConfig)
&& Objects.equals(detectorUpdates, other.detectorUpdates)
&& Objects.equals(filter, other.filter)
&& Objects.equals(updateScheduledEvents, other.updateScheduledEvents);
}
}
}
| GlenRSmith/elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java | Java | apache-2.0 | 6,629 |
/**
* Copyright (C) 2011, 2012 camunda services GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.integrationtest.deployment.war;
import org.camunda.bpm.engine.RepositoryService;
import org.camunda.bpm.integrationtest.util.AbstractFoxPlatformIntegrationTest;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(Arquillian.class)
public class TestWarDeployment extends AbstractFoxPlatformIntegrationTest {
@Deployment
public static WebArchive processArchive() {
return initWebArchiveDeployment()
.addAsResource("org/camunda/bpm/integrationtest/testDeployProcessArchive.bpmn20.xml");
}
@Test
public void testDeployProcessArchive() {
Assert.assertNotNull(processEngine);
RepositoryService repositoryService = processEngine.getRepositoryService();
long count = repositoryService.createProcessDefinitionQuery()
.processDefinitionKey("testDeployProcessArchive")
.count();
Assert.assertEquals(1, count);
}
}
| subhrajyotim/camunda-bpm-platform | qa/integration-tests-engine/src/test/java/org/camunda/bpm/integrationtest/deployment/war/TestWarDeployment.java | Java | apache-2.0 | 1,697 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.threadpool;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
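/**
 * Per-thread-pool statistics (thread count, queue size, active threads, rejections,
 * largest pool size and completed tasks). Entries are kept sorted by pool name and are
 * rendered under the "thread_pool" object when serialized to XContent.
 */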
public class ThreadPoolStats implements Writeable, ToXContentFragment, Iterable<ThreadPoolStats.Stats> {
public static class Stats implements Writeable, ToXContentFragment, Comparable<Stats> {
private final String name;
private final int threads;
private final int queue;
private final int active;
private final long rejected;
private final int largest;
private final long completed;
public Stats(String name, int threads, int queue, int active, long rejected, int largest, long completed) {
this.name = name;
this.threads = threads;
this.queue = queue;
this.active = active;
this.rejected = rejected;
this.largest = largest;
this.completed = completed;
}
public Stats(StreamInput in) throws IOException {
name = in.readString();
threads = in.readInt();
queue = in.readInt();
active = in.readInt();
rejected = in.readLong();
largest = in.readInt();
completed = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeInt(threads);
out.writeInt(queue);
out.writeInt(active);
out.writeLong(rejected);
out.writeInt(largest);
out.writeLong(completed);
}
public String getName() {
return this.name;
}
public int getThreads() {
return this.threads;
}
public int getQueue() {
return this.queue;
}
public int getActive() {
return this.active;
}
public long getRejected() {
return rejected;
}
public int getLargest() {
return largest;
}
public long getCompleted() {
return this.completed;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
if (threads != -1) {
builder.field(Fields.THREADS, threads);
}
if (queue != -1) {
builder.field(Fields.QUEUE, queue);
}
if (active != -1) {
builder.field(Fields.ACTIVE, active);
}
if (rejected != -1) {
builder.field(Fields.REJECTED, rejected);
}
if (largest != -1) {
builder.field(Fields.LARGEST, largest);
}
if (completed != -1) {
builder.field(Fields.COMPLETED, completed);
}
builder.endObject();
return builder;
}
@Override
public int compareTo(Stats other) {
if ((getName() == null) && (other.getName() == null)) {
return 0;
} else if ((getName() != null) && (other.getName() == null)) {
return 1;
} else if (getName() == null) {
return -1;
} else {
int compare = getName().compareTo(other.getName());
if (compare == 0) {
compare = Integer.compare(getThreads(), other.getThreads());
}
return compare;
}
}
}
private List<Stats> stats;
public ThreadPoolStats(List<Stats> stats) {
Collections.sort(stats);
this.stats = stats;
}
public ThreadPoolStats(StreamInput in) throws IOException {
stats = in.readList(Stats::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeList(stats);
}
@Override
public Iterator<Stats> iterator() {
return stats.iterator();
}
static final class Fields {
static final String THREAD_POOL = "thread_pool";
static final String THREADS = "threads";
static final String QUEUE = "queue";
static final String ACTIVE = "active";
static final String REJECTED = "rejected";
static final String LARGEST = "largest";
static final String COMPLETED = "completed";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.THREAD_POOL);
for (Stats stat : stats) {
stat.toXContent(builder, params);
}
builder.endObject();
return builder;
}
}
| GlenRSmith/elasticsearch | server/src/main/java/org/elasticsearch/threadpool/ThreadPoolStats.java | Java | apache-2.0 | 5,488 |
/*
* Copyright 2003,2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.cglib.proxy;
import java.util.List;
import net.sf.cglib.core.*;
interface CallbackGenerator
{
void generate(ClassEmitter ce, Context context, List methods) throws Exception;
void generateStatic(CodeEmitter e, Context context, List methods) throws Exception;
interface Context
{
ClassLoader getClassLoader();
CodeEmitter beginMethod(ClassEmitter ce, MethodInfo method);
int getOriginalModifiers(MethodInfo method);
int getIndex(MethodInfo method);
void emitCallback(CodeEmitter ce, int index);
Signature getImplSignature(MethodInfo method);
void emitInvoke(CodeEmitter e, MethodInfo method);
}
}
| vongosling/cglib-ext | src/proxy/net/sf/cglib/proxy/CallbackGenerator.java | Java | apache-2.0 | 1,311 |
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.models.datamodel.rule;
public class ExpressionUnboundFact extends ExpressionPart {
private String factType;
public ExpressionUnboundFact() {
}
public ExpressionUnboundFact( String factType ) {
super( factType,
factType,
factType );
this.factType = factType;
}
public String getFactType() {
return factType;
}
@Override
public void accept( ExpressionVisitor visitor ) {
visitor.visit( this );
}
@Override
public boolean equals( Object o ) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
if ( !super.equals( o ) ) {
return false;
}
ExpressionUnboundFact that = (ExpressionUnboundFact) o;
if ( factType != null ? !factType.equals( that.factType ) : that.factType != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = ~~result;
result = 31 * result + ( factType != null ? factType.hashCode() : 0 );
result = ~~result;
return result;
}
}
| rokn/Count_Words_2015 | testing/drools-master/drools-workbench-models/drools-workbench-models-datamodel-api/src/main/java/org/drools/workbench/models/datamodel/rule/ExpressionUnboundFact.java | Java | mit | 1,906 |
/*
* Copyright 2014 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marshalchen.common.demoofui.observablescrollview;
import android.os.Bundle;
import android.support.v4.view.ViewCompat;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollView;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollViewCallbacks;
import com.github.ksoichiro.android.observablescrollview.ObservableWebView;
import com.github.ksoichiro.android.observablescrollview.ScrollState;
import com.marshalchen.common.demoofui.R;
import com.nineoldandroids.view.ViewHelper;
import com.nineoldandroids.view.ViewPropertyAnimator;
public class ToolbarControlWebViewActivity extends ActionBarActivity {
private View mHeaderView;
private View mToolbarView;
private ObservableScrollView mScrollView;
private boolean mFirstScroll;
private boolean mDragging;
private int mBaseTranslationY;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.observable_scroll_view_activity_toolbarcontrolwebview);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mHeaderView = findViewById(R.id.header);
ViewCompat.setElevation(mHeaderView, getResources().getDimension(R.dimen.toolbar_elevation));
mToolbarView = findViewById(R.id.toolbar);
mScrollView = (ObservableScrollView) findViewById(R.id.scroll);
mScrollView.setScrollViewCallbacks(mScrollViewScrollCallbacks);
ObservableWebView mWebView = (ObservableWebView) findViewById(R.id.web);
mWebView.setScrollViewCallbacks(mWebViewScrollCallbacks);
mWebView.loadUrl("file:///android_asset/lipsum.html");
}
private ObservableScrollViewCallbacks mScrollViewScrollCallbacks = new ObservableScrollViewCallbacks() {
@Override
public void onScrollChanged(int scrollY, boolean firstScroll, boolean dragging) {
if (mDragging) {
int toolbarHeight = mToolbarView.getHeight();
if (mFirstScroll) {
mFirstScroll = false;
float currentHeaderTranslationY = ViewHelper.getTranslationY(mHeaderView);
if (-toolbarHeight < currentHeaderTranslationY && toolbarHeight < scrollY) {
mBaseTranslationY = scrollY;
}
}
int headerTranslationY = Math.min(0, Math.max(-toolbarHeight, -(scrollY - mBaseTranslationY)));
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewHelper.setTranslationY(mHeaderView, headerTranslationY);
}
}
@Override
public void onDownMotionEvent() {
}
@Override
public void onUpOrCancelMotionEvent(ScrollState scrollState) {
mDragging = false;
mBaseTranslationY = 0;
float headerTranslationY = ViewHelper.getTranslationY(mHeaderView);
int toolbarHeight = mToolbarView.getHeight();
if (scrollState == ScrollState.UP) {
if (toolbarHeight < mScrollView.getCurrentScrollY()) {
if (headerTranslationY != -toolbarHeight) {
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewPropertyAnimator.animate(mHeaderView).translationY(-toolbarHeight).setDuration(200).start();
}
}
} else if (scrollState == ScrollState.DOWN) {
if (toolbarHeight < mScrollView.getCurrentScrollY()) {
if (headerTranslationY != 0) {
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewPropertyAnimator.animate(mHeaderView).translationY(0).setDuration(200).start();
}
}
}
}
};
private ObservableScrollViewCallbacks mWebViewScrollCallbacks = new ObservableScrollViewCallbacks() {
@Override
public void onScrollChanged(int scrollY, boolean firstScroll, boolean dragging) {
}
@Override
public void onDownMotionEvent() {
            // Workaround: WebView inside a ScrollView absorbs down motion events, so observing the
// down motion event from the WebView is required.
mFirstScroll = mDragging = true;
}
@Override
public void onUpOrCancelMotionEvent(ScrollState scrollState) {
}
};
}
| cymcsg/UltimateAndroid | deprecated/UltimateAndroidGradle/demoofui/src/main/java/com/marshalchen/common/demoofui/observablescrollview/ToolbarControlWebViewActivity.java | Java | apache-2.0 | 5,205 |
/*
* Copyright (C) 2016 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3;
import java.io.IOException;
import okio.Buffer;
import okio.BufferedSource;
import okio.Okio;
import okio.Source;
import okio.Timeout;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public final class ResponseTest {
@Test public void peekShorterThanResponse() throws Exception {
Response response = newResponse(responseBody("abcdef"));
ResponseBody peekedBody = response.peekBody(3);
assertEquals("abc", peekedBody.string());
assertEquals("abcdef", response.body().string());
}
@Test public void peekLongerThanResponse() throws Exception {
Response response = newResponse(responseBody("abc"));
ResponseBody peekedBody = response.peekBody(6);
assertEquals("abc", peekedBody.string());
assertEquals("abc", response.body().string());
}
@Test public void peekAfterReadingResponse() throws Exception {
Response response = newResponse(responseBody("abc"));
assertEquals("abc", response.body().string());
try {
response.peekBody(3);
fail();
} catch (IllegalStateException expected) {
}
}
  @Test public void eachPeekIsIndependent() throws Exception {
Response response = newResponse(responseBody("abcdef"));
ResponseBody p1 = response.peekBody(4);
ResponseBody p2 = response.peekBody(2);
assertEquals("abcdef", response.body().string());
assertEquals("abcd", p1.string());
assertEquals("ab", p2.string());
}
/**
* Returns a new response body that refuses to be read once it has been closed. This is true of
* most {@link BufferedSource} instances, but not of {@link Buffer}.
*/
private ResponseBody responseBody(String content) {
final Buffer data = new Buffer().writeUtf8(content);
Source source = new Source() {
boolean closed;
@Override public void close() throws IOException {
closed = true;
}
@Override public long read(Buffer sink, long byteCount) throws IOException {
if (closed) throw new IllegalStateException();
return data.read(sink, byteCount);
}
@Override public Timeout timeout() {
return Timeout.NONE;
}
};
return ResponseBody.create(null, -1, Okio.buffer(source));
}
private Response newResponse(ResponseBody responseBody) {
return new Response.Builder()
.request(new Request.Builder()
.url("https://example.com/")
.build())
.protocol(Protocol.HTTP_1_1)
.code(200)
.body(responseBody)
.build();
}
}
| zmarkan/okhttp | okhttp-tests/src/test/java/okhttp3/ResponseTest.java | Java | apache-2.0 | 3,177 |
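As a caller-side counterpart to the tests above, a hedged sketch of peekBody in ordinary client code; the URL is a placeholder and PeekBodySketch is a hypothetical class.

import java.io.IOException;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;

public class PeekBodySketch {
    public static void main(String[] args) throws IOException {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder().url("https://example.com/").build();

        Response response = client.newCall(request).execute();
        ResponseBody peeked = response.peekBody(1024);            // copies at most 1024 bytes
        System.out.println("peek: " + peeked.string());
        System.out.println("full: " + response.body().string());  // the real body is still readable
    }
}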
/*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.media.sound;
/**
* This class is used to identify destinations in connection blocks,
* see ModelConnectionBlock.
*
* @author Karl Helgason
*/
public final class ModelDestination {
public static final ModelIdentifier DESTINATION_NONE = null;
public static final ModelIdentifier DESTINATION_KEYNUMBER
= new ModelIdentifier("noteon", "keynumber");
public static final ModelIdentifier DESTINATION_VELOCITY
= new ModelIdentifier("noteon", "velocity");
public static final ModelIdentifier DESTINATION_PITCH
= new ModelIdentifier("osc", "pitch"); // cent
public static final ModelIdentifier DESTINATION_GAIN
= new ModelIdentifier("mixer", "gain"); // cB
public static final ModelIdentifier DESTINATION_PAN
= new ModelIdentifier("mixer", "pan"); // 0.1 %
public static final ModelIdentifier DESTINATION_REVERB
= new ModelIdentifier("mixer", "reverb"); // 0.1 %
public static final ModelIdentifier DESTINATION_CHORUS
= new ModelIdentifier("mixer", "chorus"); // 0.1 %
public static final ModelIdentifier DESTINATION_LFO1_DELAY
= new ModelIdentifier("lfo", "delay", 0); // timecent
public static final ModelIdentifier DESTINATION_LFO1_FREQ
= new ModelIdentifier("lfo", "freq", 0); // cent
public static final ModelIdentifier DESTINATION_LFO2_DELAY
= new ModelIdentifier("lfo", "delay", 1); // timecent
public static final ModelIdentifier DESTINATION_LFO2_FREQ
= new ModelIdentifier("lfo", "freq", 1); // cent
public static final ModelIdentifier DESTINATION_EG1_DELAY
= new ModelIdentifier("eg", "delay", 0); // timecent
public static final ModelIdentifier DESTINATION_EG1_ATTACK
= new ModelIdentifier("eg", "attack", 0); // timecent
public static final ModelIdentifier DESTINATION_EG1_HOLD
= new ModelIdentifier("eg", "hold", 0); // timecent
public static final ModelIdentifier DESTINATION_EG1_DECAY
= new ModelIdentifier("eg", "decay", 0); // timecent
public static final ModelIdentifier DESTINATION_EG1_SUSTAIN
= new ModelIdentifier("eg", "sustain", 0);
    // 0.1 % (I want this to be a value, not a %)
public static final ModelIdentifier DESTINATION_EG1_RELEASE
= new ModelIdentifier("eg", "release", 0); // timecent
public static final ModelIdentifier DESTINATION_EG1_SHUTDOWN
= new ModelIdentifier("eg", "shutdown", 0); // timecent
public static final ModelIdentifier DESTINATION_EG2_DELAY
= new ModelIdentifier("eg", "delay", 1); // timecent
public static final ModelIdentifier DESTINATION_EG2_ATTACK
= new ModelIdentifier("eg", "attack", 1); // timecent
public static final ModelIdentifier DESTINATION_EG2_HOLD
= new ModelIdentifier("eg", "hold", 1); // 0.1 %
public static final ModelIdentifier DESTINATION_EG2_DECAY
= new ModelIdentifier("eg", "decay", 1); // timecent
public static final ModelIdentifier DESTINATION_EG2_SUSTAIN
= new ModelIdentifier("eg", "sustain", 1);
    // 0.1 % (I want this to be a value, not a %)
public static final ModelIdentifier DESTINATION_EG2_RELEASE
= new ModelIdentifier("eg", "release", 1); // timecent
public static final ModelIdentifier DESTINATION_EG2_SHUTDOWN
= new ModelIdentifier("eg", "shutdown", 1); // timecent
public static final ModelIdentifier DESTINATION_FILTER_FREQ
= new ModelIdentifier("filter", "freq", 0); // cent
public static final ModelIdentifier DESTINATION_FILTER_Q
= new ModelIdentifier("filter", "q", 0); // cB
private ModelIdentifier destination = DESTINATION_NONE;
private ModelTransform transform = new ModelStandardTransform();
public ModelDestination() {
}
public ModelDestination(ModelIdentifier id) {
destination = id;
}
public ModelIdentifier getIdentifier() {
return destination;
}
public void setIdentifier(ModelIdentifier destination) {
this.destination = destination;
}
public ModelTransform getTransform() {
return transform;
}
public void setTransform(ModelTransform transform) {
this.transform = transform;
}
}
| FauxFaux/jdk9-jdk | src/java.desktop/share/classes/com/sun/media/sound/ModelDestination.java | Java | gpl-2.0 | 5,633 |
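A small, hedged usage sketch of the destination/transform pair described in the javadoc above. com.sun.media.sound is an internal JDK package, so this only compiles on JDKs that still expose it, and ModelDestinationSketch is a hypothetical class.

import com.sun.media.sound.ModelDestination;
import com.sun.media.sound.ModelStandardTransform;

public class ModelDestinationSketch {
    public static void main(String[] args) {
        // Route a connection block to the oscillator pitch destination with the default transform.
        ModelDestination dest = new ModelDestination(ModelDestination.DESTINATION_PITCH);
        dest.setTransform(new ModelStandardTransform());
        System.out.println(dest.getIdentifier()); // the osc/pitch identifier; exact text depends on ModelIdentifier.toString()
    }
}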
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
*
*/
package org.apache.axis2.jaxws.message.databinding.impl;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMOutputFormat;
import org.apache.axis2.jaxws.ExceptionFactory;
import org.apache.axis2.jaxws.message.Message;
import org.apache.axis2.jaxws.message.databinding.SOAPEnvelopeBlock;
import org.apache.axis2.jaxws.message.factory.BlockFactory;
import org.apache.axis2.jaxws.message.factory.MessageFactory;
import org.apache.axis2.jaxws.message.impl.BlockImpl;
import org.apache.axis2.jaxws.message.util.SOAPElementReader;
import org.apache.axis2.jaxws.registry.FactoryRegistry;
import javax.xml.namespace.QName;
import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.ws.WebServiceException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
/**
*
*
*/
public class SOAPEnvelopeBlockImpl extends BlockImpl implements SOAPEnvelopeBlock {
/**
* Called by SOAPEnvelopeBlockFactory
*
* @param busObject
* @param busContext
* @param qName
* @param factory
*/
public SOAPEnvelopeBlockImpl(Object busObject, Object busContext,
QName qName, BlockFactory factory) {
super(busObject,
busContext,
(qName == null) ? getQName((SOAPEnvelope)busObject) : qName,
factory);
}
/**
* Called by SOAPEnvelopeBlockFactory
*
* @param omElement
* @param busContext
* @param qName
* @param factory
*/
public SOAPEnvelopeBlockImpl(OMElement omElement, Object busContext,
QName qName, BlockFactory factory) {
super(omElement, busContext, qName, factory);
}
/* (non-Javadoc)
* @see org.apache.axis2.jaxws.message.impl.BlockImpl#_getBOFromReader(javax.xml.stream.XMLStreamReader, java.lang.Object)
*/
@Override
protected Object _getBOFromReader(XMLStreamReader reader, Object busContext)
throws XMLStreamException, WebServiceException {
MessageFactory mf = (MessageFactory)FactoryRegistry.getFactory(MessageFactory.class);
Message message = mf.createFrom(reader, null);
SOAPEnvelope env = message.getAsSOAPEnvelope();
this.setQName(getQName(env));
return env;
}
/* (non-Javadoc)
* @see org.apache.axis2.jaxws.message.impl.BlockImpl#_getReaderFromBO(java.lang.Object, java.lang.Object)
*/
@Override
protected XMLStreamReader _getReaderFromBO(Object busObj, Object busContext)
throws XMLStreamException, WebServiceException {
return new SOAPElementReader((SOAPElement)busObj);
}
/* (non-Javadoc)
* @see org.apache.axis2.jaxws.message.impl.BlockImpl#_outputFromBO(java.lang.Object, java.lang.Object, javax.xml.stream.XMLStreamWriter)
*/
@Override
protected void _outputFromBO(Object busObject, Object busContext,
XMLStreamWriter writer)
throws XMLStreamException, WebServiceException {
XMLStreamReader reader = _getReaderFromBO(busObject, busContext);
_outputFromReader(reader, writer);
}
/**
* Get the QName of the envelope
*
* @param env
* @return QName
*/
private static QName getQName(SOAPEnvelope env) {
return new QName(env.getNamespaceURI(), env.getLocalName(), env.getPrefix());
}
public boolean isElementData() {
return true;
}
public void close() {
return; // Nothing to close
}
public InputStream getXMLInputStream(String encoding) throws UnsupportedEncodingException {
byte[] bytes = getXMLBytes(encoding);
return new ByteArrayInputStream(bytes);
}
public Object getObject() {
try {
return getBusinessObject(false);
} catch (XMLStreamException e) {
throw ExceptionFactory.makeWebServiceException(e);
}
}
public boolean isDestructiveRead() {
return false;
}
public boolean isDestructiveWrite() {
return false;
}
public byte[] getXMLBytes(String encoding) throws UnsupportedEncodingException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OMOutputFormat format = new OMOutputFormat();
format.setCharSetEncoding(encoding);
try {
serialize(baos, format);
baos.flush();
return baos.toByteArray();
} catch (XMLStreamException e) {
throw ExceptionFactory.makeWebServiceException(e);
} catch (IOException e) {
throw ExceptionFactory.makeWebServiceException(e);
}
}
}
| arunasujith/wso2-axis2 | modules/jaxws/src/org/apache/axis2/jaxws/message/databinding/impl/SOAPEnvelopeBlockImpl.java | Java | apache-2.0 | 5,741 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.util.Map;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
/**
* An immutable class representing keyspace parameters (durability and replication).
*/
public final class KeyspaceParams
{
public static final boolean DEFAULT_DURABLE_WRITES = true;
/**
* This determines durable writes for the {@link org.apache.cassandra.config.SchemaConstants#SCHEMA_KEYSPACE_NAME}
     * and {@link org.apache.cassandra.config.SchemaConstants#SYSTEM_KEYSPACE_NAME} keyspaces;
* the only reason it is not final is for commitlog unit tests. It should only be changed for testing purposes.
*/
@VisibleForTesting
public static boolean DEFAULT_LOCAL_DURABLE_WRITES = true;
public enum Option
{
DURABLE_WRITES,
REPLICATION;
@Override
public String toString()
{
return name().toLowerCase();
}
}
public final boolean durableWrites;
public final ReplicationParams replication;
public KeyspaceParams(boolean durableWrites, ReplicationParams replication)
{
this.durableWrites = durableWrites;
this.replication = replication;
}
public static KeyspaceParams create(boolean durableWrites, Map<String, String> replication)
{
return new KeyspaceParams(durableWrites, ReplicationParams.fromMap(replication));
}
public static KeyspaceParams local()
{
return new KeyspaceParams(DEFAULT_LOCAL_DURABLE_WRITES, ReplicationParams.local());
}
public static KeyspaceParams simple(int replicationFactor)
{
return new KeyspaceParams(true, ReplicationParams.simple(replicationFactor));
}
public static KeyspaceParams simpleTransient(int replicationFactor)
{
return new KeyspaceParams(false, ReplicationParams.simple(replicationFactor));
}
public static KeyspaceParams nts(Object... args)
{
return new KeyspaceParams(true, ReplicationParams.nts(args));
}
public void validate(String name)
{
replication.validate(name);
}
@Override
public boolean equals(Object o)
{
if (this == o)
return true;
if (!(o instanceof KeyspaceParams))
return false;
KeyspaceParams p = (KeyspaceParams) o;
return durableWrites == p.durableWrites && replication.equals(p.replication);
}
@Override
public int hashCode()
{
return Objects.hashCode(durableWrites, replication);
}
@Override
public String toString()
{
return MoreObjects.toStringHelper(this)
.add(Option.DURABLE_WRITES.toString(), durableWrites)
.add(Option.REPLICATION.toString(), replication)
.toString();
}
}
| yhnishi/cassandra | src/java/org/apache/cassandra/schema/KeyspaceParams.java | Java | apache-2.0 | 3,724 |
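A hedged sketch of the factory methods above; the alternating datacenter-name/replication-factor convention for nts(...) is an assumption about ReplicationParams, and validate(...) may need a fuller Cassandra runtime than a bare JVM.

import org.apache.cassandra.schema.KeyspaceParams;

public class KeyspaceParamsSketch {
    public static void main(String[] args) {
        KeyspaceParams simple = KeyspaceParams.simple(3);               // SimpleStrategy, RF 3, durable writes on
        KeyspaceParams perDc = KeyspaceParams.nts("dc1", 3, "dc2", 2);  // assumed (dc, rf) pairs
        perDc.validate("my_keyspace");                                  // delegates to ReplicationParams.validate
        System.out.println(simple);
        System.out.println(perDc);
    }
}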
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.storage;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.runtime.WorkerConfig;
import org.apache.kafka.connect.util.Callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
* Implementation of OffsetBackingStore that doesn't actually persist any data. To ensure this
* behaves similarly to a real backing store, operations are executed asynchronously on a
* background thread.
*/
public class MemoryOffsetBackingStore implements OffsetBackingStore {
private static final Logger log = LoggerFactory.getLogger(MemoryOffsetBackingStore.class);
protected Map<ByteBuffer, ByteBuffer> data = new HashMap<>();
protected ExecutorService executor;
public MemoryOffsetBackingStore() {
}
@Override
public void configure(WorkerConfig config) {
}
@Override
public void start() {
executor = Executors.newSingleThreadExecutor();
}
@Override
public void stop() {
if (executor != null) {
executor.shutdown();
// Best effort wait for any get() and set() tasks (and caller's callbacks) to complete.
try {
executor.awaitTermination(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!executor.shutdownNow().isEmpty()) {
throw new ConnectException("Failed to stop MemoryOffsetBackingStore. Exiting without cleanly " +
"shutting down pending tasks and/or callbacks.");
}
executor = null;
}
}
@Override
public Future<Map<ByteBuffer, ByteBuffer>> get(
final Collection<ByteBuffer> keys,
final Callback<Map<ByteBuffer, ByteBuffer>> callback) {
return executor.submit(new Callable<Map<ByteBuffer, ByteBuffer>>() {
@Override
public Map<ByteBuffer, ByteBuffer> call() throws Exception {
Map<ByteBuffer, ByteBuffer> result = new HashMap<>();
for (ByteBuffer key : keys) {
result.put(key, data.get(key));
}
if (callback != null)
callback.onCompletion(null, result);
return result;
}
});
}
@Override
public Future<Void> set(final Map<ByteBuffer, ByteBuffer> values,
final Callback<Void> callback) {
return executor.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
for (Map.Entry<ByteBuffer, ByteBuffer> entry : values.entrySet()) {
data.put(entry.getKey(), entry.getValue());
}
save();
if (callback != null)
callback.onCompletion(null, null);
return null;
}
});
}
// Hook to allow subclasses to persist data
protected void save() {
}
}
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/connect/runtime/src/main/java/org/apache/kafka/connect/storage/MemoryOffsetBackingStore.java | Java | apache-2.0 | 4,169 |
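A hedged sketch of exercising the in-memory store above from a plain main method; the null callbacks are allowed by the null checks in get() and set(), and MemoryStoreSketch is a hypothetical class.

import org.apache.kafka.connect.storage.MemoryOffsetBackingStore;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;

public class MemoryStoreSketch {
    public static void main(String[] args) throws Exception {
        MemoryOffsetBackingStore store = new MemoryOffsetBackingStore();
        store.start();

        ByteBuffer key = ByteBuffer.wrap("task-0".getBytes(StandardCharsets.UTF_8));
        ByteBuffer value = ByteBuffer.wrap("offset-42".getBytes(StandardCharsets.UTF_8));

        store.set(Collections.singletonMap(key, value), null).get();           // block until the background write finishes
        Map<ByteBuffer, ByteBuffer> read = store.get(Collections.singleton(key), null).get();
        System.out.println(value.equals(read.get(key)));                        // true

        store.stop();
    }
}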
/*
* @test /nodynamiccopyright/
* @bug 4482403
* @summary javac failed to check second bound
* @author gafter
*
* @compile/fail/ref=Multibound1.out -XDrawDiagnostics Multibound1.java
*/
package Multibound1;
interface A {}
interface B {}
class C<T extends A&B> {}
class D implements A {}
class E extends C<D> {}
| FauxFaux/jdk9-langtools | test/tools/javac/generics/Multibound1.java | Java | gpl-2.0 | 321 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
/**
* <p>
* Contains the result of a successful invocation of the
* ModifyDBParameterGroup or ResetDBParameterGroup action.
* </p>
*/
public class ResetDBParameterGroupResult implements Serializable, Cloneable {
/**
* Provides the name of the DB parameter group.
*/
private String dBParameterGroupName;
/**
* Provides the name of the DB parameter group.
*
* @return Provides the name of the DB parameter group.
*/
public String getDBParameterGroupName() {
return dBParameterGroupName;
}
/**
* Provides the name of the DB parameter group.
*
* @param dBParameterGroupName Provides the name of the DB parameter group.
*/
public void setDBParameterGroupName(String dBParameterGroupName) {
this.dBParameterGroupName = dBParameterGroupName;
}
/**
* Provides the name of the DB parameter group.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param dBParameterGroupName Provides the name of the DB parameter group.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public ResetDBParameterGroupResult withDBParameterGroupName(String dBParameterGroupName) {
this.dBParameterGroupName = dBParameterGroupName;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDBParameterGroupName() != null) sb.append("DBParameterGroupName: " + getDBParameterGroupName() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDBParameterGroupName() == null) ? 0 : getDBParameterGroupName().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof ResetDBParameterGroupResult == false) return false;
ResetDBParameterGroupResult other = (ResetDBParameterGroupResult)obj;
if (other.getDBParameterGroupName() == null ^ this.getDBParameterGroupName() == null) return false;
if (other.getDBParameterGroupName() != null && other.getDBParameterGroupName().equals(this.getDBParameterGroupName()) == false) return false;
return true;
}
@Override
public ResetDBParameterGroupResult clone() {
try {
return (ResetDBParameterGroupResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| mahaliachante/aws-sdk-java | aws-java-sdk-rds/src/main/java/com/amazonaws/services/rds/model/ResetDBParameterGroupResult.java | Java | apache-2.0 | 3,865 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight.stdlib;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.psi.resolve.PyCanonicalPathProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
* @author yole
*/
public class PyStdlibCanonicalPathProvider implements PyCanonicalPathProvider {
@Nullable
@Override
public QualifiedName getCanonicalPath(@NotNull QualifiedName qName, PsiElement foothold) {
return restoreStdlibCanonicalPath(qName);
}
public static QualifiedName restoreStdlibCanonicalPath(QualifiedName qName) {
if (qName.getComponentCount() > 0) {
final List<String> components = qName.getComponents();
final String head = components.get(0);
if (head.equals("_abcoll") || head.equals("_collections")) {
components.set(0, "collections");
return QualifiedName.fromComponents(components);
}
else if (head.equals("posix") || head.equals("nt")) {
components.set(0, "os");
return QualifiedName.fromComponents(components);
}
else if (head.equals("_functools")) {
components.set(0, "functools");
return QualifiedName.fromComponents(components);
}
else if (head.equals("_struct")) {
components.set(0, "struct");
return QualifiedName.fromComponents(components);
}
else if (head.equals("_io") || head.equals("_pyio") || head.equals("_fileio")) {
components.set(0, "io");
return QualifiedName.fromComponents(components);
}
else if (head.equals("_datetime")) {
components.set(0, "datetime");
return QualifiedName.fromComponents(components);
}
else if (head.equals("ntpath") || head.equals("posixpath") || head.equals("path")) {
final List<String> result = new ArrayList<String>();
result.add("os");
components.set(0, "path");
result.addAll(components);
return QualifiedName.fromComponents(result);
}
else if (head.equals("_sqlite3")) {
components.set(0, "sqlite3");
return QualifiedName.fromComponents(components);
}
else if (head.equals("_pickle")) {
components.set(0, "pickle");
return QualifiedName.fromComponents(components);
}
}
return null;
}
}
| akosyakov/intellij-community | python/src/com/jetbrains/python/codeInsight/stdlib/PyStdlibCanonicalPathProvider.java | Java | apache-2.0 | 3,015 |
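A hedged illustration of the mapping performed by restoreStdlibCanonicalPath; it relies only on the List-based QualifiedName.fromComponents overload already used above, and the printed dotted form is assumed from QualifiedName.toString(). CanonicalPathSketch is a hypothetical class.

import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.codeInsight.stdlib.PyStdlibCanonicalPathProvider;

import java.util.Arrays;

public class CanonicalPathSketch {
    public static void main(String[] args) {
        QualifiedName original = QualifiedName.fromComponents(Arrays.asList("posixpath", "join"));
        QualifiedName canonical = PyStdlibCanonicalPathProvider.restoreStdlibCanonicalPath(original);
        System.out.println(canonical); // expected: os.path.join
    }
}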
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.equalTo;
/**
* Various tests for Groovy scripting
*/
public class GroovyScriptIT extends ESIntegTestCase {
@Test
public void testGroovyBigDecimalTransformation() {
client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefresh(true).get();
// Test that something that would usually be a BigDecimal is transformed into a Double
assertScript("def n = 1.23; assert n instanceof Double;");
assertScript("def n = 1.23G; assert n instanceof Double;");
assertScript("def n = BigDecimal.ONE; assert n instanceof BigDecimal;");
}
public void assertScript(String script) {
SearchResponse resp = client().prepareSearch("test")
.setSource(new BytesArray("{\"query\": {\"match_all\": {}}," +
"\"sort\":{\"_script\": {\"script\": \""+ script +
"; 1\", \"type\": \"number\", \"lang\": \"groovy\"}}}")).get();
assertNoFailures(resp);
}
@Test
public void testGroovyExceptionSerialization() throws Exception {
List<IndexRequestBuilder> reqs = new ArrayList<>();
for (int i = 0; i < randomIntBetween(50, 500); i++) {
reqs.add(client().prepareIndex("test", "doc", "" + i).setSource("foo", "bar"));
}
indexRandom(true, false, reqs);
try {
client().prepareSearch("test")
.setQuery(
constantScoreQuery(scriptQuery(new Script("1 == not_found", ScriptType.INLINE, GroovyScriptEngineService.NAME,
null)))).get();
fail("should have thrown an exception");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString()+ "should not contained NotSerializableTransportException",
e.toString().contains("NotSerializableTransportException"), equalTo(false));
assertThat(e.toString()+ "should have contained GroovyScriptExecutionException",
e.toString().contains("GroovyScriptExecutionException"), equalTo(true));
assertThat(e.toString()+ "should have contained not_found",
e.toString().contains("No such property: not_found"), equalTo(true));
}
try {
client().prepareSearch("test")
.setQuery(constantScoreQuery(scriptQuery(new Script("assert false", ScriptType.INLINE, "groovy", null)))).get();
fail("should have thrown an exception");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString() + "should not contained NotSerializableTransportException",
e.toString().contains("NotSerializableTransportException"), equalTo(false));
assertThat(e.toString() + "should have contained GroovyScriptExecutionException",
e.toString().contains("GroovyScriptExecutionException"), equalTo(true));
assertThat(e.toString()+ "should have contained an assert error",
e.toString().contains("AssertionError[assert false"), equalTo(true));
}
}
@Test
public void testGroovyScriptAccess() {
client().prepareIndex("test", "doc", "1").setSource("foo", "quick brow fox jumped over the lazy dog", "bar", 1).get();
client().prepareIndex("test", "doc", "2").setSource("foo", "fast jumping spiders", "bar", 2).get();
client().prepareIndex("test", "doc", "3").setSource("foo", "dog spiders that can eat a dog", "bar", 3).get();
refresh();
// doc[] access
SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery())
.add(
scriptFunction(new Script("doc['bar'].value", ScriptType.INLINE, "groovy", null)))
.boostMode(CombineFunction.REPLACE)).get();
assertNoFailures(resp);
assertOrderedSearchHits(resp, "3", "2", "1");
}
public void testScoreAccess() {
client().prepareIndex("test", "doc", "1").setSource("foo", "quick brow fox jumped over the lazy dog", "bar", 1).get();
client().prepareIndex("test", "doc", "2").setSource("foo", "fast jumping spiders", "bar", 2).get();
client().prepareIndex("test", "doc", "3").setSource("foo", "dog spiders that can eat a dog", "bar", 3).get();
refresh();
// _score can be accessed
SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchQuery("foo", "dog"))
.add(scriptFunction(new Script("_score", ScriptType.INLINE, "groovy", null)))
.boostMode(CombineFunction.REPLACE)).get();
assertNoFailures(resp);
assertSearchHits(resp, "3", "1");
// _score is comparable
        // NOTE: it is important to use 0.0 instead of 0, otherwise Groovy will do an integer comparison
        // and if the score is between 0 and 1 it will be considered equal to 0 due to the cast
resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("foo", "dog")).add(
scriptFunction(new Script("_score > 0.0 ? _score : 0", ScriptType.INLINE, "groovy", null))).boostMode(
CombineFunction.REPLACE)).get();
assertNoFailures(resp);
assertSearchHits(resp, "3", "1");
}
}
| wimvds/elasticsearch | core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java | Java | apache-2.0 | 7,046 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticfilesystem.model;
import com.amazonaws.AmazonServiceException;
/**
* <p>
* Returned if the specified <code>FileSystemId</code> does not exist in the
* requester's AWS account.
* </p>
*/
public class FileSystemNotFoundException extends AmazonServiceException {
private static final long serialVersionUID = 1L;
private String errorCode;
/**
* Constructs a new FileSystemNotFoundException with the specified error
* message.
*
* @param message
* Describes the error encountered.
*/
public FileSystemNotFoundException(String message) {
super(message);
}
/**
* Sets the value of the ErrorCode property for this object.
*
* @param errorCode
* The new value for the ErrorCode property for this object.
*/
public void setErrorCode(String errorCode) {
this.errorCode = errorCode;
}
/**
* Returns the value of the ErrorCode property for this object.
*
* @return The value of the ErrorCode property for this object.
*/
public String getErrorCode() {
return this.errorCode;
}
/**
* Sets the value of the ErrorCode property for this object.
*
* @param errorCode
* The new value for the ErrorCode property for this object.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public FileSystemNotFoundException withErrorCode(String errorCode) {
setErrorCode(errorCode);
return this;
}
} | mahaliachante/aws-sdk-java | aws-java-sdk-efs/src/main/java/com/amazonaws/services/elasticfilesystem/model/FileSystemNotFoundException.java | Java | apache-2.0 | 2,190 |
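A hedged sketch of caller-side handling for this exception. The client construction and describeFileSystems call are assumptions about the surrounding EFS SDK, not something defined in this file, and EfsLookupSketch is a hypothetical class.

import com.amazonaws.services.elasticfilesystem.AmazonElasticFileSystem;
import com.amazonaws.services.elasticfilesystem.AmazonElasticFileSystemClient;
import com.amazonaws.services.elasticfilesystem.model.DescribeFileSystemsRequest;
import com.amazonaws.services.elasticfilesystem.model.FileSystemNotFoundException;

public class EfsLookupSketch {
    public static void main(String[] args) {
        AmazonElasticFileSystem efs = new AmazonElasticFileSystemClient(); // assumed default credentials chain
        try {
            efs.describeFileSystems(new DescribeFileSystemsRequest().withFileSystemId("fs-12345678"));
        } catch (FileSystemNotFoundException e) {
            System.err.println("File system not found (" + e.getErrorCode() + "): " + e.getMessage());
        }
    }
}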
/*
* #%L
* BroadleafCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.openadmin.web.filter;
import org.broadleafcommerce.common.web.BroadleafTimeZoneResolverImpl;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.WebRequest;
import java.util.TimeZone;
/**
*
* @author Phillip Verheyden (phillipuniverse)
*/
@Component("blAdminTimeZoneResolver")
public class BroadleafAdminTimeZoneResolver extends BroadleafTimeZoneResolverImpl {
@Override
public TimeZone resolveTimeZone(WebRequest request) {
        //TODO: eventually this should support using a timezone from the currently logged in Admin user's preferences
return super.resolveTimeZone(request);
}
}
| cloudbearings/BroadleafCommerce | admin/broadleaf-open-admin-platform/src/main/java/org/broadleafcommerce/openadmin/web/filter/BroadleafAdminTimeZoneResolver.java | Java | apache-2.0 | 1,370 |
/*
* #%L
* BroadleafCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2014 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.openadmin.server.security.service;
import java.util.List;
/**
* <p>
* Provides row-level security to the various CRUD operations in the admin
*
* <p>
 * This security service can be extended by the use of {@link RowLevelSecurityProvider}s, of which this service has a list.
* To add additional providers, add this to an applicationContext merged into the admin application:
*
* {@code
* <bean id="blCustomRowSecurityProviders" class="org.springframework.beans.factory.config.ListFactoryBean" >
* <property name="sourceList">
* <list>
* <ref bean="customProvider" />
* </list>
* </property>
* </bean>
* <bean class="org.broadleafcommerce.common.extensibility.context.merge.LateStageMergeBeanPostProcessor">
* <property name="collectionRef" value="blCustomRowSecurityProviders" />
* <property name="targetRef" value="blRowLevelSecurityProviders" />
* </bean>
* }
*
* @author Phillip Verheyden (phillipuniverse)
* @author Brian Polster (bpolster)
*/
public interface RowLevelSecurityService extends RowLevelSecurityProvider {
/**
* Gets all of the registered providers
* @return the providers configured for this service
*/
public List<RowLevelSecurityProvider> getProviders();
}
| cengizhanozcan/BroadleafCommerce | admin/broadleaf-open-admin-platform/src/main/java/org/broadleafcommerce/openadmin/server/security/service/RowLevelSecurityService.java | Java | apache-2.0 | 1,996 |
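A brief, hedged sketch of consuming the service; the blRowLevelSecurityService bean name and the injection style are assumptions based on the naming conventions in the javadoc above, and RowLevelSecurityAudit is a hypothetical class that would be wired as a Spring bean.

import java.util.List;

import javax.annotation.Resource;

import org.broadleafcommerce.openadmin.server.security.service.RowLevelSecurityProvider;
import org.broadleafcommerce.openadmin.server.security.service.RowLevelSecurityService;

public class RowLevelSecurityAudit {
    @Resource(name = "blRowLevelSecurityService")
    protected RowLevelSecurityService rowLevelSecurityService;

    public void logProviders() {
        List<RowLevelSecurityProvider> providers = rowLevelSecurityService.getProviders();
        for (RowLevelSecurityProvider provider : providers) {
            System.out.println("Row-level security provider: " + provider.getClass().getName());
        }
    }
}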