code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 5-1.04M | stringlengths 7-108 | stringlengths 6-299 | stringclasses 1 value | stringclasses 15 values | int64 5-1.04M
/*
* Copyright 2011 Yuri Kanivets
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.darly.dlclent.widget.wheel.adapter;
import android.content.Context;
/**
* Numeric Wheel adapter.
*/
public class NumericWheelAdapter extends AbstractWheelTextAdapter {
/** The default max value */
public static final int DEFAULT_MAX_VALUE = 9;
/** The default min value */
private static final int DEFAULT_MIN_VALUE = 0;
// Values
private int minValue;
private int maxValue;
// format
private String format;
/**
* Constructor
* @param context the current context
*/
public NumericWheelAdapter(Context context) {
this(context, DEFAULT_MIN_VALUE, DEFAULT_MAX_VALUE);
}
/**
* Constructor
* @param context the current context
* @param minValue the wheel min value
* @param maxValue the wheel max value
*/
public NumericWheelAdapter(Context context, int minValue, int maxValue) {
this(context, minValue, maxValue, null);
}
/**
* Constructor
* @param context the current context
* @param minValue the wheel min value
* @param maxValue the wheel max value
* @param format the format string
*/
public NumericWheelAdapter(Context context, int minValue, int maxValue, String format) {
super(context);
this.minValue = minValue;
this.maxValue = maxValue;
this.format = format;
}
@Override
public CharSequence getItemText(int index) {
if (index >= 0 && index < getItemsCount()) {
int value = minValue + index;
return format != null ? String.format(format, value) : Integer.toString(value);
}
return null;
}
@Override
public int getItemsCount() {
return maxValue - minValue + 1;
}
}
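// Usage sketch (not part of the original file): attaching this adapter to a wheel
// widget. A "WheelView" with a "setViewAdapter" method is assumed from the
// surrounding wheel library; the range and format string are illustrative.
/*
WheelView hourWheel = (WheelView) findViewById(R.id.hour_wheel);
hourWheel.setViewAdapter(new NumericWheelAdapter(this, 0, 23, "%02d"));
*/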
| darlyhellen/oto | DLClent_A/src/com/darly/dlclent/widget/wheel/adapter/NumericWheelAdapter.java | Java | apache-2.0 | 2,397 |
package com.cqut.auth.auth;
import com.alibaba.fastjson.JSONObject;
import com.cqut.auth.entity.AuthUser;
import com.cqut.auth.entity.JWTAuthenticationToken;
import com.cqut.auth.service.AuthenticationService;
import com.cqut.entity.global.JSONResult;
import com.cqut.util.constants.ResponseCodeConstants;
import com.cqut.util.string.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.OncePerRequestFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.Writer;
@Component
public class HeaderTokenWebFilter extends OncePerRequestFilter {
private static final String HEADER_TOKEN_NAME = "Authorization";
private static final String TOKEN_PREFIX = "Bearer ";
@Autowired
private AuthenticationService authenticationService;
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
String accessToken = request.getHeader(HEADER_TOKEN_NAME);
if (StringUtil.isNullOrEmpty(accessToken)) {
filterChain.doFilter(request, response);
return;
}
String token = accessToken.replace(TOKEN_PREFIX, "");
AuthUser authUser = authenticationService.findUserByToken(token);
if (authUser != null) {
Authentication auth = new JWTAuthenticationToken(authUser);
SecurityContextHolder.getContext().setAuthentication(auth);
filterChain.doFilter(request, response);
} else {
handleInvalidToken(response);
}
}
private static final String DEFAULT_CONTENT_TYPE = "application/json;charset=utf-8";
private static final String ACCESS_CONTROL_ALLOW_ORIGIN_NAME = "Access-Control-Allow-Origin";
private static final String ACCESS_CONTROL_ALLOW_ORIGIN_VALUE = "*";
private void handleInvalidToken(HttpServletResponse response) {
response.setContentType(DEFAULT_CONTENT_TYPE);
response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN_NAME, ACCESS_CONTROL_ALLOW_ORIGIN_VALUE);
try (Writer writer = response.getWriter()) {
writer.write(invalidTokenResult());
} catch (IOException e) {
e.printStackTrace();
}
}
private static final String INVALID_TOKEN_MSG = "Token is invalid or has expired";
private String invalidTokenResult() {
JSONObject result = new JSONObject();
result.put(JSONResult.CODE_KEY, ResponseCodeConstants.INVALID_TOKEN);
result.put(JSONResult.MESSAGE_KEY, INVALID_TOKEN_MSG);
return result.toJSONString();
}
}
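// Registration sketch (not part of the original file): a token filter like this is
// usually placed ahead of Spring Security's username/password filter. The security
// configuration class around this snippet is assumed; addFilterBefore itself is
// standard Spring Security API.
/*
@Override
protected void configure(HttpSecurity http) throws Exception {
    http.csrf().disable()
        .addFilterBefore(headerTokenWebFilter, UsernamePasswordAuthenticationFilter.class);
}
*/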
| yangqiang1997/yangqiang1997.github.io | nevis_activiti/nevis-auth/src/main/java/com/cqut/auth/auth/HeaderTokenWebFilter.java | Java | apache-2.0 | 2,950 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.commons.api.edm.provider.annotation;
public interface NavigationPropertyPath extends DynamicAnnotationExpression {
String getValue();
}
| mtaal/olingo-odata4-jpa | lib/commons-api/src/main/java/org/apache/olingo/commons/api/edm/provider/annotation/NavigationPropertyPath.java | Java | apache-2.0 | 971 |
package com.nulldreams.wowpaper.activity;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.BottomNavigationView;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import com.nulldreams.base.content.It;
import com.nulldreams.base.fragment.AbsPagerFragment;
import com.nulldreams.base.utils.BuildHelper;
import com.nulldreams.base.utils.UiHelper;
import com.nulldreams.wowpaper.R;
import com.nulldreams.wowpaper.fragment.HomeFragment;
import com.nulldreams.wowpaper.fragment.LikeFragment;
import com.nulldreams.wowpaper.fragment.TagStyleFragment;
import org.xutils.view.annotation.Event;
import org.xutils.view.annotation.ViewInject;
import org.xutils.x;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends WowActivity
implements BottomNavigationView.OnNavigationItemReselectedListener,
BottomNavigationView.OnNavigationItemSelectedListener{
private static final String TAG = MainActivity.class.getSimpleName();
@ViewInject(value = R.id.main_tb)
private Toolbar mTb;
@ViewInject(value = R.id.main_bottom_nav)
private BottomNavigationView mBottomNav;
@ViewInject(value = R.id.main_nav_place_holder)
private View mNavPlaceHolderView;
@ViewInject(value = R.id.main_status_bar_place_holder)
private View mStatusPlaceHolderView;
private HomeFragment mHomeFragment;
private TagStyleFragment mTagFragment;
private LikeFragment mLikeFragment;
private AbsPagerFragment mLastFragment;
private List<Fragment> mFragments = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
/*if (BuildHelper.nAndAbove(Build.VERSION_CODES.KITKAT)) {
Window w = getWindow(); // in Activity's onCreate() for instance
w.setFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS,
WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
}*/
setContentView(R.layout.activity_main);
x.view().inject(this);
mFragments = new ArrayList<>();
if (savedInstanceState != null) {
mHomeFragment = (HomeFragment) getSupportFragmentManager()
.findFragmentByTag(HomeFragment.class.getName());
/*mTagFragment = (TagStyleFragment) getSupportFragmentManager()
.findFragmentByTag(TagStyleFragment.class.getName());*/
mLikeFragment = (LikeFragment) getSupportFragmentManager()
.findFragmentByTag(LikeFragment.class.getName());
}
if (mHomeFragment == null) {
mHomeFragment = new HomeFragment();
}
/*if (mTagFragment == null) {
mTagFragment = new TagStyleFragment();
}*/
if (mLikeFragment == null) {
mLikeFragment = new LikeFragment();
}
mFragments.add(mHomeFragment);
// mFragments.add(mTagFragment);
mFragments.add(mLikeFragment);
setSupportActionBar(mTb);
mBottomNav.setOnNavigationItemReselectedListener(this);
mBottomNav.setOnNavigationItemSelectedListener(this);
}
@Override
public void onBackPressed() {
super.onBackPressed();
}
@Override
public void onPostCreate(@Nullable Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
int mLastSelectId = -1;
if (savedInstanceState != null) {
mLastSelectId = savedInstanceState.getInt("nav_last_select_id", -1);
}
Log.v(TAG, "onPostCreate mLastSelectId=" + mLastSelectId);
switch (mLastSelectId) {
case R.id.nav_home:
mLastFragment = mHomeFragment;
mBottomNav.setSelectedItemId(mLastSelectId);
break;
/*case R.id.nav_category:
mLastFragment = mTagFragment;
mBottomNav.setSelectedItemId(mLastSelectId);
break;
case R.id.nav_collection:
break;*/
case R.id.nav_like:
mLastFragment = mLikeFragment;
mBottomNav.setSelectedItemId(mLastSelectId);
break;
default:
mBottomNav.setSelectedItemId(R.id.nav_home);
break;
}
if (BuildHelper.kitkatAndAbove()) {
if (hasVirtualNavBar()) {
int navSize = UiHelper.getNavigationBarSize(this);
ViewGroup.LayoutParams params = mNavPlaceHolderView.getLayoutParams();
if (params == null) {
params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, navSize);
} else {
params.height = navSize;
}
mNavPlaceHolderView.setLayoutParams(params);
}
int statusSize = UiHelper.getStatusBarHeight(this);
ViewGroup.LayoutParams statusParams = mStatusPlaceHolderView.getLayoutParams();
if (statusParams == null) {
statusParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, statusSize);
} else {
statusParams.height = statusSize;
}
mStatusPlaceHolderView.setLayoutParams(statusParams);
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putInt("nav_last_select_id", mBottomNav.getSelectedItemId());
super.onSaveInstanceState(outState);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_nav, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.nav_settings:
It.newInstance().startActivity(this, SettingsActivity.class);
return true;
case R.id.nav_about:
It.newInstance().startActivity(this, AboutActivity.class);
return true;
}
return super.onOptionsItemSelected(item);
}
@Event(value = {
R.id.main_tb
})
private void onClick (View view) {
switch (view.getId()) {
case R.id.main_tb:
if (mLastFragment != null) {
mLastFragment.actionCommand(1, null);
}
break;
}
}
@Override
public void onNavigationItemReselected(@NonNull MenuItem item) {
AbsPagerFragment fragment = null;
switch (item.getItemId()) {
case R.id.nav_home:
fragment = mHomeFragment;
break;
/*case R.id.nav_category:
fragment = mTagFragment;
break;*/
case R.id.nav_like:
fragment = mLikeFragment;
break;
}
if (fragment == null) {
return;
}
if (fragment.isAdded()) {
fragment.actionCommand(2, null);
} else {
showFragment(fragment, item);
}
}
@Override
public boolean onNavigationItemSelected(@NonNull MenuItem item) {
AbsPagerFragment fragment = null;
switch (item.getItemId()) {
case R.id.nav_home:
fragment = mHomeFragment;
break;
/*case R.id.nav_category:
fragment = mTagFragment;
break;*/
case R.id.nav_like:
fragment = mLikeFragment;
break;
}
showFragment(fragment, item);
return fragment != null;
}
private void showFragment (AbsPagerFragment fragment, MenuItem item) {
if (fragment == null) {
return;
}
mTb.setTitle(item.getTitle());
FragmentTransaction transaction
= getSupportFragmentManager().beginTransaction();
transaction.setCustomAnimations(android.R.anim.fade_in, android.R.anim.fade_out);
/*int index = mFragments.indexOf(fragment);
if (index > mLastFragmentIndex) {
transaction.setCustomAnimations(R.anim.slide_in_right, R.anim.slide_out_left);
} else if (index < mLastFragmentIndex) {
transaction.setCustomAnimations(R.anim.slide_in_left, R.anim.slide_out_right);
}*/
if (fragment.isAdded()) {
transaction.show(fragment);
} else {
transaction.add(R.id.main_fragment_container, fragment, fragment.getClass().getName());
}
Log.v(TAG, "showFragment mLastFragment=" + mLastFragment);
if (mLastFragment != null) {
transaction.hide(mLastFragment);
}
transaction.commitNow();
mLastFragment = fragment;
}
}
| boybeak/WowPaper | app/src/main/java/com/nulldreams/wowpaper/activity/MainActivity.java | Java | apache-2.0 | 9,282 |
/*
* Copyright (C) 2012 Ricardo Juan Palma Durán
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jbt.tools.bteditor.actions;
import java.util.List;
import jbt.tools.bteditor.editor.BTEditor;
import jbt.tools.bteditor.editor.BTEditorInput;
import jbt.tools.bteditor.model.BT;
import jbt.tools.bteditor.util.Extensions;
import jbt.tools.bteditor.util.Utilities;
import org.eclipse.jface.action.Action;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.ui.PlatformUI;
/**
* Action that saves a behaviour tree in an XML file. It first asks the user to
* enter a file name in a dialog, and then saves the tree. If there is any
* problem in the saving process, an exception is thrown. If a tree loaded from
* a file with the same name that the user specifies is already open, the
* saving process also fails, and an exception is thrown.
*/
public class SaveBTAsAction extends Action {
/** The tree to save. */
private BT tree;
/** The file where the tree will be stored. */
private String selectedFile;
/** The file name that is initially displayed in the file dialog. */
private String initialFileName;
/**
* Constructor.
*
* @param tree
* tree to save.
* @param initialFileName
* the file name that is initially displayed in the file dialog.
*/
public SaveBTAsAction(BT tree, String initialFileName) {
this.tree = tree;
this.initialFileName = initialFileName;
}
/**
*
* @see org.eclipse.jface.action.Action#run()
*/
public void run() {
FileDialog dialog = new FileDialog(
PlatformUI.getWorkbench().getWorkbenchWindows()[0].getShell(), SWT.SAVE);
dialog.setOverwrite(true);
dialog.setFilterExtensions(Extensions.getFiltersFromExtensions(Extensions
.getBTFileExtensions()));
dialog.setText("Save BT as");
dialog.setFileName(this.initialFileName);
String fileName = dialog.open();
if (fileName != null) {
List<BTEditor> editors = Utilities.getBTEditors();
for (BTEditor editor : editors) {
BTEditorInput editorInput = (BTEditorInput) editor.getEditorInput();
if (editorInput.isFromFile() && editorInput.getTreeName().equals(fileName)) {
throw new RuntimeException(
"There is a behaviour tree already open with the same name ("
+ fileName + "). Close it first.");
}
}
String targetFileName = Extensions.joinFileNameAndExtension(fileName,
Extensions.getBTFileExtensions()[dialog.getFilterIndex()]);
new SaveBTAction(this.tree, targetFileName).run();
this.selectedFile = targetFileName;
}
}
/**
* Returns the name of the file where the tree has been stored. Returns null
* if the tree could not be saved or if {@link #run()} has not been called
* yet.
*/
public String getSelectedFile() {
return this.selectedFile;
}
}
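// Usage sketch (not part of the original file): running the action and reading back
// the file the user picked. "tree" is an already-loaded BT instance and the suggested
// file name is illustrative.
/*
SaveBTAsAction saveAs = new SaveBTAsAction(tree, "untitled.xbt");
saveAs.run();
String savedTo = saveAs.getSelectedFile(); // null if the dialog was cancelled
*/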
| lanen/jbt | JBTEditor/jbt.tools.bteditor/src/jbt/tools/bteditor/actions/SaveBTAsAction.java | Java | apache-2.0 | 3,349 |
/*
* Copyright 2017 DOCOMO Innovations, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* See the License for the specific language governing permissions and limitations under the License.
*/
package net.dataninja.smartsentiment.client;
public class Keyword {
String keyword;
String sentiment;
int count;
String locations;
double keyword_score;
double sentiment_score;
public String getKeyword() {
return keyword;
}
public void setKeyword(String keyword) {
this.keyword = keyword;
}
public String getSentiment() {
return sentiment;
}
public void setSentiment(String sentiment) {
this.sentiment = sentiment;
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
public String getLocations() {
return locations;
}
public void setLocations(String locations) {
this.locations = locations;
}
public double getKeyword_score() {
return keyword_score;
}
public void setKeyword_score(double keyword_score) {
this.keyword_score = keyword_score;
}
public double getSentiment_score() {
return sentiment_score;
}
public void setSentiment_score(double sentiment_score) {
this.sentiment_score = sentiment_score;
}
@Override
public String toString() {
return "Keyword{" +
"keyword='" + keyword + '\'' +
", sentiment='" + sentiment + '\'' +
", count=" + count +
", locations='" + locations + '\'' +
", keyword_score=" + keyword_score +
", sentiment_score=" + sentiment_score +
'}';
}
}
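// Binding sketch (not part of the original file): the field names mirror the JSON keys
// of a keyword entry, so a JSON binder can map a response element straight onto this
// class. fastjson is shown here as an assumption; the JSON literal is illustrative.
/*
String json = "{\"keyword\":\"service\",\"sentiment\":\"positive\",\"count\":3,"
        + "\"locations\":\"12,48\",\"keyword_score\":0.91,\"sentiment_score\":0.78}";
Keyword kw = com.alibaba.fastjson.JSON.parseObject(json, Keyword.class);
*/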
| DataNinjaAPI/dataninja-api-oracle-sdk-java | src/main/java/net/dataninja/smartsentiment/client/Keyword.java | Java | apache-2.0 | 2,754 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package nl.uva.cs.lobcder.optimization;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import nl.uva.cs.lobcder.catalogue.SDNSweep;
import nl.uva.cs.lobcder.catalogue.SDNSweep.Port;
import nl.uva.cs.lobcder.rest.wrappers.AttachmentPoint;
import nl.uva.cs.lobcder.rest.wrappers.Link;
import nl.uva.cs.lobcder.rest.wrappers.NetworkEntity;
import nl.uva.cs.lobcder.util.PropertiesHelper;
import org.jgrapht.alg.DijkstraShortestPath;
import org.jgrapht.ext.DOTExporter;
import org.jgrapht.ext.EdgeNameProvider;
import org.jgrapht.ext.IntegerNameProvider;
import org.jgrapht.ext.VertexNameProvider;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;
import org.xml.sax.SAXException;
/**
*
* @author S. Koulouzis
*/
public class SDNControllerClient {
// private final Client client;
private String uri;
// private int floodlightPort = 8080;
// private int sflowRTPrt = 8008;
// private static List<Switch> switches;
// private static Map<String, String> networkEntitySwitchMap;
// private static Map<String, Integer> sFlowHostPortMap;
// private static Map<String, List<NetworkEntity>> networkEntityCache;
private static SimpleWeightedGraph<String, DefaultWeightedEdge> graph;
// private static List<Link> linkCache;
private DOTExporter dot;
private final Double factor;
public SDNControllerClient(String uri) throws IOException {
// ClientConfig clientConfig = new DefaultClientConfig();
// clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
// client = Client.create(clientConfig);
// this.uri = uri;
factor = PropertiesHelper.getDelayFactor();
}
public List<DefaultWeightedEdge> getShortestPath(String dest, Set<String> sources) throws InterruptedException, IOException {
if (graph == null) {
graph = new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
}
Collection<SDNSweep.Switch> switchesColl = SDNSweep.getSwitches().values();
List<SDNSweep.Switch> switches;
if (switchesColl instanceof List) {
switches = (List) switchesColl;
} else {
switches = new ArrayList(switchesColl);
}
for (int i = 0; i < switches.size(); i++) {
List<Port> ports = switches.get(i).ports;
if (ports != null) {
for (int j = 0; j < ports.size(); j++) {
for (int k = 0; k < ports.size(); k++) {
if (ports.get(j).state == 0 && ports.get(k).state == 0 && j != k) {
String vertex1 = switches.get(i).dpid + "-" + ports.get(j).portNumber;
String vertex2 = switches.get(i).dpid + "-" + ports.get(k).portNumber;
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "From: {0} to: {1}", new Object[]{vertex1, vertex2});
if (!graph.containsVertex(vertex1)) {
graph.addVertex(vertex1);
}
if (!graph.containsVertex(vertex2)) {
graph.addVertex(vertex2);
}
DefaultWeightedEdge e1;
if (!graph.containsEdge(vertex1, vertex2)) {
e1 = graph.addEdge(vertex1, vertex2);
} else {
e1 = graph.getEdge(vertex1, vertex2);
}
graph.setEdgeWeight(e1, 1);
}
}
}
}
}
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "Destination: {0}", new Object[]{dest});
if (!graph.containsVertex(dest)) {
graph.addVertex(dest);
}
NetworkEntity destinationEntityArray = SDNSweep.getNetworkEntity(dest);
// List<NetworkEntity> destinationEntityArray = getNetworkEntity(dest);
// for (SDNSweep.NetworkEntity ne : destinationEntityArray) {
if (destinationEntityArray != null) {
for (AttachmentPoint ap : destinationEntityArray.getAttachmentPoint()) {
String vertex = ap.getSwitchDPID() + "-" + ap.getPort();
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "vertex: {0}", new Object[]{vertex});
if (!graph.containsVertex(vertex)) {
graph.addVertex(vertex);
}
DefaultWeightedEdge e1;
if (!graph.containsEdge(dest, vertex)) {
e1 = graph.addEdge(dest, vertex);
} else {
e1 = graph.getEdge(dest, vertex);
}
//Don't calculate the cost from the destination to the switch.
//There is nothing we can do about it so why waste cycles ?
// graph.setEdgeWeight(e1, 2);
graph.setEdgeWeight(e1, getCost(dest, vertex));
}
}
// }
// List<NetworkEntity> sourceEntityArray = getNetworkEntity(sources);
for (String s : sources) {
NetworkEntity ne = SDNSweep.getNetworkEntity(s);
if (ne != null) {
for (String ip : ne.getIpv4()) {
if (!graph.containsVertex(ip)) {
graph.addVertex(ip);
}
for (AttachmentPoint ap : ne.getAttachmentPoint()) {
String vertex = ap.getSwitchDPID() + "-" + ap.getPort();
if (!graph.containsVertex(vertex)) {
graph.addVertex(vertex);
}
DefaultWeightedEdge e2;
if (!graph.containsEdge(ip, vertex)) {
e2 = graph.addEdge(ip, vertex);
} else {
e2 = graph.getEdge(ip, vertex);
}
graph.setEdgeWeight(e2, getCost(ip, vertex));
}
}
}
}
List<Link> links = SDNSweep.getSwitchLinks();
for (Link l : links) {
String srcVertex = l.srcSwitch + "-" + l.srcPort;
if (!graph.containsVertex(srcVertex)) {
graph.addVertex(srcVertex);
}
String dstVertex = l.dstSwitch + "-" + l.dstPort;
if (!graph.containsVertex(dstVertex)) {
graph.addVertex(dstVertex);
}
DefaultWeightedEdge e3;
if (!graph.containsEdge(srcVertex, dstVertex)) {
e3 = graph.addEdge(srcVertex, dstVertex);
} else {
e3 = graph.getEdge(srcVertex, dstVertex);
}
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "dstVertex: {0}", new Object[]{dstVertex});
graph.setEdgeWeight(e3, getCost(srcVertex, dstVertex));
}
double cost = Double.MAX_VALUE;
List<DefaultWeightedEdge> shortestPath = null;
// exportGraph();
StringBuilder msg = new StringBuilder();
msg.append("\n");
for (String s : sources) {
if (graph.containsVertex(dest) && graph.containsVertex(s)) {
List<DefaultWeightedEdge> shorPath = null;
shorPath = DijkstraShortestPath.findPathBetween(graph, s, dest);
double w = 0;
if (shorPath != null) {
for (DefaultWeightedEdge e : shorPath) {
w += graph.getEdgeWeight(e);
}
DefaultWeightedEdge p = shorPath.get(0);
String e = graph.getEdgeSource(p);
msg.append("source: ").append(e).append(" cost: ").append(w).append("\n");
if (w <= cost) {
cost = w;
shortestPath = shorPath;
if (cost <= sources.size() + 1) {
break;
}
}
}
}
}
Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, msg.toString());
return shortestPath;
}
private double getCost(String v1, String v2) throws InterruptedException, IOException {
// String[] agentPort = getsFlowPort(v1, v2);
// double tpp = getTimePerPacket(agentPort[0], Integer.valueOf(agentPort[1]));
String dpi;
if (v1.contains(":")) {
dpi = v1;
} else {
dpi = v2;
}
Double rbytes = SDNSweep.getReceiveBytesMap().get(dpi);
if (rbytes == null) {
rbytes = Double.valueOf(1);
}
Double tbytes = SDNSweep.getTransmitBytesMap().get(dpi);
if (tbytes == null) {
tbytes = Double.valueOf(1);
}
double bps = (rbytes + tbytes) / SDNSweep.getInterval();
double cost = 1.0 / bps;
Double averageLinkUsage = SDNSweep.getAverageLinkUsageMap().get(dpi);
if (averageLinkUsage != null) {
if (factor > -1) {
cost += averageLinkUsage * factor;
}
}
return (cost > 0) ? cost : 1.2;
}
// private double getCost(String v1, String v2) throws InterruptedException, IOException {
//// String[] agentPort = getsFlowPort(v1, v2);
//// double tpp = getTimePerPacket(agentPort[0], Integer.valueOf(agentPort[1]));
// String dpi;
// if (v1.contains(":")) {
// dpi = v1;
// } else {
// dpi = v2;
// }
//
// // SDNSweep.FloodlightStats[] stats = getFloodlightPortStats(dpi, getPort());
// Double rpps = SDNSweep.getReceivePacketsMap().get(dpi);
// Double tpps = SDNSweep.getTransmitPacketsMap().get(dpi);
// if (rpps == null) {
// rpps = Double.valueOf(1);
// }
// if (tpps == null) {
// tpps = Double.valueOf(1);
// }
//// double rrrt = (interval / rpps);
//// double trrt = (interval / tpps);
//
// double tpp = (rpps > tpps) ? rpps : tpps;
// if (tpp <= 0) {
// tpp = 1;
// }
// Double rbytes = SDNSweep.getReceiveBytesMap().get(dpi);
// if (rbytes == null) {
// rbytes = Double.valueOf(1);
// }
// Double tbytes = SDNSweep.getTransmitBytesMap().get(dpi);
// if (tbytes == null) {
// tbytes = Double.valueOf(1);
// }
// double rbps = rbytes / SDNSweep.interval;
// double tbps = tbytes / SDNSweep.interval;
// double cost = 1.0 / ((rbps + tbps) / 2.0);
//
//
// if (rbytes <= 0) {
// rbytes = Double.valueOf(1);
// }
// if (tbytes <= 0) {
// tbytes = Double.valueOf(1);
// }
//
// double rMTU = rbytes / rpps * 1.0;
// double tMTU = tbytes / tpps * 1.0;
// double mtu = (rMTU > tMTU) ? rMTU : tMTU;
//// if (mtu <= 500) {
//// mtu = 1500;
//// }
//
// //TT=TpP * NoP
// //NoP = {MTU}/{FS}
// //TpP =[({MTU} / {bps}) + RTT] // is the time it takes to transmit one packet or time per packet
// //TT = [({MTU} / {bps}) + RTT] * [ {MTU}/{FS}]
// double nop = mtu / 1024.0;
// double mtt = tpp * nop;
//
//// SDNSweep.OFlow f = SDNSweep.getOFlowsMap().get(dpi);
//// double bps = -1;
//// if (f != null) {
//// bps = f.byteCount / f.durationSeconds * 1.0;
//// double tmp = f.packetCount / f.durationSeconds * 1.0;
//// if (tpp <= 1 && tmp > tpp) {
//// mtt = tmp * nop;
//// }
//// }
//// Double averageLinkUsage = SDNSweep.getAverageLinkUsageMap().get(dpi);
//// if (averageLinkUsage != null) {
////
//// if (factor > -1) {
//// mtt += averageLinkUsage * factor;
//// }
////// For each sec of usage how much extra time we get ?
////// We asume a liner ralationship
////// The longer the usage it means either more transfers per flow or larger files or both
////
////// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "dpi: " + dpi + " averageLinkUsage: " + averageLinkUsage);
//// } else {
//// mtt-=factor;
//// }
//
//// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "From: {0} to: {1} tt: {2}", new Object[]{v1, v2, mtt});
// return (mtt > 0) ? mtt : 1.2;
// }
public void pushFlow(final List<DefaultWeightedEdge> shortestPath) throws IOException {
if (shortestPath != null && !shortestPath.isEmpty()) {
Thread thread = new Thread() {
public void run() {
try {
DefaultWeightedEdge e = shortestPath.get(0);
String pair = e.toString().substring(1, e.toString().length() - 1);
String[] workerSwitch = pair.toString().split(" : ");
String srcIP = workerSwitch[0];
String srcMac = SDNSweep.getNetworkEntity(srcIP).getMac().get(0);
String srcSwitchAndPort = workerSwitch[1];
String srcSwitch = srcSwitchAndPort.split("-")[0];
String srcIngressPort = String.valueOf(SDNSweep.getNetworkEntity(srcIP).getAttachmentPoint().get(0).getPort());
String srcOutput;
e = shortestPath.get(1);
pair = e.toString().substring(1, e.toString().length() - 1);
workerSwitch = pair.split(" : ");
if (workerSwitch[0].equals(srcSwitch + "-" + srcIngressPort)) {
srcOutput = workerSwitch[1].split("-")[1];
} else {
srcOutput = workerSwitch[0].split("-")[1];
}
e = shortestPath.get(shortestPath.size() - 1);
pair = e.toString().substring(1, e.toString().length() - 1);
workerSwitch = pair.toString().split(" : ");
String dstIP = workerSwitch[0];
String dstMac = SDNSweep.getNetworkEntity(dstIP).getMac().get(0);
String dstSwitchAndPort = workerSwitch[1];
String dstSwitch = dstSwitchAndPort.split("-")[0];
String dstOutput = String.valueOf(SDNSweep.getNetworkEntity(dstIP).getAttachmentPoint().get(0).getPort());
e = shortestPath.get(shortestPath.size() - 2);
pair = e.toString().substring(1, e.toString().length() - 1);
workerSwitch = pair.toString().split(" : ");
String node1 = workerSwitch[0];
String node2 = workerSwitch[1];
String dstIngressPort = "";
if (node1.equals(dstSwitch + "-" + dstOutput)) {
dstIngressPort = node2.split("-")[1];
} else {
dstIngressPort = node1.split("-")[1];
}
// String rulesrcToSw = "{\"switch\": \"" + srcSwitch + "\", \"name\":\"tmp\", \"cookie\":\"0\", \"priority\":\"5\", "
// + "\"src-ip\":\"" + srcIP + "\", \"ingress-getPort()\":\"" + srcIngressPort + "\", "
// + "\"dst-ip\": \"" + dstIP + "\", \"active\":\"true\",\"ether-type\":\"0x0800\", "
// + "\"actions\":\"output=" + srcOutput + "\"}";
//
//
// String ruleSwTodst = "{\"switch\": \"" + dstSwitch + "\", \"name\":\"tmp\", \"cookie\":\"0\", \"priority\":\"5\", "
// + "\"src-ip\":\"" + srcIP + "\", \"ingress-getPort()\":\"" + dstIngressPort + "\", "
// + "\"dst-ip\": \"" + dstIP + "\", \"active\":\"true\",\"ether-type\":\"0x0800\", "
// + "\"actions\":\"output=" + dstOutput + "\"}";
String rule11 = "{\"switch\": \"" + srcSwitch + "\", \"name\":\"tmp1-1\", \"cookie\":\"0\", \"priority\":\"5\", "
+ "\"src-mac\":\"" + srcMac + "\", \"ingress-getPort()\":\"" + srcIngressPort + "\", "
+ "\"dst-mac\": \"" + dstMac + "\", \"active\":\"true\",\"vlan-id\":\"-1\", "
+ "\"actions\":\"output=" + srcOutput + "\"}";
String rule12 = "{\"switch\": \"" + srcSwitch + "\", \"name\":\"tmp1-2\", \"cookie\":\"0\", \"priority\":\"5\", "
+ "\"src-mac\":\"" + dstMac + "\", \"ingress-getPort()\":\"" + srcOutput + "\", "
+ "\"dst-mac\": \"" + srcMac + "\", \"active\":\"true\",\"vlan-id\":\"-1\", "
+ "\"actions\":\"output=" + srcIngressPort + "\"}";
String rule21 = "{\"switch\": \"" + dstSwitch + "\", \"name\":\"tmp2-1\", \"cookie\":\"0\", \"priority\":\"5\", "
+ "\"src-mac\":\"" + srcMac + "\", \"ingress-getPort()\":\"" + dstIngressPort + "\", "
+ "\"dst-mac\": \"" + dstMac + "\", \"active\":\"true\",\"vlan-id\":\"-1\", "
+ "\"actions\":\"output=" + dstOutput + "\"}";
String rule22 = "{\"switch\": \"" + dstSwitch + "\", \"name\":\"tmp2-2\", \"cookie\":\"0\", \"priority\":\"5\", "
+ "\"src-mac\":\"" + dstMac + "\", \"ingress-getPort()\":\"" + dstOutput + "\", "
+ "\"dst-mac\": \"" + srcMac + "\", \"active\":\"true\",\"vlan-id\":\"-1\", "
+ "\"actions\":\"output=" + dstIngressPort + "\"}";
List<String> rules = new ArrayList<>();
rules.add(rule11);
rules.add(rule12);
rules.add(rule21);
rules.add(rule22);
try {
new SDNSweep(null).pushFlows(rules);
} catch (IOException ex) {
Logger.getLogger(SDNControllerClient.class.getName()).log(Level.SEVERE, null, ex);
} catch (ParserConfigurationException ex) {
Logger.getLogger(SDNControllerClient.class.getName()).log(Level.SEVERE, null, ex);
} catch (SAXException ex) {
Logger.getLogger(SDNControllerClient.class.getName()).log(Level.SEVERE, null, ex);
}
} catch (IOException ex) {
Logger.getLogger(SDNControllerClient.class.getName()).log(Level.SEVERE, null, ex);
}
}
};
thread.start();
}
}
// private SDNSweep.FloodlightStats[] getFloodlightPortStats(String dpi, int getPort()) throws IOException, InterruptedException {
// SDNSweep.FloodlightStats stats1 = null;
// SDNSweep.FloodlightStats stats2 = null;
// // List<FloodlightStats> stats1 = getFloodlightPortStats(dpi);
// // Thread.sleep((long) interval);
// // List<FloodlightStats> stats2 = getFloodlightPortStats(dpi);
// Map<String, SDNSweep.StatsHolder> map = SDNSweep.getStatsMap();
// if (map != null) {
// SDNSweep.StatsHolder h = map.get(dpi+"-"+getPort());
// if (h != null) {
// stats1 = h.getStats1();
// stats2 = h.getStats2();
// }
// }
// SDNSweep.FloodlightStats stat1 = null;
// for (SDNSweep.FloodlightStats s : stats1) {
// if (s.portNumber == getPort()) {
// stat1 = s;
// break;
// }
// }
//
// SDNSweep.FloodlightStats stat2 = null;
// for (SDNSweep.FloodlightStats s : stats2) {
// if (s.portNumber == getPort()) {
// stat2 = s;
// break;
// }
// }
// return new SDNSweep.FloodlightStats[]{stats1, stats2};
// }
// private String[] getsFlowPort(String v1, String v2) {
// String[] tuple = new String[2];
// if (sFlowHostPortMap == null) {
// sFlowHostPortMap = new HashMap<>();
// }
// if (v1.contains(":") && v2.contains(":")) {
// String switch1IP = getSwitchIPFromDPI(v1);
//// String switch2IP = getSwitchIPFromDPI(v2);
// if (!sFlowHostPortMap.containsKey(switch1IP)) {
// List<Flow> flows = getAgentFlows(switch1IP);
// for (Flow f : flows) {
// String[] keys = f.flowKeys.split(",");
// String from = keys[0];
// String to = keys[1];
// if (!isAttached(from, v1) && isAttached(to, v1)) {
//// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "Switch: " + switch1IP + " -> " + f.dataSource);
// sFlowHostPortMap.put(switch1IP, f.dataSource);
// break;
// }
// }
// }
//// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "Host: " + switch1IP + " getPort(): " + sFlowHostPortMap.get(switch1IP));
// tuple[0] = switch1IP;
// tuple[1] = String.valueOf(sFlowHostPortMap.get(switch1IP));
// return tuple;
// } else {
// String switchIP = null;
// String hostIP = null;
// if (v1.contains(".")) {
// switchIP = getSwitchIPFromHostIP(v1);
// hostIP = v1;
// } else {
// switchIP = getSwitchIPFromHostIP(v2);
// hostIP = v2;
// }
//
// if (!sFlowHostPortMap.containsKey(hostIP)) {
// List<Flow> flows = getAgentFlows(switchIP);
// for (Flow f : flows) {
// String[] keys = f.flowKeys.split(",");
// if (keys[0].equals(hostIP)) {
// sFlowHostPortMap.put(hostIP, f.dataSource);
// break;
// }
// }
// }
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, "Host: " + hostIP + " is attached to: " + switchIP + " getPort(): " + sFlowHostPortMap.get(hostIP));
// tuple[0] = switchIP;
// tuple[1] = String.valueOf(sFlowHostPortMap.get(hostIP));
// return tuple;
// }
// }
//
// private Map<String, Integer> getifNameOpenFlowPortNumberMap(String dpi) {
// HashMap<String, Integer> ifNamePortMap = new HashMap<>();
// List<Switch> switches = getSwitches();
// for (Switch s : switches) {
// if (s.dpid.equals(dpi)) {
// List<Port> ports = s.ports;
// for (Port p : ports) {
// ifNamePortMap.put(p.name, p.portNumber);
// }
// break;
// }
// }
// return ifNamePortMap;
// }
//
// private String getSwitchIPFromDPI(String dpi) {
// for (Switch s : getSwitches()) {
// if (s.dpid.equals(dpi)) {
// return s.inetAddress.split(":")[0].substring(1);
// }
// }
// return null;
// }
//
// private boolean isAttached(String from, String dpi) {
// for (NetworkEntity ne : getNetworkEntity(from)) {
// for (AttachmentPoint ap : ne.getAttachmentPoint()) {
// if (ap.getSwitchDPID().equals(dpi)) {
// return true;
// }
// }
// }
// return false;
// }
// private List<NetworkEntity> getNetworkEntity(String address) {
// if (networkEntityCache == null) {
// networkEntityCache = new HashMap();
// }
// if (!networkEntityCache.containsKey(address)) {
// WebResource webResource = client.resource(uri + ":" + floodlightPort);
// WebResource res = null;
// if (address.contains(".")) {
// // http://145.100.133.131:8080/wm/device/?getIpv4()=192.168.100.1
// res = webResource.path("wm").path("device/").queryParam("getIpv4()", address);
// } else {
// // http://145.100.133.131:8080/wm/device/?getMac()=fe:16:3e:00:26:b1
// res = webResource.path("wm").path("device/").queryParam("getMac()", address);
// }
// List<NetworkEntity> ne = res.get(new GenericType<List<NetworkEntity>>() {
// });
// networkEntityCache.put(address, ne);
// }
// return networkEntityCache.get(address);
// }
//
// private List<NetworkEntity> getNetworkEntity(Set<String> sources) {
// List<NetworkEntity> entities = new ArrayList<>();
// for (String e : sources) {
// entities.addAll(getNetworkEntity(e));
// }
// return entities;
// }
//
// private List<Link> getSwitchLinks() {
// if (linkCache == null) {
// linkCache = new ArrayList<>();
// }
// if (linkCache.isEmpty()) {
// WebResource webResource = client.resource(uri + ":" + floodlightPort);
// WebResource res = webResource.path("wm").path("topology").path("links").path("json");
// linkCache = res.get(new GenericType<List<Link>>() {
// });
// }
//
// return linkCache;
// }
//
// private List<Switch> getSwitches() {
// if (switches == null) {
// WebResource webResource = client.resource(uri + ":" + floodlightPort);
// WebResource res = webResource.path("wm").path("core").path("controller").path("switches").path("json");
// switches = res.get(new GenericType<List<Switch>>() {
// });
// }
// return switches;
// }
//
// private String getSwitchIPFromHostIP(String address) {
// if (networkEntitySwitchMap == null) {
// networkEntitySwitchMap = new HashMap<>();
// }
// if (!networkEntitySwitchMap.containsKey(address)) {
// List<NetworkEntity> ne = getNetworkEntity(address);
// String dpi = ne.get(0).getAttachmentPoint().get(0).getSwitchDPID();
// for (Switch switches : getSwitches()) {
// if (switches.dpid.equals(dpi)) {
// String ip = switches.inetAddress.split(":")[0].substring(1);
// networkEntitySwitchMap.put(address, ip);
// break;
// }
// }
// }
//
// return networkEntitySwitchMap.get(address);
// }
//
// private List<Flow> getAgentFlows(String switchIP) {
// List<Flow> agentFlows = new ArrayList<>();
// for (Flow f : getAllFlows()) {
// if (f.agent.equals(switchIP)) {
// agentFlows.add(f);
// }
// }
// return agentFlows;
// }
//
// private List<Flow> getAllFlows() {
// WebResource webResource = client.resource(uri + ":" + sflowRTPrt);
// WebResource res = webResource.path("flows").path("json");
// return res.get(new GenericType<List<Flow>>() {
// });
// }
//
// private List<Ifpkts> getifoutpktsMetric(String agent, int getPort()) {
// WebResource webResource = client.resource(uri + ":" + sflowRTPrt);
// WebResource res = webResource.path("metric").path(agent).path(getPort() + ".ifoutpkts").path("json");
// return res.get(new GenericType<List<Ifpkts>>() {
// });
// }
//
// private List<FloodlightStats> getFloodlightPortStats(String dpi) throws IOException {
// //http://145.100.133.130:8080/wm/core/switch/00:00:4e:cd:a6:8d:c9:44/getPort()/json
// WebResource webResource = client.resource(uri + ":" + floodlightPort);
// WebResource res = webResource.path("wm").path("core").path("switch").path(dpi).path("getPort()").path("json");
//
//
// String output = res.get(String.class);
// String out = output.substring(27, output.length() - 1);
//
// ObjectMapper mapper = new ObjectMapper();
// return mapper.readValue(out, mapper.getTypeFactory().constructCollectionType(List.class, FloodlightStats.class));
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class Ifpkts {
//
// @XmlElement(name = "agent")
// String agent;
// @XmlElement(name = "dataSource")
// int dataSource;
// @XmlElement(name = "lastUpdate")
// long lastUpdate;
// /**
// * The lastUpdateMax and lastUpdateMin values indicate how long ago (in
// * milliseconds) the most recent and oldest updates
// */
// @XmlElement(name = "lastUpdateMax")
// long lastUpdateMax;
// @XmlElement(name = "lastUpdateMin")
// /**
// * The metricN field in the query result indicates the number of data
// * sources that contributed to the summary metrics
// */
// long lastUpdateMin;
// @XmlElement(name = "metricN")
// int metricN;
// @XmlElement(name = "metricName")
// String metricName;
// @XmlElement(name = "metricValue")
// double value;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class Flow {
//
// @XmlElement(name = "agent")
// String agent;
// @XmlElement(name = "dataSource")
// int dataSource;
// @XmlElement(name = "end")
// String end;
// @XmlElement(name = "flowID")
// int flowID;
// @XmlElement(name = "flowKeys")
// String flowKeys;
// @XmlElement(name = "name")
// String name;
// @XmlElement(name = "start")
// long start;
// @XmlElement(name = "value")
// double value;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class NetworkEntity {
//
// @XmlElement(name = "entityClass")
// String entityClass;
// @XmlElement(name = "lastSeen")
// String lastSeen;
// @XmlElement(name = "getIpv4()")
// List<String> getIpv4();
// @XmlElement(name = "vlan")
// List<String> vlan;
// @XmlElement(name = "getMac()")
// List<String> getMac();
// @XmlElement(name = "getAttachmentPoint()")
// List<AttachmentPoint> getAttachmentPoint();
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class AttachmentPoint {
//
// @XmlElement(name = "getPort()")
// int getPort();
// @XmlElement(name = "errorStatus")
// String errorStatus;
// @XmlElement(name = "getSwitchDPID()")
// String getSwitchDPID();
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// private static class Link {
//
// @XmlElement(name = "src-switch")
// String srcSwitch;
// @XmlElement(name = "src-getPort()")
// int getSrcPort();
// @XmlElement(name = "dst-switch")
// String dstSwitch;
// @XmlElement(name = "dst-getPort()")
// int dstPort;
// @XmlElement(name = "type")
// String type;
// @XmlElement(name = "direction")
// String direction;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class Switch {
//
// @XmlElement(name = "actions")
// int actions;
// @XmlElement(name = "attributes")
// Attributes attributes;
// @XmlElement(name = "ports")
// List<Port> ports;
// @XmlElement(name = "buffers")
// int buffers;
// @XmlElement(name = "description")
// Description description;
// @XmlElement(name = "capabilities")
// int capabilities;
// @XmlElement(name = "inetAddress")
// String inetAddress;
// @XmlElement(name = "connectedSince")
// long connectedSince;
// @XmlElement(name = "dpid")
// String dpid;
// @XmlElement(name = "harole")
// String harole;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// private static class Description {
//
// @XmlElement(name = "software")
// String software;
// @XmlElement(name = "hardware")
// String hardware;
// @XmlElement(name = "manufacturer")
// String manufacturer;
// @XmlElement(name = "serialNum")
// String serialNum;
// @XmlElement(name = "datapath")
// String datapath;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// private static class Port {
//
// @XmlElement(name = "portNumber")
// int portNumber;
// @XmlElement(name = "hardwareAddress")
// String hardwareAddress;
// @XmlElement(name = "name")
// String name;
// @XmlElement(name = "config")
// int config;
// @XmlElement(name = "state")
// int state;
// @XmlElement(name = "currentFeatures")
// int currentFeatures;
// @XmlElement(name = "advertisedFeatures")
// int advertisedFeatures;
// @XmlElement(name = "supportedFeatures")
// int supportedFeatures;
// @XmlElement(name = "peerFeatures")
// int peerFeatures;
// }
//
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// private static class Attributes {
//
// @XmlElement(name = "supportsOfppFlood")
// boolean supportsOfppFlood;
// @XmlElement(name = "supportsNxRole")
// boolean supportsNxRole;
// @XmlElement(name = "FastWildcards")
// int fastWildcards;
// @XmlElement(name = "supportsOfppTable")
// boolean supportsOfppTable;
// }
//
//// @XmlRootElement
//// @XmlAccessorType(XmlAccessType.FIELD)
////// @JsonIgnoreProperties(ignoreUnknown = true)
//// public static class FloodlightStatsWrapper {
////
////// @XmlElement(name = "00:00:4e:cd:a6:8d:c9:44")
//// @XmlElementWrapper
////// @XmlElement
//// List<FloodlightStats> stats;
//// }
// @XmlRootElement
// @XmlAccessorType(XmlAccessType.FIELD)
// public static class FloodlightStats {
//
// @JsonProperty("portNumber")
// int portNumber;
// @JsonProperty("receivePackets")
// long receivePackets;
// @JsonProperty("transmitPackets")
// long transmitPackets;
// @JsonProperty("receiveBytes")
// long receiveBytes;
// @JsonProperty("transmitBytes")
// long transmitBytes;
// @JsonProperty("receiveDropped")
// long receiveDropped;
// @JsonProperty("transmitDropped")
// long transmitDropped;
// @JsonProperty("receiveErrors")
// long receiveErrors;
// @JsonProperty("transmitErrors")
// long transmitErrors;
// @JsonProperty("receiveFrameErrors")
// long receiveFrameErrors;
// @JsonProperty("receiveOverrunErrors")
// long receiveOverrunErrors;
// @JsonProperty("receiveCRCErrors")
// long receiveCRCErrors;
// @JsonProperty("collisions")
// long collisions;
// }
private void exportGraph() throws IOException {
if (dot == null) {
dot = new DOTExporter(new IntegerNameProvider(), new VertexNameProvider() {
public String getVertexName(Object object) {
if (object == null) {
return "none";
}
return object.toString().replaceAll("\"", "\'");
}
}, new EdgeNameProvider<Object>() {
public String getEdgeName(Object object) {
if (object == null) {
return "none";
}
DefaultWeightedEdge e1 = (DefaultWeightedEdge) object;
// Logger.getLogger(SDNControllerClient.class.getName()).log(Level.INFO, object.getClass().getName());
return String.valueOf(graph.getEdgeWeight(e1)); //object.toString().replaceAll("\"", "\'");
}
});
}
File f = new File("/home/alogo/Downloads/sdn_graph.dot");
f.delete();
dot.export(new FileWriter(f), graph);
}
}
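// Usage sketch (not part of the original file): choosing the cheapest replica for a
// destination host and installing the matching flows. The controller URI and IP
// addresses are illustrative.
/*
SDNControllerClient client = new SDNControllerClient("http://controller.example.org");
Set<String> replicas = new HashSet<>(Arrays.asList("192.168.100.11", "192.168.100.12"));
List<DefaultWeightedEdge> path = client.getShortestPath("192.168.100.20", replicas);
if (path != null) {
    client.pushFlow(path);
}
*/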
| skoulouzis/lobcder | lobcder-master/src/main/java/nl/uva/cs/lobcder/optimization/SDNControllerClient.java | Java | apache-2.0 | 37,401 |
/*
* BJAF - Beetle J2EE Application Framework
* The Beetle (甲壳虫) J2EE enterprise application development framework
* Copyright 2003-2015 余浩东 (www.beetlesoft.net)
*
* This is free and open-source software; you may use, modify, or
* redistribute it only in compliance with the license at
* <http://www.apache.org/licenses/LICENSE-2.0>.
*
* Thank you for using and promoting this framework. If you have any
* suggestions or questions, you are welcome to contact me.
* Email: <yuhaodong@gmail.com/>.
*/
package com.beetle.framework.persistence.seq.imp;
import com.beetle.framework.persistence.access.operator.QueryOperator;
import com.beetle.framework.persistence.access.operator.RsDataSet;
import com.beetle.framework.persistence.seq.ISequence;
import com.beetle.framework.persistence.seq.SeqType;
import com.beetle.framework.persistence.seq.SeqType.SeqImpType;
public class OracleSeqGenerator implements ISequence {
private static OracleSeqGenerator generator = new OracleSeqGenerator();
private OracleSeqGenerator() {
}
public static ISequence getInstance() {
return generator;
}
/**
* Returns the next value of the Oracle sequence described by the given
* {@link SeqType}.
*
* @param seqtype
* SeqType describing the data source and the Oracle sequence name
* @return the next sequence value
*/
public long nextSequenceNum(SeqType seqtype) {
long r;
QueryOperator qo = new QueryOperator();
qo.setDataSourceName(seqtype.getDataSourceName());
qo.setSql("SELECT " + seqtype.getSequenceName() + ".nextval FROM dual");
try {
qo.access();
RsDataSet rs = new RsDataSet(qo.getSqlResultSet());
try {
r = rs.getFieldValueAsLong(0).longValue();
} catch (java.lang.ClassCastException cce) {
r = rs.getFieldValueAsInteger(0).longValue();
}
rs.clearAll();
} catch (Exception e) {
e.printStackTrace();
throw new java.lang.RuntimeException("get oracle sequence err", e);
}
return r;
}
public SeqImpType getImpType() {
return SeqType.SeqImpType.Oracle;
}
public void initSequenceValue(int initValue, SeqType seqtype) {
throw new com.beetle.framework.AppRuntimeException("Not implemented");
}
}
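// Usage sketch (not part of the original file): fetching the next value of an Oracle
// sequence through the framework's SeqType descriptor. How the SeqType instance is
// obtained is framework-specific and left as an assumption here.
/*
SeqType seqType = ...; // configured with the data source name and the Oracle sequence name
long next = OracleSeqGenerator.getInstance().nextSequenceNum(seqType);
*/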
| jbeetle/BJAF3.x | src/main/com/beetle/framework/persistence/seq/imp/OracleSeqGenerator.java | Java | apache-2.0 | 2,119 |
/**
* Copyright (c) 2013, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.science.ml.parallel.crossfold;
import java.io.Serializable;
import java.util.Random;
import org.apache.crunch.MapFn;
import org.apache.crunch.PCollection;
import org.apache.crunch.Pair;
import org.apache.crunch.types.PType;
import org.apache.crunch.types.PTypeFamily;
import com.google.common.base.Preconditions;
/**
* Supports creating partitions of {@code PCollection}s for performing
* cross-validations.
*/
public class Crossfold implements Serializable {
/**
* The default seed allows us to partition an identical dataset the
* same way on every pass over it, even from different Crunch jobs.
*/
public static final long DEFAULT_SEED = 1729L;
private final int numFolds;
private final long seed;
public Crossfold(int numFolds) {
this(numFolds, DEFAULT_SEED);
}
public Crossfold(int numFolds, long seed) {
Preconditions.checkArgument(numFolds > 0, "Number of folds must be greater than zero");
this.numFolds = numFolds;
this.seed = seed;
}
public int getNumFolds() {
return numFolds;
}
public <T> PCollection<Pair<Integer, T>> apply(PCollection<T> pcollect) {
PTypeFamily ptf = pcollect.getTypeFamily();
PType<Pair<Integer, T>> pt = ptf.pairs(ptf.ints(), pcollect.getPType());
return pcollect.parallelDo("crossfold", new MapFn<T, Pair<Integer, T>>() {
private transient Random rand;
@Override
public void initialize() {
if (rand == null) {
this.rand = new Random(seed);
}
}
@Override
public Pair<Integer, T> map(T t) {
return Pair.of(rand.nextInt(numFolds), t);
}
}, pt);
}
}
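// Usage sketch (not part of the original file): assigning records to folds and keeping
// fold 0 as the held-out split. "records" is an existing PCollection<String>; the
// FilterFn used below is standard Crunch API.
/*
Crossfold crossfold = new Crossfold(5);
PCollection<Pair<Integer, String>> folded = crossfold.apply(records);
PCollection<Pair<Integer, String>> heldOut = folded.filter(new FilterFn<Pair<Integer, String>>() {
    @Override
    public boolean accept(Pair<Integer, String> p) {
        return p.first() == 0;
    }
});
*/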
| xwjxwj30abc/ml | parallel/src/main/java/com/cloudera/science/ml/parallel/crossfold/Crossfold.java | Java | apache-2.0 | 2,257 |
package com.skycaster.geomapper.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.util.DisplayMetrics;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.TextView;
import com.skycaster.geomapper.R;
import java.util.ArrayList;
/**
* Created by 廖华凯 on 2017/10/28.
*/
public class RawDataRecyclerViewAdapter extends RecyclerView.Adapter<RawDataRecyclerViewAdapter.ViewHolder> {
private ArrayList<String> mList;
private Context mContext;
private float mTextSize;
public RawDataRecyclerViewAdapter(ArrayList<String> list, Context context) {
mList = list;
mContext = context;
DisplayMetrics metrics=new DisplayMetrics();
WindowManager manager= (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
manager.getDefaultDisplay().getMetrics(metrics);
mTextSize = mContext.getResources().getDimension(R.dimen.sp_24) / metrics.scaledDensity;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new ViewHolder(View.inflate(mContext,android.R.layout.simple_list_item_1,null));
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.getTextView().setText(mList.get(position));
}
@Override
public int getItemCount() {
return mList.size();
}
class ViewHolder extends RecyclerView.ViewHolder{
private TextView mTextView;
public ViewHolder(View itemView) {
super(itemView);
mTextView= (TextView) itemView;
mTextView.setTextSize(mTextSize);
}
public TextView getTextView() {
return mTextView;
}
}
}
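// Usage sketch (not part of the original file): wiring the adapter to a RecyclerView.
// "recyclerView" and "rawDataLines" are assumed to exist in the hosting activity.
/*
RawDataRecyclerViewAdapter adapter = new RawDataRecyclerViewAdapter(rawDataLines, this);
recyclerView.setLayoutManager(new LinearLayoutManager(this));
recyclerView.setAdapter(adapter);
adapter.notifyDataSetChanged(); // call again whenever rawDataLines changes
*/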
| leoliao2008/GeoMapper | app/src/main/java/com/skycaster/geomapper/adapter/RawDataRecyclerViewAdapter.java | Java | apache-2.0 | 1,817 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticsearch.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Container for response returned by <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetCompatibleElasticsearchVersionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
Cloneable {
/**
* <p>
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
*/
private java.util.List<CompatibleVersionsMap> compatibleElasticsearchVersions;
/**
* <p>
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
*
* @return A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
*/
public java.util.List<CompatibleVersionsMap> getCompatibleElasticsearchVersions() {
return compatibleElasticsearchVersions;
}
/**
* <p>
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
*
* @param compatibleElasticsearchVersions
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
*/
public void setCompatibleElasticsearchVersions(java.util.Collection<CompatibleVersionsMap> compatibleElasticsearchVersions) {
if (compatibleElasticsearchVersions == null) {
this.compatibleElasticsearchVersions = null;
return;
}
this.compatibleElasticsearchVersions = new java.util.ArrayList<CompatibleVersionsMap>(compatibleElasticsearchVersions);
}
/**
* <p>
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setCompatibleElasticsearchVersions(java.util.Collection)} or
* {@link #withCompatibleElasticsearchVersions(java.util.Collection)} if you want to override the existing values.
* </p>
*
* @param compatibleElasticsearchVersions
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetCompatibleElasticsearchVersionsResult withCompatibleElasticsearchVersions(CompatibleVersionsMap... compatibleElasticsearchVersions) {
if (this.compatibleElasticsearchVersions == null) {
setCompatibleElasticsearchVersions(new java.util.ArrayList<CompatibleVersionsMap>(compatibleElasticsearchVersions.length));
}
for (CompatibleVersionsMap ele : compatibleElasticsearchVersions) {
this.compatibleElasticsearchVersions.add(ele);
}
return this;
}
/**
* <p>
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* </p>
*
* @param compatibleElasticsearchVersions
* A map of compatible Elasticsearch versions returned as part of the
* <code> <a>GetCompatibleElasticsearchVersions</a> </code> operation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetCompatibleElasticsearchVersionsResult withCompatibleElasticsearchVersions(
java.util.Collection<CompatibleVersionsMap> compatibleElasticsearchVersions) {
setCompatibleElasticsearchVersions(compatibleElasticsearchVersions);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getCompatibleElasticsearchVersions() != null)
sb.append("CompatibleElasticsearchVersions: ").append(getCompatibleElasticsearchVersions());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetCompatibleElasticsearchVersionsResult == false)
return false;
GetCompatibleElasticsearchVersionsResult other = (GetCompatibleElasticsearchVersionsResult) obj;
if (other.getCompatibleElasticsearchVersions() == null ^ this.getCompatibleElasticsearchVersions() == null)
return false;
if (other.getCompatibleElasticsearchVersions() != null
&& other.getCompatibleElasticsearchVersions().equals(this.getCompatibleElasticsearchVersions()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getCompatibleElasticsearchVersions() == null) ? 0 : getCompatibleElasticsearchVersions().hashCode());
return hashCode;
}
@Override
public GetCompatibleElasticsearchVersionsResult clone() {
try {
return (GetCompatibleElasticsearchVersionsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| aws/aws-sdk-java | aws-java-sdk-elasticsearch/src/main/java/com/amazonaws/services/elasticsearch/model/GetCompatibleElasticsearchVersionsResult.java | Java | apache-2.0 | 6,810 |
package com.kowaisugoi.game.audio;
public enum MusicId {
NONE, MAIN_MENU, DARK, DRONE, BEDROOM, COZY, CRAWLSPACE, HOWL, WIND
}
| gjhutchison/pixelhorrorjam2016 | core/src/com/kowaisugoi/game/audio/MusicId.java | Java | apache-2.0 | 132 |
/*
* Copyright 2016-2017 Testify Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.testifyproject.junit4.integration;
import static org.assertj.core.api.Assertions.assertThat;
import javax.inject.Provider;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.testifyproject.annotation.Fake;
import org.testifyproject.annotation.Module;
import org.testifyproject.annotation.Sut;
import org.testifyproject.junit4.IntegrationTest;
import org.testifyproject.junit4.fixture.GreetingModule;
import org.testifyproject.junit4.fixture.common.Greeting;
import org.testifyproject.junit4.fixture.service.ProviderGreeting;
/**
*
* @author saden
*/
@Module(GreetingModule.class)
@RunWith(IntegrationTest.class)
public class ProviderGreeterFakeIT {
@Sut
ProviderGreeting sut;
@Fake
Provider<Greeting> greeting;
@Test
public void verifyInjection() {
assertThat(sut).isNotNull();
assertThat(greeting).isNotNull().isSameAs(sut.getGreeting());
assertThat(Mockito.mockingDetails(greeting).isMock()).isTrue();
}
}
| testify-project/testify | modules/junit4/guice-integration-test/src/test/java/org/testifyproject/junit4/integration/ProviderGreeterFakeIT.java | Java | apache-2.0 | 1,633 |
package com.unipad.singlebrain.longPoker.view;
import android.support.annotation.NonNull;
import android.support.v4.animation.AnimatorCompatHelper;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorCompat;
import android.support.v4.view.ViewPropertyAnimatorListener;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SimpleItemAnimator;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
/**
* Created by gongkan on 2016/8/19.
*/
public class NoAlphaItemAnimator extends SimpleItemAnimator {
private static final boolean DEBUG = false;
private ArrayList<RecyclerView.ViewHolder> mPendingRemovals = new ArrayList<>();
private ArrayList<RecyclerView.ViewHolder> mPendingAdditions = new ArrayList<>();
private ArrayList<MoveInfo> mPendingMoves = new ArrayList<>();
private ArrayList<ChangeInfo> mPendingChanges = new ArrayList<>();
private ArrayList<ArrayList<RecyclerView.ViewHolder>> mAdditionsList = new ArrayList<>();
private ArrayList<ArrayList<MoveInfo>> mMovesList = new ArrayList<>();
private ArrayList<ArrayList<ChangeInfo>> mChangesList = new ArrayList<>();
private ArrayList<RecyclerView.ViewHolder> mAddAnimations = new ArrayList<>();
private ArrayList<RecyclerView.ViewHolder> mMoveAnimations = new ArrayList<>();
private ArrayList<RecyclerView.ViewHolder> mRemoveAnimations = new ArrayList<>();
private ArrayList<RecyclerView.ViewHolder> mChangeAnimations = new ArrayList<>();
private static class MoveInfo {
public RecyclerView.ViewHolder holder;
public int fromX, fromY, toX, toY;
private MoveInfo(RecyclerView.ViewHolder holder, int fromX, int fromY, int toX, int toY) {
this.holder = holder;
this.fromX = fromX;
this.fromY = fromY;
this.toX = toX;
this.toY = toY;
}
}
private static class ChangeInfo {
public RecyclerView.ViewHolder oldHolder, newHolder;
public int fromX, fromY, toX, toY;
private ChangeInfo(RecyclerView.ViewHolder oldHolder, RecyclerView.ViewHolder newHolder) {
this.oldHolder = oldHolder;
this.newHolder = newHolder;
}
private ChangeInfo(RecyclerView.ViewHolder oldHolder, RecyclerView.ViewHolder newHolder,
int fromX, int fromY, int toX, int toY) {
this(oldHolder, newHolder);
this.fromX = fromX;
this.fromY = fromY;
this.toX = toX;
this.toY = toY;
}
@Override
public String toString() {
return "ChangeInfo{" +
"oldHolder=" + oldHolder +
", newHolder=" + newHolder +
", fromX=" + fromX +
", fromY=" + fromY +
", toX=" + toX +
", toY=" + toY +
'}';
}
}
@Override
public void runPendingAnimations() {
boolean removalsPending = !mPendingRemovals.isEmpty();
boolean movesPending = !mPendingMoves.isEmpty();
boolean changesPending = !mPendingChanges.isEmpty();
boolean additionsPending = !mPendingAdditions.isEmpty();
if (!removalsPending && !movesPending && !additionsPending && !changesPending) {
// nothing to animate
return;
}
// First, remove stuff
for (RecyclerView.ViewHolder holder : mPendingRemovals) {
animateRemoveImpl(holder);
}
mPendingRemovals.clear();
// Next, move stuff
if (movesPending) {
final ArrayList<MoveInfo> moves = new ArrayList<>();
moves.addAll(mPendingMoves);
mMovesList.add(moves);
mPendingMoves.clear();
Runnable mover = new Runnable() {
@Override
public void run() {
for (MoveInfo moveInfo : moves) {
animateMoveImpl(moveInfo.holder, moveInfo.fromX, moveInfo.fromY,
moveInfo.toX, moveInfo.toY);
}
moves.clear();
mMovesList.remove(moves);
}
};
if (removalsPending) {
View view = moves.get(0).holder.itemView;
ViewCompat.postOnAnimationDelayed(view, mover, getRemoveDuration());
} else {
mover.run();
}
}
// Next, change stuff, to run in parallel with move animations
if (changesPending) {
final ArrayList<ChangeInfo> changes = new ArrayList<>();
changes.addAll(mPendingChanges);
mChangesList.add(changes);
mPendingChanges.clear();
Runnable changer = new Runnable() {
@Override
public void run() {
for (ChangeInfo change : changes) {
animateChangeImpl(change);
}
changes.clear();
mChangesList.remove(changes);
}
};
if (removalsPending) {
RecyclerView.ViewHolder holder = changes.get(0).oldHolder;
ViewCompat.postOnAnimationDelayed(holder.itemView, changer, getRemoveDuration());
} else {
changer.run();
}
}
// Next, add stuff
if (additionsPending) {
final ArrayList<RecyclerView.ViewHolder> additions = new ArrayList<>();
additions.addAll(mPendingAdditions);
mAdditionsList.add(additions);
mPendingAdditions.clear();
Runnable adder = new Runnable() {
public void run() {
for (RecyclerView.ViewHolder holder : additions) {
animateAddImpl(holder);
}
additions.clear();
mAdditionsList.remove(additions);
}
};
if (removalsPending || movesPending || changesPending) {
long removeDuration = removalsPending ? getRemoveDuration() : 0;
long moveDuration = movesPending ? getMoveDuration() : 0;
long changeDuration = changesPending ? getChangeDuration() : 0;
long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
View view = additions.get(0).itemView;
ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
} else {
adder.run();
}
}
}
@Override
public boolean animateRemove(final RecyclerView.ViewHolder holder) {
resetAnimation(holder);
mPendingRemovals.add(holder);
return true;
}
private void animateRemoveImpl(final RecyclerView.ViewHolder holder) {
final View view = holder.itemView;
final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
mRemoveAnimations.add(holder);
animation.setDuration(getRemoveDuration())
.alpha(0).setListener(new VpaListenerAdapter() {
@Override
public void onAnimationStart(View view) {
dispatchRemoveStarting(holder);
}
@Override
public void onAnimationEnd(View view) {
animation.setListener(null);
ViewCompat.setAlpha(view, 1);
dispatchRemoveFinished(holder);
mRemoveAnimations.remove(holder);
dispatchFinishedWhenDone();
}
}).start();
}
@Override
public boolean animateAdd(final RecyclerView.ViewHolder holder) {
resetAnimation(holder);
ViewCompat.setAlpha(holder.itemView, 0);
mPendingAdditions.add(holder);
return true;
}
private void animateAddImpl(final RecyclerView.ViewHolder holder) {
final View view = holder.itemView;
final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
mAddAnimations.add(holder);
animation.alpha(1).setDuration(getAddDuration()).
setListener(new VpaListenerAdapter() {
@Override
public void onAnimationStart(View view) {
dispatchAddStarting(holder);
}
@Override
public void onAnimationCancel(View view) {
ViewCompat.setAlpha(view, 1);
}
@Override
public void onAnimationEnd(View view) {
animation.setListener(null);
dispatchAddFinished(holder);
mAddAnimations.remove(holder);
dispatchFinishedWhenDone();
}
}).start();
}
@Override
public boolean animateMove(final RecyclerView.ViewHolder holder, int fromX, int fromY,
int toX, int toY) {
final View view = holder.itemView;
fromX += ViewCompat.getTranslationX(holder.itemView);
fromY += ViewCompat.getTranslationY(holder.itemView);
resetAnimation(holder);
int deltaX = toX - fromX;
int deltaY = toY - fromY;
if (deltaX == 0 && deltaY == 0) {
dispatchMoveFinished(holder);
return false;
}
if (deltaX != 0) {
ViewCompat.setTranslationX(view, -deltaX);
}
if (deltaY != 0) {
ViewCompat.setTranslationY(view, -deltaY);
}
mPendingMoves.add(new MoveInfo(holder, fromX, fromY, toX, toY));
return true;
}
private void animateMoveImpl(final RecyclerView.ViewHolder holder, int fromX, int fromY, int toX, int toY) {
final View view = holder.itemView;
final int deltaX = toX - fromX;
final int deltaY = toY - fromY;
if (deltaX != 0) {
ViewCompat.animate(view).translationX(0);
}
if (deltaY != 0) {
ViewCompat.animate(view).translationY(0);
}
// TODO: make EndActions end listeners instead, since end actions aren't called when
// vpas are canceled (and can't end them. why?)
// need listener functionality in VPACompat for this. Ick.
final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
mMoveAnimations.add(holder);
animation.setDuration(getMoveDuration()).setListener(new VpaListenerAdapter() {
@Override
public void onAnimationStart(View view) {
dispatchMoveStarting(holder);
}
@Override
public void onAnimationCancel(View view) {
if (deltaX != 0) {
ViewCompat.setTranslationX(view, 0);
}
if (deltaY != 0) {
ViewCompat.setTranslationY(view, 0);
}
}
@Override
public void onAnimationEnd(View view) {
animation.setListener(null);
dispatchMoveFinished(holder);
mMoveAnimations.remove(holder);
dispatchFinishedWhenDone();
}
}).start();
}
@Override
public boolean animateChange(RecyclerView.ViewHolder oldHolder, RecyclerView.ViewHolder newHolder,
int fromX, int fromY, int toX, int toY) {
if (oldHolder == newHolder) {
// Don't know how to run change animations when the same view holder is re-used.
// run a move animation to handle position changes.
return animateMove(oldHolder, fromX, fromY, toX, toY);
}
final float prevTranslationX = ViewCompat.getTranslationX(oldHolder.itemView);
final float prevTranslationY = ViewCompat.getTranslationY(oldHolder.itemView);
final float prevAlpha = ViewCompat.getAlpha(oldHolder.itemView);
resetAnimation(oldHolder);
int deltaX = (int) (toX - fromX - prevTranslationX);
int deltaY = (int) (toY - fromY - prevTranslationY);
// recover prev translation state after ending animation
ViewCompat.setTranslationX(oldHolder.itemView, prevTranslationX);
ViewCompat.setTranslationY(oldHolder.itemView, prevTranslationY);
ViewCompat.setAlpha(oldHolder.itemView, prevAlpha);
if (newHolder != null) {
// carry over translation values
resetAnimation(newHolder);
ViewCompat.setTranslationX(newHolder.itemView, -deltaX);
ViewCompat.setTranslationY(newHolder.itemView, -deltaY);
ViewCompat.setAlpha(newHolder.itemView, 0);
}
mPendingChanges.add(new ChangeInfo(oldHolder, newHolder, fromX, fromY, toX, toY));
return true;
}
private void animateChangeImpl(final ChangeInfo changeInfo) {
final RecyclerView.ViewHolder holder = changeInfo.oldHolder;
final View view = holder == null ? null : holder.itemView;
final RecyclerView.ViewHolder newHolder = changeInfo.newHolder;
final View newView = newHolder != null ? newHolder.itemView : null;
if (view != null) {
final ViewPropertyAnimatorCompat oldViewAnim = ViewCompat.animate(view).setDuration(
getChangeDuration());
mChangeAnimations.add(changeInfo.oldHolder);
oldViewAnim.translationX(changeInfo.toX - changeInfo.fromX);
oldViewAnim.translationY(changeInfo.toY - changeInfo.fromY);
oldViewAnim.setListener(new VpaListenerAdapter() {
@Override
public void onAnimationStart(View view) {
dispatchChangeStarting(changeInfo.oldHolder, true);
}
@Override
public void onAnimationEnd(View view) {
oldViewAnim.setListener(null);
ViewCompat.setAlpha(view, 1);
ViewCompat.setTranslationX(view, 0);
ViewCompat.setTranslationY(view, 0);
dispatchChangeFinished(changeInfo.oldHolder, true);
mChangeAnimations.remove(changeInfo.oldHolder);
dispatchFinishedWhenDone();
}
}).start();
}
if (newView != null) {
final ViewPropertyAnimatorCompat newViewAnimation = ViewCompat.animate(newView);
mChangeAnimations.add(changeInfo.newHolder);
newViewAnimation.translationX(0).translationY(0).setDuration(getChangeDuration()).
setListener(new VpaListenerAdapter() {
@Override
public void onAnimationStart(View view) {
dispatchChangeStarting(changeInfo.newHolder, false);
}
@Override
public void onAnimationEnd(View view) {
newViewAnimation.setListener(null);
ViewCompat.setAlpha(newView, 1);
ViewCompat.setTranslationX(newView, 0);
ViewCompat.setTranslationY(newView, 0);
dispatchChangeFinished(changeInfo.newHolder, false);
mChangeAnimations.remove(changeInfo.newHolder);
dispatchFinishedWhenDone();
}
}).start();
}
}
private void endChangeAnimation(List<ChangeInfo> infoList, RecyclerView.ViewHolder item) {
for (int i = infoList.size() - 1; i >= 0; i--) {
ChangeInfo changeInfo = infoList.get(i);
if (endChangeAnimationIfNecessary(changeInfo, item)) {
if (changeInfo.oldHolder == null && changeInfo.newHolder == null) {
infoList.remove(changeInfo);
}
}
}
}
private void endChangeAnimationIfNecessary(ChangeInfo changeInfo) {
if (changeInfo.oldHolder != null) {
endChangeAnimationIfNecessary(changeInfo, changeInfo.oldHolder);
}
if (changeInfo.newHolder != null) {
endChangeAnimationIfNecessary(changeInfo, changeInfo.newHolder);
}
}
private boolean endChangeAnimationIfNecessary(ChangeInfo changeInfo, RecyclerView.ViewHolder item) {
boolean oldItem = false;
if (changeInfo.newHolder == item) {
changeInfo.newHolder = null;
} else if (changeInfo.oldHolder == item) {
changeInfo.oldHolder = null;
oldItem = true;
} else {
return false;
}
ViewCompat.setAlpha(item.itemView, 1);
ViewCompat.setTranslationX(item.itemView, 0);
ViewCompat.setTranslationY(item.itemView, 0);
dispatchChangeFinished(item, oldItem);
return true;
}
@Override
public void endAnimation(RecyclerView.ViewHolder item) {
final View view = item.itemView;
// this will trigger end callback which should set properties to their target values.
ViewCompat.animate(view).cancel();
// TODO if some other animations are chained to end, how do we cancel them as well?
for (int i = mPendingMoves.size() - 1; i >= 0; i--) {
MoveInfo moveInfo = mPendingMoves.get(i);
if (moveInfo.holder == item) {
ViewCompat.setTranslationY(view, 0);
ViewCompat.setTranslationX(view, 0);
dispatchMoveFinished(item);
mPendingMoves.remove(i);
}
}
endChangeAnimation(mPendingChanges, item);
if (mPendingRemovals.remove(item)) {
ViewCompat.setAlpha(view, 1);
dispatchRemoveFinished(item);
}
if (mPendingAdditions.remove(item)) {
ViewCompat.setAlpha(view, 1);
dispatchAddFinished(item);
}
for (int i = mChangesList.size() - 1; i >= 0; i--) {
ArrayList<ChangeInfo> changes = mChangesList.get(i);
endChangeAnimation(changes, item);
if (changes.isEmpty()) {
mChangesList.remove(i);
}
}
for (int i = mMovesList.size() - 1; i >= 0; i--) {
ArrayList<MoveInfo> moves = mMovesList.get(i);
for (int j = moves.size() - 1; j >= 0; j--) {
MoveInfo moveInfo = moves.get(j);
if (moveInfo.holder == item) {
ViewCompat.setTranslationY(view, 0);
ViewCompat.setTranslationX(view, 0);
dispatchMoveFinished(item);
moves.remove(j);
if (moves.isEmpty()) {
mMovesList.remove(i);
}
break;
}
}
}
for (int i = mAdditionsList.size() - 1; i >= 0; i--) {
ArrayList<RecyclerView.ViewHolder> additions = mAdditionsList.get(i);
if (additions.remove(item)) {
ViewCompat.setAlpha(view, 1);
dispatchAddFinished(item);
if (additions.isEmpty()) {
mAdditionsList.remove(i);
}
}
}
// animations should be ended by the cancel above.
//noinspection PointlessBooleanExpression,ConstantConditions
if (mRemoveAnimations.remove(item) && DEBUG) {
throw new IllegalStateException("after animation is cancelled, item should not be in "
+ "mRemoveAnimations list");
}
//noinspection PointlessBooleanExpression,ConstantConditions
if (mAddAnimations.remove(item) && DEBUG) {
throw new IllegalStateException("after animation is cancelled, item should not be in "
+ "mAddAnimations list");
}
//noinspection PointlessBooleanExpression,ConstantConditions
if (mChangeAnimations.remove(item) && DEBUG) {
throw new IllegalStateException("after animation is cancelled, item should not be in "
+ "mChangeAnimations list");
}
//noinspection PointlessBooleanExpression,ConstantConditions
if (mMoveAnimations.remove(item) && DEBUG) {
throw new IllegalStateException("after animation is cancelled, item should not be in "
+ "mMoveAnimations list");
}
dispatchFinishedWhenDone();
}
private void resetAnimation(RecyclerView.ViewHolder holder) {
AnimatorCompatHelper.clearInterpolator(holder.itemView);
endAnimation(holder);
}
@Override
public boolean isRunning() {
return (!mPendingAdditions.isEmpty() ||
!mPendingChanges.isEmpty() ||
!mPendingMoves.isEmpty() ||
!mPendingRemovals.isEmpty() ||
!mMoveAnimations.isEmpty() ||
!mRemoveAnimations.isEmpty() ||
!mAddAnimations.isEmpty() ||
!mChangeAnimations.isEmpty() ||
!mMovesList.isEmpty() ||
!mAdditionsList.isEmpty() ||
!mChangesList.isEmpty());
}
/**
* Check the state of currently pending and running animations. If there are none
* pending/running, call {@link #dispatchAnimationsFinished()} to notify any
* listeners.
*/
private void dispatchFinishedWhenDone() {
if (!isRunning()) {
dispatchAnimationsFinished();
}
}
@Override
public void endAnimations() {
int count = mPendingMoves.size();
for (int i = count - 1; i >= 0; i--) {
MoveInfo item = mPendingMoves.get(i);
View view = item.holder.itemView;
ViewCompat.setTranslationY(view, 0);
ViewCompat.setTranslationX(view, 0);
dispatchMoveFinished(item.holder);
mPendingMoves.remove(i);
}
count = mPendingRemovals.size();
for (int i = count - 1; i >= 0; i--) {
RecyclerView.ViewHolder item = mPendingRemovals.get(i);
dispatchRemoveFinished(item);
mPendingRemovals.remove(i);
}
count = mPendingAdditions.size();
for (int i = count - 1; i >= 0; i--) {
RecyclerView.ViewHolder item = mPendingAdditions.get(i);
View view = item.itemView;
ViewCompat.setAlpha(view, 1);
dispatchAddFinished(item);
mPendingAdditions.remove(i);
}
count = mPendingChanges.size();
for (int i = count - 1; i >= 0; i--) {
endChangeAnimationIfNecessary(mPendingChanges.get(i));
}
mPendingChanges.clear();
if (!isRunning()) {
return;
}
int listCount = mMovesList.size();
for (int i = listCount - 1; i >= 0; i--) {
ArrayList<MoveInfo> moves = mMovesList.get(i);
count = moves.size();
for (int j = count - 1; j >= 0; j--) {
MoveInfo moveInfo = moves.get(j);
RecyclerView.ViewHolder item = moveInfo.holder;
View view = item.itemView;
ViewCompat.setTranslationY(view, 0);
ViewCompat.setTranslationX(view, 0);
dispatchMoveFinished(moveInfo.holder);
moves.remove(j);
if (moves.isEmpty()) {
mMovesList.remove(moves);
}
}
}
listCount = mAdditionsList.size();
for (int i = listCount - 1; i >= 0; i--) {
ArrayList<RecyclerView.ViewHolder> additions = mAdditionsList.get(i);
count = additions.size();
for (int j = count - 1; j >= 0; j--) {
RecyclerView.ViewHolder item = additions.get(j);
View view = item.itemView;
ViewCompat.setAlpha(view, 1);
dispatchAddFinished(item);
additions.remove(j);
if (additions.isEmpty()) {
mAdditionsList.remove(additions);
}
}
}
listCount = mChangesList.size();
for (int i = listCount - 1; i >= 0; i--) {
ArrayList<ChangeInfo> changes = mChangesList.get(i);
count = changes.size();
for (int j = count - 1; j >= 0; j--) {
endChangeAnimationIfNecessary(changes.get(j));
if (changes.isEmpty()) {
mChangesList.remove(changes);
}
}
}
cancelAll(mRemoveAnimations);
cancelAll(mMoveAnimations);
cancelAll(mAddAnimations);
cancelAll(mChangeAnimations);
dispatchAnimationsFinished();
}
void cancelAll(List<RecyclerView.ViewHolder> viewHolders) {
for (int i = viewHolders.size() - 1; i >= 0; i--) {
ViewCompat.animate(viewHolders.get(i).itemView).cancel();
}
}
/**
* {@inheritDoc}
* <p>
     * If the payload list is not empty, this ItemAnimator returns <code>true</code>.
* When this is the case:
* <ul>
* <li>If you override {@link #animateChange(RecyclerView.ViewHolder, RecyclerView.ViewHolder, int, int, int, int)}, both
* ViewHolder arguments will be the same instance.
* </li>
* <li>
* If you are not overriding {@link #animateChange(RecyclerView.ViewHolder, RecyclerView.ViewHolder, int, int, int, int)},
     * then this ItemAnimator will call {@link #animateMove(RecyclerView.ViewHolder, int, int, int, int)} and
* run a move animation instead.
* </li>
* </ul>
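     * <p>
     * For example (illustrative; {@code adapter} and {@code position} are placeholders):
     * </p>
     * <pre>
     * // a non-empty payload makes this animator reuse the updated ViewHolder
     * adapter.notifyItemChanged(position, "text_changed");
     * </pre>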
*/
@Override
public boolean canReuseUpdatedViewHolder(@NonNull RecyclerView.ViewHolder viewHolder,
@NonNull List<Object> payloads) {
return !payloads.isEmpty() || super.canReuseUpdatedViewHolder(viewHolder, payloads);
}
private static class VpaListenerAdapter implements ViewPropertyAnimatorListener {
@Override
public void onAnimationStart(View view) {}
@Override
public void onAnimationEnd(View view) {}
@Override
public void onAnimationCancel(View view) {}
}
}
| qiuzichi/SingleMadBrain | app/src/main/java/com/unipad/singlebrain/longPoker/view/NoAlphaItemAnimator.java | Java | apache-2.0 | 26,384 |
package Udemy_Tutorials;
import java.io.Serializable;
/**
* Created by Alternate on 4/26/14.
*/
public class Person implements Info, Serializable {
public static final long serialVersionUID = -129308122892342420L;
private transient int id; //Will not be serialized
private String name;
private int age;
    public Person(int id, String name, int age) {
        this.id = id;
        this.name = name;
        this.age = age;
    }
public void greet() {
System.out.println("Hello");
}
public void addressByName() {
if (age < 30) {
System.out.println("Hey there, " + name);
} else {
System.out.println("Hello, " + name);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Person person = (Person) o;
if (age != person.age) return false;
if (id != person.id) return false;
if (name != null ? !name.equals(person.name) : person.name != null) return false;
return true;
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + (name != null ? name.hashCode() : 0);
result = 31 * result + age;
return result;
}
public void setName(String name) {
this.name = name;
}
@Override
public String toString() {
return "Person{" +
"id=" + id +
", name='" + name + '\'' +
", age=" + age +
'}';
}
@Override
public void showInfo() {
System.out.println("u r nigger lol stop trying to get info");
}
}
| Blimeo/Java | src/Udemy_Tutorials/Person.java | Java | apache-2.0 | 1,702 |
package com.itheima.mobileguard.activities;
import com.itheima.mobileguard.R;
import com.itheima.mobileguard.fragments.LockedFragment;
import com.itheima.mobileguard.fragments.UnlockFragment;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
public class AppLockActivity extends FragmentActivity implements OnClickListener {
private static final String TAG = "AppLockActivity";
private TextView tv_locked;
private TextView tv_unlock;
private FragmentManager fm;
private UnlockFragment unlockFragment;
private LockedFragment lockedFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_app_lock);
tv_unlock = (TextView) findViewById(R.id.tv_unlock);
tv_locked = (TextView) findViewById(R.id.tv_locked);
tv_locked.setOnClickListener(this);
tv_unlock.setOnClickListener(this);
        // Initialize the fragment manager.
fm = getSupportFragmentManager();
unlockFragment = new UnlockFragment();
lockedFragment = new LockedFragment();
        // Begin the transaction for the UI change.
FragmentTransaction ft = fm.beginTransaction();
ft.replace(R.id.fl_container, unlockFragment);
ft.commit();
}
@Override
public void onClick(View v) {
        // Begin the transaction for the UI change.
FragmentTransaction ft = fm.beginTransaction();
switch (v.getId()) {
case R.id.tv_locked:
tv_locked.setBackgroundResource(R.drawable.tab_right_pressed);
tv_unlock.setBackgroundResource(R.drawable.tab_left_default);
Log.i(TAG,"Ìæ»»fragmentµÄ½çÃæ");
ft.replace(R.id.fl_container, lockedFragment);
break;
case R.id.tv_unlock:
tv_locked.setBackgroundResource(R.drawable.tab_right_default);
tv_unlock.setBackgroundResource(R.drawable.tab_left_pressed);
Log.i(TAG,"Ìæ»»fragmentµÄ½çÃæ");
ft.replace(R.id.fl_container, unlockFragment);
break;
}
        ft.commit(); // Commit the transaction.
}
}
| treejames/mobilesafe-1 | src/com/itheima/mobileguard/activities/AppLockActivity.java | Java | apache-2.0 | 2,119 |
package org.wso2.carbon.appmgt.impl.dto;
import org.wso2.carbon.appmgt.impl.AppMConstants;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class VerbInfoDTO implements Serializable {
private String httpVerb;
private String authType;
private String throttling;
private String requestKey;
private boolean skipThrottling;
public String getThrottling() {
return throttling;
}
public void setThrottling(String throttling) {
this.throttling = throttling;
}
public String getRequestKey() {
return requestKey;
}
public void setRequestKey(String requestKey) {
this.requestKey = requestKey;
}
public String getHttpVerb() {
return httpVerb;
}
public void setHttpVerb(String httpVerb) {
this.httpVerb = httpVerb;
}
public String getAuthType() {
return authType;
}
public void setAuthType(String authType) {
this.authType = authType;
}
public boolean requiresAuthentication() {
return !AppMConstants.AUTH_TYPE_NONE.equalsIgnoreCase(authType);
}
public boolean isSkipThrottling() {
return skipThrottling;
}
public void setSkipThrottling(boolean skipThrottling) {
this.skipThrottling = skipThrottling;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
VerbInfoDTO that = (VerbInfoDTO) o;
if (httpVerb != null ? !httpVerb.equals(that.getHttpVerb()) : that.getHttpVerb() != null)
return false;
return true;
}
@Override
public int hashCode() {
return httpVerb != null ? httpVerb.hashCode() : 0;
}
private Map<String, Boolean> allowAnonymousUrlMap = new HashMap<String, Boolean>();
/**
* add values to allowAnonymousUrlMap
*
* @param key
* @param value
*/
public void addAllowAnonymousUrl(String key, Boolean value) {
allowAnonymousUrlMap.put(key, value);
}
/**
* get value from allowAnonymousUrlMap for the given key
*
* @param key
* @return boolean result
*/
public boolean getAllowAnonymousUrl(String key) {
return allowAnonymousUrlMap.get(key);
}
/**
* check if the allowAnonymousUrlMap is empty/null
*
* @return boolean result
*/
public boolean isEmptyAllowAnonymousUrlMap() {
return ((allowAnonymousUrlMap == null) || (allowAnonymousUrlMap.isEmpty()));
}
/**
* Get all allowAnonymousUrlMap key/value list
*
* @return keySet list
*/
public Set<String> getAllowAnonymousUrlList() {
return allowAnonymousUrlMap.keySet();
}
}
| lakshani/carbon-mobile-appmgt | components/org.wso2.carbon.appmgt.impl/src/main/java/org/wso2/carbon/appmgt/impl/dto/VerbInfoDTO.java | Java | apache-2.0 | 2,632 |
package it.unibz.krdb.obda.owlrefplatform.owlapi3;
/*
* #%L
* ontop-quest-owlapi3
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants;
import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.reasoner.IllegalConfigurationException;
import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.util.Properties;
import static com.google.common.base.Preconditions.checkArgument;
/***
* TODO: rewrite the doc
* <p>
* Implementation of an OWLReasonerFactory that can create instances of Quest.
* Note, to create an instance of Quest first you must call the method
* {@code #setPreferenceHolder(Properties)} with your parameters see Quest.java
* for a description of the preferences. Also, if you use Quest in Virtual ABox
* mode you must set an {@link OBDAModel} with your mappings.
*
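 * <p>
 * A minimal usage sketch (assuming virtual ABox mode, with {@code obdaModel}, {@code preferences} and
 * {@code ontology} prepared elsewhere; the {@code obdaModel(...)} builder method is an assumption about
 * {@link QuestOWLConfiguration}):
 * </p>
 *
 * <pre>
 * // preferences configured for virtual mode and an OBDAModel with the mappings are assumed to exist
 * QuestOWLConfiguration config = QuestOWLConfiguration.builder()
 *         .obdaModel(obdaModel)   // assumed builder method, required in virtual ABox mode
 *         .preferences(preferences)
 *         .build();
 * QuestOWL reasoner = new QuestOWLFactory().createReasoner(ontology, config);
 * </pre>
 *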
* @see OBDAModel
*/
public class QuestOWLFactory implements OWLReasonerFactory {
@SuppressWarnings("unused")
private final Logger log = LoggerFactory.getLogger(QuestOWLFactory.class);
@Nonnull
@Override
public String getReasonerName() {
return "Ontop/Quest";
}
@Nonnull
@Override
public QuestOWL createNonBufferingReasoner(@Nonnull OWLOntology ontology) {
throw new UnsupportedOperationException("Quest is a buffering reasoner");
}
@Nonnull
@Override
public QuestOWL createNonBufferingReasoner(@Nonnull OWLOntology ontology, @Nonnull OWLReasonerConfiguration config)
throws IllegalConfigurationException {
throw new UnsupportedOperationException("Quest is a buffering reasoner");
}
@Nonnull
@Override
public QuestOWL createReasoner(@Nonnull OWLOntology ontology) {
QuestPreferences preferences = new QuestPreferences();
        // The ABox mode (classic vs. virtual) is validated in createReasoner(OWLOntology, QuestOWLConfiguration).
return createReasoner(ontology, QuestOWLConfiguration.builder().preferences(preferences).build());
}
/**
*
* @deprecated use {@link #createReasoner(OWLOntology, QuestOWLConfiguration)} instead
*
* @throws IllegalConfigurationException
*/
@Nonnull
@Override
@Deprecated
public QuestOWL createReasoner(@Nonnull OWLOntology ontology, @Nonnull OWLReasonerConfiguration config) throws IllegalConfigurationException {
checkArgument(config instanceof QuestOWLConfiguration, "config %s is not an instance of QuestOWLConfiguration", config);
return createReasoner(ontology, (QuestOWLConfiguration) config);
}
@Nonnull
public QuestOWL createReasoner(@Nonnull OWLOntology ontology, @Nonnull QuestOWLConfiguration config) throws IllegalConfigurationException {
OBDAModel obdaModel = config.getObdaModel();
Properties preferences = config.getPreferences();
if (obdaModel == null && preferences.get(QuestPreferences.ABOX_MODE).equals(QuestConstants.VIRTUAL)) {
throw new IllegalConfigurationException("mappings are not specified in virtual mode", config);
} else if (obdaModel != null && preferences.get(QuestPreferences.ABOX_MODE).equals(QuestConstants.CLASSIC)) {
throw new IllegalConfigurationException("mappings are specified in classic mode", config);
}
return new QuestOWL(ontology, config);
}
}
| eschwert/ontop | quest-owlapi3/src/main/java/it/unibz/krdb/obda/owlrefplatform/owlapi3/QuestOWLFactory.java | Java | apache-2.0 | 4,214 |
package org.neo4art.importer.wikipedia.parser.settlement;
import java.util.Map;
import org.neo4art.domain.Coordinate;
import org.neo4art.domain.Settlement;
import org.neo4art.importer.wikipedia.parser.util.InfoboxMap;
import org.neo4art.importer.wikipedia.parser.util.InfoboxParserUtil;
import org.neo4art.importer.wikipedia.parser.util.InfoboxTypeParserUtil;
import org.neo4art.importer.wikipedia.parser.util.InfoboxWebsiteParserUtil;
public class WikipediaSettlementPhilippineRegionInfoboxParser {
public static final String NAME = "name";
public static final String OFFICIAL_NAME = "official_name";
public static final String NATIVE_NAME = "native_name";
public static final String NATIVE_NAME_LANG = "native_name_lang";
public static final String OTHER_NAME = "other_name";
public static final String SETTLEMENT_TYPE = "settlement_type";
public static final String LATD = "latd";
public static final String LATM = "latm";
public static final String LATS = "lats";
public static final String LATNS = "latNS";
public static final String LONGD = "longd";
public static final String LONGM = "longm";
public static final String LONGS = "longs";
public static final String LONGEW = "longEW";
public static final String LATDEG = "lat_deg";
public static final String LATMIN = "lat_min";
public static final String LATSEC = "lat_sec";
public static final String LONGDEG = "lon_deg";
public static final String LONGMIN = "lon_min";
public static final String LONGSEC = "lon_sec";
public static final String LATITUDE = "latitude";
public static final String LONGITUDE = "longitude";
public static final String WEBSITE = "website";
public static final String WEB = "web";
public static final String STYLE = "infobox";
public WikipediaSettlementPhilippineRegionInfoboxParser() {
}
public static Settlement parse(String text) {
Map<String, String> map = InfoboxMap.asMap(text);
Settlement settlement = new Settlement();
Coordinate coordinate = new Coordinate();
for (String key : map.keySet()) {
switch (key) {
case NAME:
settlement
.setName(InfoboxParserUtil.removeAllParenthesis(map.get(key)));
break;
case STYLE:
settlement
.setType(InfoboxTypeParserUtil.getType(map.get(key)));
break;
case OFFICIAL_NAME:
settlement.setOfficialName(InfoboxParserUtil.removeAllParenthesis(map
.get(key)));
break;
case NATIVE_NAME:
settlement.setNativeName(InfoboxParserUtil.removeAllParenthesis(map
.get(key)));
break;
case NATIVE_NAME_LANG:
settlement.setNativeNameLang(InfoboxParserUtil.removeAllParenthesis(map.get(key)));
break;
case OTHER_NAME:
settlement.setOtherName(InfoboxParserUtil.removeAllParenthesis(map
.get(key)));
break;
case SETTLEMENT_TYPE:
settlement.setSettlementType(InfoboxParserUtil.removeAllParenthesis(map.get(key)));
break;
case LATITUDE:
coordinate.setLatD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGITUDE:
coordinate.setLongD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATD:
coordinate.setLatD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATM:
coordinate.setLatM(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATS:
coordinate.setLatS(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATDEG:
coordinate.setLatD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATMIN:
coordinate.setLatM(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATSEC:
coordinate.setLatS(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LATNS:
coordinate.setLatNS(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGD:
coordinate.setLongD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGM:
coordinate.setLongM(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGS:
coordinate.setLongS(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGDEG:
coordinate.setLongD(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGMIN:
coordinate.setLongM(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGSEC:
coordinate.setLongS(map.get(key));
settlement.setCoordinate(coordinate);
break;
case LONGEW:
coordinate.setLongEW(map.get(key));
settlement.setCoordinate(coordinate);
break;
case WEB:
settlement.setWebsite(InfoboxWebsiteParserUtil.getWebsite(map.get(key)));
break;
case WEBSITE:
settlement.setWebsite(InfoboxWebsiteParserUtil.getWebsite(map.get(key)));
break;
}
}
return settlement;
}
}
| MZaratin-Larus/neo4art | neo4art-wikipedia-importer/src/main/java/org/neo4art/importer/wikipedia/parser/settlement/WikipediaSettlementPhilippineRegionInfoboxParser.java | Java | apache-2.0 | 4,944 |
package com.gj.administrator.gjerp.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.gj.administrator.gjerp.R;
import java.util.List;
/**
* Created by guojun on 2015/12/9.
*/
public class RecyclerListAdapter extends RecyclerView.Adapter<RecyclerListAdapter.ViewHolder> {
private static String TAG = "MyAdapter";
protected Context context;
private List<rowData> rowDataList;
public static class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
public LinearLayout mLinearLayout;
public IMyViewHolderClicks mListener;
public int position;
public ViewHolder(LinearLayout linearLayout, IMyViewHolderClicks listener) {
super(linearLayout);
mListener = listener;
mLinearLayout = linearLayout;
linearLayout.setOnClickListener(this);
}
@Override
public void onClick(View v) {
            if (v instanceof LinearLayout && mListener != null) {
                // Forward the click to the registered callback together with the item position.
                mListener.onClick((LinearLayout) v, position);
            }
}
public void setPosition(int p) {
position = p;
}
public static interface IMyViewHolderClicks {
public void onClick(LinearLayout caller, int position);
}
}
public RecyclerListAdapter(Context context, List<rowData> rowDataList) {
this.context = context;
this.rowDataList = rowDataList;
}
@Override
public RecyclerListAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View v = LayoutInflater.from(parent.getContext())
.inflate(R.layout.recycler_list_item, parent, false);
ViewHolder holder = new ViewHolder(
(LinearLayout) v,
new RecyclerListAdapter.ViewHolder.IMyViewHolderClicks() {
@Override
public void onClick(LinearLayout caller, int position) {
//TODO
}
}
);
return holder;
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.setPosition(position);
TextView guestRmIdTextView =
(TextView) holder.mLinearLayout.findViewById(R.id.first_row_text);
TextView guestNameTextView =
(TextView) holder.mLinearLayout.findViewById(R.id.second_row_text);
TextView guestIncomeTextView =
(TextView) holder.mLinearLayout.findViewById(R.id.third_row_text);
TextView guestTimeSpanTextView =
(TextView) holder.mLinearLayout.findViewById(R.id.forth_row_text);
guestRmIdTextView.setText(rowDataList.get(position).first);
guestNameTextView.setText(rowDataList.get(position).second);
guestIncomeTextView.setText(rowDataList.get(position).third);
guestTimeSpanTextView.setText(rowDataList.get(position).forth);
}
@Override
public int getItemCount() {
if (null != rowDataList) return rowDataList.size();
else return 0;
}
public static class rowData {
private String first;
private String second;
private String third;
private String forth;
public rowData(String firstString, String secondString, String thirdString, String forthString) {
first = firstString;
second = secondString;
third = thirdString;
forth = forthString;
}
}
}
| gjSCUT/MoonHotelMater | app/src/main/java/com/gj/administrator/gjerp/adapter/RecyclerListAdapter.java | Java | apache-2.0 | 3,655 |
package io.cattle.platform.servicediscovery.deployment.impl;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.core.model.ServiceExposeMap;
import io.cattle.platform.docker.constants.DockerInstanceConstants;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.resource.ResourcePredicate;
import io.cattle.platform.process.common.util.ProcessUtils;
import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants;
import io.cattle.platform.servicediscovery.api.resource.ServiceDiscoveryConfigItem;
import io.cattle.platform.servicediscovery.api.util.ServiceDiscoveryUtil;
import io.cattle.platform.servicediscovery.deployment.AbstractInstanceUnit;
import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstance;
import io.cattle.platform.servicediscovery.deployment.impl.DeploymentManagerImpl.DeploymentServiceContext;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
public class DefaultDeploymentUnitInstance extends AbstractInstanceUnit {
protected String instanceName;
protected boolean startOnce;
public DefaultDeploymentUnitInstance(DeploymentServiceContext context, String uuid,
Service service, String instanceName, Instance instance, Map<String, String> labels, String launchConfigName) {
super(context, uuid, service, launchConfigName);
this.instanceName = instanceName;
this.instance = instance;
if (this.instance != null) {
exposeMap = context.exposeMapDao.findInstanceExposeMap(this.instance);
}
setStartOnce(service, launchConfigName);
}
@SuppressWarnings("unchecked")
    public void setStartOnce(Service service, String launchConfig) {
Object serviceLabels = ServiceDiscoveryUtil.getLaunchConfigObject(service, launchConfigName,
InstanceConstants.FIELD_LABELS);
if (serviceLabels != null) {
String startOnceLabel = ((Map<String, String>) serviceLabels)
.get(ServiceDiscoveryConstants.LABEL_SERVICE_CONTAINER_CREATE_ONLY);
if (StringUtils.equalsIgnoreCase(startOnceLabel, "true")) {
startOnce = true;
}
}
}
@Override
public boolean isError() {
return this.instance != null && this.instance.getRemoved() != null;
}
@Override
protected void removeUnitInstance() {
if (!(instance.getState().equals(CommonStatesConstants.REMOVED) || instance.getState().equals(
CommonStatesConstants.REMOVING))) {
try {
context.objectProcessManager.scheduleStandardProcessAsync(StandardProcess.REMOVE, instance,
null);
} catch (ProcessCancelException e) {
context.objectProcessManager.scheduleProcessInstanceAsync(InstanceConstants.PROCESS_STOP,
instance, ProcessUtils.chainInData(new HashMap<String, Object>(),
InstanceConstants.PROCESS_STOP, InstanceConstants.PROCESS_REMOVE));
}
}
}
@Override
public DeploymentUnitInstance create(Map<String, Object> deployParams) {
if (createNew()) {
Map<String, Object> launchConfigData = populateLaunchConfigData(deployParams);
Pair<Instance, ServiceExposeMap> instanceMapPair = context.exposeMapDao.createServiceInstance(launchConfigData,
service);
this.instance = instanceMapPair.getLeft();
this.exposeMap = instanceMapPair.getRight();
}
if (instance.getState().equalsIgnoreCase(CommonStatesConstants.REQUESTED)) {
context.objectProcessManager.scheduleStandardProcessAsync(StandardProcess.CREATE, instance,
null);
}
if (exposeMap.getState().equalsIgnoreCase(CommonStatesConstants.REQUESTED)) {
context.objectProcessManager.scheduleStandardProcessAsync(StandardProcess.CREATE, exposeMap,
null);
}
this.instance = context.objectManager.reload(this.instance);
return this;
}
@SuppressWarnings("unchecked")
protected Map<String, Object> populateLaunchConfigData(Map<String, Object> deployParams) {
Map<String, Object> launchConfigData = ServiceDiscoveryUtil.buildServiceInstanceLaunchData(service,
deployParams, launchConfigName, context.allocatorService);
launchConfigData.put("name", this.instanceName);
launchConfigData.remove(ServiceDiscoveryConfigItem.RESTART.getCattleName());
Object labels = launchConfigData.get(InstanceConstants.FIELD_LABELS);
if (labels != null) {
String overrideHostName = ((Map<String, String>) labels)
.get(ServiceDiscoveryConstants.LABEL_OVERRIDE_HOSTNAME);
if (StringUtils.equalsIgnoreCase(overrideHostName, "container_name")) {
String domainName = (String) launchConfigData.get(DockerInstanceConstants.FIELD_DOMAIN_NAME);
String overrideName = getOverrideHostName(domainName, this.instanceName);
launchConfigData.put(InstanceConstants.FIELD_HOSTNAME, overrideName);
}
}
return launchConfigData;
}
private String getOverrideHostName(String domainName, String instanceName) {
String overrideName = instanceName;
if (instanceName != null && instanceName.length() > 64) {
String serviceNumber = instanceName.substring(instanceName.lastIndexOf("_"));
int truncateIndex = 64 - serviceNumber.length();
if (domainName != null) {
truncateIndex = truncateIndex - domainName.length() - 1;
}
overrideName = instanceName.substring(0, truncateIndex) + serviceNumber;
}
return overrideName;
}
@Override
public boolean createNew() {
return this.instance == null;
}
@Override
public DeploymentUnitInstance waitForStartImpl() {
this.instance = context.resourceMonitor.waitFor(this.instance,
new ResourcePredicate<Instance>() {
@Override
public boolean evaluate(Instance obj) {
return InstanceConstants.STATE_RUNNING.equals(obj.getState());
}
});
return this;
}
@Override
protected boolean isStartedImpl() {
if (startOnce) {
List<String> validStates = Arrays.asList(InstanceConstants.STATE_STOPPED, InstanceConstants.STATE_STOPPING,
InstanceConstants.STATE_RUNNING);
return validStates.contains(context.objectManager.reload(this.instance).getState());
}
return context.objectManager.reload(this.instance).getState().equalsIgnoreCase(InstanceConstants.STATE_RUNNING);
}
@Override
public void waitForNotTransitioning() {
if (this.instance != null) {
this.instance = context.resourceMonitor.waitForNotTransitioning(this.instance);
}
}
}
| stresler/cattle | code/iaas/service-discovery/server/src/main/java/io/cattle/platform/servicediscovery/deployment/impl/DefaultDeploymentUnitInstance.java | Java | apache-2.0 | 7,443 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.inspector.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.inspector.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* Tag JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TagJsonUnmarshaller implements Unmarshaller<Tag, JsonUnmarshallerContext> {
public Tag unmarshall(JsonUnmarshallerContext context) throws Exception {
Tag tag = new Tag();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("key", targetDepth)) {
context.nextToken();
tag.setKey(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("value", targetDepth)) {
context.nextToken();
tag.setValue(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return tag;
}
private static TagJsonUnmarshaller instance;
public static TagJsonUnmarshaller getInstance() {
if (instance == null)
instance = new TagJsonUnmarshaller();
return instance;
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-inspector/src/main/java/com/amazonaws/services/inspector/model/transform/TagJsonUnmarshaller.java | Java | apache-2.0 | 2,806 |
package com.pe.droid.appquejas.repository;
import org.springframework.data.repository.CrudRepository;
import com.pe.droid.appquejas.domain.Cliente;
public interface ClienteRepository extends CrudRepository<Cliente, Long> {
}
| RicardoFigueroa/droid-proyects | droid-appquejas-ws/src/main/java/com/pe/droid/appquejas/repository/ClienteRepository.java | Java | apache-2.0 | 229 |
/*
* Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package es.bsc.compss.util;
import es.bsc.compss.types.data.LogicalData;
import es.bsc.compss.types.resources.Resource;
import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
/**
 * The Shared Disk Manager is a utility to manage the disks shared by many resources. It keeps information about
 * which disks are mounted on a machine, the paths where they are mounted and which files are present on each disk.
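 * <p>
 * A minimal usage sketch (assuming {@code host} is a {@link Resource} already known to the runtime; the disk name
 * and mountpoint are placeholders):
 * </p>
 *
 * <pre>
 * SharedDiskManager.addMachine(host);
 * SharedDiskManager.addSharedToMachine("gpfs", "/gpfs/projects/", host);
 * String disk = SharedDiskManager.getSharedName(host, "/gpfs/projects/data.dat"); // "gpfs"
 * String mountpoint = SharedDiskManager.getMounpoint(host, "gpfs");               // "/gpfs/projects/"
 * </pre>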
*/
public class SharedDiskManager {
/**
* Relation shared disk name --> worker names where it is mounted.
*/
private static final Map<String, List<Resource>> SHARED_TO_MACHINES = new HashMap<>();
/**
* Relation resource name --> Shared disks contained.
*/
private static final Map<Resource, Machine> MACHINE_TO_SHARED = new HashMap<>();
/**
* LogicalData stored in any sharedDisk.
*/
private static final Map<String, Set<LogicalData>> SHARED_DISK_TO_SHARED_FILES = new TreeMap<>();
/**
* Adds a new resource to be managed.
*
* @param host Resource
*/
public static synchronized void addMachine(Resource host) {
Machine m = new Machine();
MACHINE_TO_SHARED.put(host, m);
}
/**
* Links a shared disk with a resource.
*
* @param diskName shared disk identifier
* @param mountpoint path where the shared disk is mounted
* @param host containing resource
*/
public static synchronized void addSharedToMachine(String diskName, String mountpoint, Resource host) {
Machine resource = MACHINE_TO_SHARED.get(host);
if (resource != null) {
resource.addSharedDisk(diskName, mountpoint);
List<Resource> machines = SHARED_TO_MACHINES.get(diskName);
if (machines == null) {
machines = new LinkedList<>();
SHARED_TO_MACHINES.put(diskName, machines);
}
synchronized (machines) {
machines.add(host);
}
} else {
ErrorManager.warn("Host " + host.getName() + " not registered as machine in the Shared disk manager.");
}
}
/**
* Gets the name of a shared disk which contains the files in a resource path.
*
* @param host Name of the resource
* @param path File path contained by the disk
     * @return the shared disk identifier containing that file path, or null if no shared disk on the resource
     *         contains that path.
*/
public static synchronized String getSharedName(Resource host, String path) {
Machine m = MACHINE_TO_SHARED.get(host);
if (m == null) {
return null;
}
return m.getSharedName(path);
}
/**
* Returns a string describing the current state of the shared disk configuration and the files contained on them.
*
* @return description of the current state of the shared disk configuration and the files contained on them.
*/
public static synchronized String getSharedStatus() {
StringBuilder sb = new StringBuilder("Shared disk in machines:\n");
for (Entry<String, List<Resource>> e : SHARED_TO_MACHINES.entrySet()) {
sb.append(e.getKey()).append("--> {");
for (int i = 0; i < e.getValue().size(); i++) {
sb.append(e.getValue().get(i).getName()).append(", ");
}
sb.append("}\n");
}
sb.append("Machines :\n");
for (Entry<Resource, Machine> e : MACHINE_TO_SHARED.entrySet()) {
sb.append(e.getKey().getName()).append("--> {");
for (Entry<String, String> me : e.getValue().name2Mountpoint.entrySet()) {
sb.append(me.getKey()).append("@").append(me.getValue()).append(", ");
}
sb.append("}\n");
}
return sb.toString();
}
/**
* Returns a list with all the name of all the shared disks mounted on a resource.
*
* @param host resource
* @return a list with all the name of all the shared disks mounted on a resource
*/
public static synchronized List<String> getAllSharedNames(Resource host) {
Machine m = MACHINE_TO_SHARED.get(host);
if (m == null) {
return new LinkedList<>();
}
return m.getAllSharedNames();
}
/**
* Returns the mountpoint of a shared disk in a resource.
*
* @param host resource
* @param sharedDisk shared disk name
* @return mountpoint of the shared disk in the resource
*/
public static synchronized String getMounpoint(Resource host, String sharedDisk) {
Machine m = MACHINE_TO_SHARED.get(host);
if (m == null) {
return null;
}
return m.getPath(sharedDisk);
}
/**
* Returns a list of machines with a shared disk mounted.
*
* @param diskName name of the shared disk we are looking for
* @return list of machines with a shared disk mounted
*/
public static synchronized List<Resource> getAllMachinesfromDisk(String diskName) {
return SHARED_TO_MACHINES.get(diskName);
}
/**
* Removes all the information of a resource.
*
* @param host Machine to remove
     * @return the correlation diskName -> mountpoint of the removed machine
*/
public static synchronized Map<String, String> terminate(Resource host) {
        Machine m = MACHINE_TO_SHARED.remove(host);
        if (m == null) {
            return new HashMap<>();
        }
        for (String sharedName : m.allShared) {
            List<Resource> machines = SHARED_TO_MACHINES.get(sharedName);
            synchronized (machines) {
                machines.remove(host);
            }
        }
        return m.name2Mountpoint;
}
/**
* Adds a LogicalData to a diskName.
*
* @param diskName Disk name
* @param ld Logical data
*/
public static synchronized void addLogicalData(String diskName, LogicalData ld) {
Set<LogicalData> lds = SHARED_DISK_TO_SHARED_FILES.get(diskName);
if (lds == null) {
lds = new HashSet<>();
SHARED_DISK_TO_SHARED_FILES.put(diskName, lds);
}
synchronized (lds) {
lds.add(ld);
}
}
/**
     * Removes all the appearances of an obsolete logical data in the given shared disk. It has no effect if the
     * diskName or the logicalData does not exist.
*
* @param diskName Shared disk name
* @param obsolete obsoleted logical data
*/
public static synchronized void removeLogicalData(String diskName, LogicalData obsolete) {
Set<LogicalData> lds = SHARED_DISK_TO_SHARED_FILES.get(diskName);
if (lds != null) {
synchronized (lds) {
lds.remove(obsolete);
}
}
}
/**
     * Recovers all the LogicalData registered on a given shared disk.
     *
     * @param diskName Shared Disk name
     * @return the set of LogicalData registered on the shared disk, or {@code null} if the disk is unknown
     */
    public static synchronized Set<LogicalData> getAllSharedFiles(String diskName) {
        return SHARED_DISK_TO_SHARED_FILES.get(diskName);
}
private static class Machine {
private final List<String> allShared;
private final HashMap<String, String> mountpoint2Name;
private final HashMap<String, String> name2Mountpoint;
public Machine() {
allShared = new LinkedList<>();
mountpoint2Name = new HashMap<>();
name2Mountpoint = new HashMap<>();
}
public void addSharedDisk(String diskName, String mountpoint) {
if (!allShared.contains(diskName)) {
allShared.add(diskName);
}
if (!mountpoint.endsWith(File.separator)) {
mountpoint += File.separator;
}
mountpoint2Name.put(mountpoint, diskName);
name2Mountpoint.put(diskName, mountpoint);
}
public String getSharedName(String path) {
if (path == null) {
return null;
}
for (Entry<String, String> e : mountpoint2Name.entrySet()) {
if (path.startsWith(e.getKey())) {
return e.getValue();
}
}
return null;
}
public String getPath(String sharedDisk) {
return name2Mountpoint.get(sharedDisk);
}
public List<String> getAllSharedNames() {
return allShared;
}
}
}
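
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the original COMPSs runtime. It is a minimal, hypothetical usage
// sketch added for illustration only. It assumes a 'Resource' instance (e.g. a worker node) is already available,
// and the disk name "gpfs0" and mountpoint "/gpfs/projects/" are made-up example values.
// --------------------------------------------------------------------------------------------------------------
class SharedDiskManagerUsageSketch {

    // Registers a worker and one shared disk, then resolves which disk serves a given path.
    static void registerExample(Resource worker) {
        SharedDiskManager.addMachine(worker);
        SharedDiskManager.addSharedToMachine("gpfs0", "/gpfs/projects/", worker);

        // Expected to return "gpfs0", since the path lies under the registered mountpoint
        String disk = SharedDiskManager.getSharedName(worker, "/gpfs/projects/data/input.txt");

        // Expected to return "/gpfs/projects/" (the original method keeps its "getMounpoint" spelling)
        String mountpoint = SharedDiskManager.getMounpoint(worker, disk);
    }
}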
| mF2C/COMPSs | compss/runtime/adaptors/engine/src/main/java/es/bsc/compss/util/SharedDiskManager.java | Java | apache-2.0 | 9,432 |
package org.grobid.core.data.util;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class EmailSanitizer {
private static final Pattern DASHES_PATTERN = Pattern.compile("(%E2%80%90|%e2%80%90)");
private static final Set<String> BLACKLISTED_EMAIL_WORDS = Sets.newHashSet(
"firstname",
"lastname",
"publication",
"theses",
"thesis",
"editor",
"press",
"contact",
"info",
"feedback",
"journal",
"please",
"pubs",
"iza@iza",
"admin",
"help",
"subs",
"news",
"archives",
"order",
"postmaster@",
"informa",
"reprint",
"comunicacion@",
"revista",
"digitalcommons",
"group@",
"root@",
"deposit@",
"studies",
"permiss",
"print",
"paper",
"report",
"support",
"pedocs",
"investigaciones@",
"medicin",
"copyright",
"rights",
"sales@",
"pacific@",
"redaktion",
"publicidad",
"surface@",
"comstat@",
"service@",
"omnia@",
"letter",
"scholar",
"staff",
"delivery",
"epubs",
"office",
"technolog",
"compute",
"elsevier"
);
private static final Pattern[] EMAIL_STRIP_PATTERNS = new Pattern[] {
Pattern.compile("^(e\\-mail|email|e\\smail|mail):"),
Pattern.compile("[\\r\\n\\t ]"), // newlines, tabs and spaces
Pattern.compile("\\(.*\\)$"),
};
private static final Pattern[] AT_SYMBOL_REPLACEMENTS = new Pattern[] {
Pattern.compile("@"),
Pattern.compile("@\\."),
Pattern.compile("\\.@"),
};
private static final Pattern EMAIL_SPLITTER_PATTERN = Pattern.compile("(\\sor\\s|,|;|/)");
private static final Pattern AT_SPLITTER = Pattern.compile("@");
/**
     * @param addresses raw email address strings (each entry may contain several addresses)
     * @return the cleaned, de-duplicated addresses, or null if the input is null or nothing survives cleaning
*/
public List<String> splitAndClean(List<String> addresses) {
if (addresses == null) {
return null;
}
List<String> result = new ArrayList<String>();
Set<String> emails = new HashSet<String>();
for (String emailAddress : addresses) {
emailAddress = initialReplace(emailAddress);
// StringTokenizer st = new StringTokenizer(emailAddress, ", ");
// List<String> emails = new ArrayList<String>();
// while (st.hasMoreTokens()) {
// String token = st.nextToken();
// if (token.length() > 2) {
// emails.add(token);
// }
// }
//
// int i = 0;
// for (String token : emails) {
// if (!token.contains("@")) {
// // the domain information is missing, we are taking the first one of the next tokens
// String newToken = null;
// int j = 0;
// for (String token2 : emails) {
// if (j <= i) {
// j++;
// } else {
// int ind = token2.indexOf("@");
// if (ind != -1) {
// newToken = token + token2.substring(ind, token2.length());
// break;
// }
// j++;
// }
// }
// if (newToken != null) {
// emails.set(i, newToken);
// }
// }
// i++;
// }
//
List<String> splitEmails = Lists.newArrayList(Splitter.on(EMAIL_SPLITTER_PATTERN)
.omitEmptyStrings()
.split(emailAddress.toLowerCase()).iterator());
if (splitEmails.size() > 1) {
// Some emails are of the form jiglesia,cmt@ll.iac.es or jiglesia;cmt@ll.iac.es or bono/caputo/vittorio@mporzio.astro.it
List<String> atSeparatedStrings = Lists.newArrayList(Splitter.on(AT_SPLITTER)
.omitEmptyStrings()
.split(emailAddress.toLowerCase()).iterator());
if (atSeparatedStrings.size() == 2) {
                    // Only the last email address has a domain, so append it to the rest of the split emails
int atIndex = splitEmails.get(splitEmails.size() - 1).indexOf('@');
String domain = splitEmails.get(splitEmails.size() - 1).substring(atIndex + 1);
for (int i = 0; i < splitEmails.size() - 1; i++) {
splitEmails.set(i, splitEmails.get(i) + "@" + domain);
}
}
}
for (String splitEmail : splitEmails) {
String email;
try {
email = cleanEmail(splitEmail);
} catch (Exception e) {
                    // Cleaning failed, so it's probably an invalid email; don't keep it
continue;
}
if (email != null && !email.isEmpty()) {
// Check for duplicate emails
if (emails.contains(email)) {
continue;
}
email = postValidateAddress(email);
if (email == null) {
continue;
}
emails.add(email);
result.add(email);
}
}
}
if (result.isEmpty()) {
return null;
}
return result;
}
private String initialReplace(String email) {
email = email.replace("{", "");
email = email.replace("}", "");
email = email.replace("(", "");
email = email.replace(")", "").trim();
email = email.replaceAll("(E|e)lectronic(\\s)(A|a)ddress(\\:)?", "");
email = email.replaceAll("^(e|E)?(\\-)?mail(\\:)?(\\s)(A|a)ddress(\\:)?", "");
email = email.replaceAll("^(e|E)?(\\-)?mail(\\:)?(\\s)?", "");
// case: Peter Pan -peter.pan@email.org with asterisks and spaces
email = email.replaceAll("^[A-Z][a-z]+\\s+[A-Z][a-z]+(\\*)?(\\s)*-(\\s)*", "");
return email;
}
private static String postValidateAddress(String emStr) {
String orig = emStr;
for (String b : BLACKLISTED_EMAIL_WORDS) {
if (orig.contains(b)) {
return null;
}
}
for (Pattern p : EMAIL_STRIP_PATTERNS) {
Matcher matcher = p.matcher(orig);
orig = matcher.replaceAll("");
}
if (!orig.contains("@")) {
return null;
}
return orig;
}
private static String cleanEmail(String email) throws UnsupportedEncodingException {
if (email == null) {
return null;
}
// Fix any incorrect dashes
Matcher dashes = DASHES_PATTERN.matcher(email);
email = dashes.replaceAll("-");
// Some emails may contain HTML encoded characters, so decode just in case
email = URLDecoder.decode(email, "UTF-8");
email = email.toLowerCase().trim();
for (Pattern p : EMAIL_STRIP_PATTERNS) {
Matcher matcher = p.matcher(email);
email = matcher.replaceAll("");
}
for (Pattern r : AT_SYMBOL_REPLACEMENTS) {
Matcher matcher = r.matcher(email);
email = matcher.replaceAll("@");
}
return email;
}
}
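
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the original GROBID source. It is a small, hypothetical usage
// sketch added for illustration; the sample input and the expected expansion are assumptions derived from the
// comments inside splitAndClean() above.
// --------------------------------------------------------------------------------------------------------------
class EmailSanitizerUsageSketch {

    // Feeds one raw author line through the sanitizer; "jiglesia,cmt@ll.iac.es" should expand into two
    // addresses sharing the same domain, with the leading "E-mail:" prefix stripped.
    static List<String> demo() {
        EmailSanitizer sanitizer = new EmailSanitizer();
        return sanitizer.splitAndClean(java.util.Arrays.asList("E-mail: jiglesia,cmt@ll.iac.es"));
    }
}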
| kermitt2/grobid | grobid-core/src/main/java/org/grobid/core/data/util/EmailSanitizer.java | Java | apache-2.0 | 8,356 |
package com.fishercoder;
import com.fishercoder.common.classes.TreeNode;
import com.fishercoder.common.utils.TreeUtils;
import com.fishercoder.solutions._270;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Arrays;
import static junit.framework.Assert.assertEquals;
public class _270Test {
private static _270.Solution1 solution1;
private static int expected;
private static TreeNode root;
private static double target;
@BeforeClass
public static void setup() {
solution1 = new _270.Solution1();
}
@Test
public void test1() {
root = TreeUtils.constructBinaryTree(Arrays.asList(4, 2, 5, 1, 3));
expected = 4;
target = 3.714286;
assertEquals(expected, solution1.closestValue(root, target));
}
}
| fishercoder1534/Leetcode | src/test/java/com/fishercoder/_270Test.java | Java | apache-2.0 | 802 |
package com.alwex.tree;
import java.util.ArrayList;
/**
* Created by alwex on 28/05/2015.
*/
public class QuadTree<T> {
// the current nodes
ArrayList<QuadNode<T>> nodes;
// current rectangle zone
private QuadRectangle zone;
    // GLOBAL CONFIGURATION
    // if maxItemByNode is reached,
    // the zone is subdivided
public static int maxItemByNode = 5;
public static int maxLevel = 10;
int level;
// the four sub regions,
// may be null if not needed
QuadTree<T>[] regions;
public static final int REGION_SELF = -1;
public static final int REGION_NW = 0;
public static final int REGION_NE = 1;
public static final int REGION_SW = 2;
public static final int REGION_SE = 3;
public QuadTree(QuadRectangle definition, int level) {
zone = definition;
nodes = new ArrayList<QuadNode<T>>();
this.level = level;
}
protected QuadRectangle getZone() {
return this.zone;
}
private int findRegion(QuadRectangle r, boolean split) {
int region = REGION_SELF;
if (nodes.size() >= maxItemByNode && this.level < maxLevel) {
            // we don't want to split if we are just retrieving
            // the region, not inserting an element
if (regions == null && split) {
// then create the subregions
this.split();
}
            // may be null if the node has not been split
if (regions != null) {
if (regions[REGION_NW].getZone().contains(r)) {
region = REGION_NW;
} else if (regions[REGION_NE].getZone().contains(r)) {
region = REGION_NE;
} else if (regions[REGION_SW].getZone().contains(r)) {
region = REGION_SW;
} else if (regions[REGION_SE].getZone().contains(r)) {
region = REGION_SE;
}
}
}
return region;
}
private void split() {
regions = new QuadTree[4];
float newWidth = zone.width / 2;
float newHeight = zone.height / 2;
int newLevel = level + 1;
regions[REGION_NW] = new QuadTree<T>(new QuadRectangle(
zone.x,
zone.y + zone.height / 2,
newWidth,
newHeight
), newLevel);
regions[REGION_NE] = new QuadTree<T>(new QuadRectangle(
zone.x + zone.width / 2,
zone.y + zone.height / 2,
newWidth,
newHeight
), newLevel);
regions[REGION_SW] = new QuadTree<T>(new QuadRectangle(
zone.x,
zone.y,
newWidth,
newHeight
), newLevel);
regions[REGION_SE] = new QuadTree<T>(new QuadRectangle(
zone.x + zone.width / 2,
zone.y,
newWidth,
newHeight
), newLevel);
}
public void insert(QuadRectangle r, T element) {
int region = this.findRegion(r, true);
if (region == REGION_SELF || this.level == maxLevel) {
nodes.add(new QuadNode<T>(r, element));
return;
} else {
regions[region].insert(r, element);
}
if (nodes.size() >= maxItemByNode && this.level < maxLevel) {
// redispatch the elements
ArrayList<QuadNode<T>> tempNodes = new ArrayList<QuadNode<T>>();
int length = nodes.size();
for (int i = 0; i < length; i++) {
tempNodes.add(nodes.get(i));
}
nodes.clear();
for (QuadNode<T> node : tempNodes) {
this.insert(node.r, node.element);
}
}
}
public ArrayList<T> getElements(ArrayList<T> list, QuadRectangle r) {
int region = this.findRegion(r, false);
int length = nodes.size();
for (int i = 0; i < length; i++) {
list.add(nodes.get(i).element);
}
if (region != REGION_SELF) {
regions[region].getElements(list, r);
} else {
getAllElements(list, true);
}
return list;
}
public ArrayList<T> getAllElements(ArrayList<T> list, boolean firstCall) {
if (regions != null) {
regions[REGION_NW].getAllElements(list, false);
regions[REGION_NE].getAllElements(list, false);
regions[REGION_SW].getAllElements(list, false);
regions[REGION_SE].getAllElements(list, false);
}
if (!firstCall) {
int length = nodes.size();
for (int i = 0; i < length; i++) {
list.add(nodes.get(i).element);
}
}
return list;
}
public void getAllZones(ArrayList<QuadRectangle> list) {
list.add(this.zone);
if (regions != null) {
regions[REGION_NW].getAllZones(list);
regions[REGION_NE].getAllZones(list);
regions[REGION_SW].getAllZones(list);
regions[REGION_SE].getAllZones(list);
}
}
}
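
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the original library. It is a minimal, hypothetical usage sketch
// added for illustration; the QuadRectangle arguments are taken as parameters because that class' constructor is
// not shown in this file.
// --------------------------------------------------------------------------------------------------------------
class QuadTreeUsageSketch {

    // Builds a root tree (level 0) over 'worldBounds', inserts one payload under 'itemBounds',
    // then collects the candidate elements stored in the region covering 'queryArea'.
    static ArrayList<String> demo(QuadRectangle worldBounds, QuadRectangle itemBounds, QuadRectangle queryArea) {
        QuadTree<String> tree = new QuadTree<String>(worldBounds, 0);
        tree.insert(itemBounds, "payload");
        return tree.getElements(new ArrayList<String>(), queryArea);
    }
}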
| alwex/QuadTree | src/main/java/com/alwex/tree/QuadTree.java | Java | apache-2.0 | 5,320 |
/*
* Copyright © 2010, 2011, 2012 Talis Systems Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.talis.hbase.rdf.test.junit;
import static org.openjena.atlas.lib.StrUtils.strjoinNL ;
import java.util.ArrayList;
import java.util.List;
import org.openjena.atlas.iterator.Iter;
import org.openjena.atlas.iterator.Transform;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.util.FileManager;
import com.talis.hbase.rdf.HBaseRdf;
import com.talis.hbase.rdf.Store;
import com.talis.hbase.rdf.StoreDesc;
import com.talis.hbase.rdf.store.StoreFactory;
import com.talis.hbase.rdf.util.Pair;
import com.talis.hbase.rdf.util.Vocab;
public class StoreList
{
static Property description = Vocab.property( HBaseRdf.namespace, "description" ) ;
static Property list = Vocab.property( HBaseRdf.namespace, "list" ) ;
static Resource storeListClass = Vocab.property( HBaseRdf.namespace, "StoreList" ) ;
static boolean formatStores = false ;
static String queryString = strjoinNL
(
"PREFIX hbaserdf: <http://rdf.hbase.talis.com/2011/hbase-rdf#>" ,
"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" ,
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>" ,
"PREFIX list: <http://jena.hpl.hp.com/ARQ/list#>" ,
"SELECT ?desc ?label" ,
"{ [] rdf:type hbaserdf:StoreList ;" ,
" hbaserdf:list ?l ." ,
" ?l list:member [ rdfs:label ?label ; hbaserdf:description ?desc ]",
"}") ;
// Not Java's finest hour ...
static Transform<Pair<String, String>, Pair<String, StoreDesc>> t1 = new Transform<Pair<String, String>, Pair<String, StoreDesc>>()
{
public Pair<String, StoreDesc> convert(Pair<String, String> pair)
{
return new Pair<String, StoreDesc>( pair.car(), StoreDesc.read( pair.cdr() ) ) ;
}
} ;
static Transform<Pair<String, StoreDesc>, Pair<String, Store>> t2 = new Transform<Pair<String, StoreDesc>, Pair<String, Store>>()
{
public Pair<String, Store> convert( Pair<String, StoreDesc> pair )
{
Store store = testStore( pair.cdr() ) ;
return new Pair<String, Store>( pair.car(), store ) ;
}
} ;
public static Store testStore( StoreDesc desc )
{
Store store = StoreFactory.create( desc ) ;
if ( formatStores || inMem( store ) )
store.getTableFormatter().create() ;
return store ;
}
public static boolean inMem( Store store ) { return false ; }
public static List<Pair<String, StoreDesc>> stores( String fn )
{
List<Pair<String, String>> x = storesByQuery( fn ) ;
List<Pair<String, StoreDesc>> z = Iter.iter( x ).map( t1 ).toList() ;
//List<Pair<String, Store>> z = Iter.iter(x).map(t1).map(t2).toList() ;
return z ;
}
public static List<Pair<String, StoreDesc>> storeDesc( String fn )
{
List<Pair<String, String>> x = storesByQuery( fn ) ;
List<Pair<String, StoreDesc>> y = Iter.iter( x ).map( t1 ).toList() ;
return y ;
}
private static List<Pair<String, String>> storesByQuery( String fn )
{
Model model = FileManager.get().loadModel( fn ) ;
List<Pair<String, String>> data = new ArrayList<Pair<String, String>>();
Query query = QueryFactory.create( queryString ) ;
QueryExecution qExec = QueryExecutionFactory.create( query, model ) ;
try
{
ResultSet rs = qExec.execSelect() ;
for ( ; rs.hasNext() ; )
{
QuerySolution qs = rs.nextSolution() ;
String label = qs.getLiteral( "label" ).getLexicalForm() ;
String desc = qs.getResource( "desc" ).getURI() ;
data.add( new Pair<String, String>( label, desc ) ) ;
}
} finally { qExec.close() ; }
return data ;
}
} | castagna/hbase-rdf | src/test/java/com/talis/hbase/rdf/test/junit/StoreList.java | Java | apache-2.0 | 4,892 |
package com.guanqing.subredditor.UI.Fragments;
import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.resource.drawable.GlideDrawable;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.target.Target;
import com.guanqing.subredditor.FrontPageModel;
import com.guanqing.subredditor.R;
import com.guanqing.subredditor.UI.Widgets.LoadingIndicatorView;
import com.guanqing.subredditor.UI.Widgets.UpvoteTextSwitcher;
import com.guanqing.subredditor.Utils.Constants;
import com.guanqing.subredditor.Utils.ImageUtil;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
* Created by Guanqing on 2015/12/3.
* Pop out and show a boarderless image view
*/
public class ZoomDialog extends DialogFragment {
public static final String DIALOG_FLAG = "ZoomDialog.DIALOG_FLAG";
public static final String SUBMISSION_MODEL_KEY = "ZoomDialog.SUBMISSION_MODEL_KEY";
// an array storing the width and height of current screen
static int[] screenSize;
// model containing all data of this submission
protected FrontPageModel model;
//custom view holder
private ViewHolder holder;
public static ZoomDialog newInstance(FrontPageModel model){
ZoomDialog fragment = new ZoomDialog();
Bundle bundle = new Bundle();
bundle.putParcelable(SUBMISSION_MODEL_KEY, model);
fragment.setArguments(bundle);
return fragment;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//get screen size in pixels
screenSize = Constants.getScreenSizeInPixels(getActivity());
if(getArguments()!=null) {
//get the data passed in
model = getArguments().getParcelable(SUBMISSION_MODEL_KEY);
}
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
//inflate the views and find views by id
final View view = inflater.inflate(R.layout.dialog_zoom, container, false);
holder = new ViewHolder(view);
//set title of the submission
holder.tvTitle.setText(model.getTitle());
//set comments count
holder.tvCommentCount.setText(model.getCommentCount() + "");
//change text and pic after user upvotes a submission
holder.tsUpvote.setCurrentText(model.getKarma() + "");
holder.tsUpvote.setListener(model.getKarma());
        holder.ivUpvotes.setOnClickListener(new View.OnClickListener() {
            // comparing Drawable references returned by getResources().getDrawable() is unreliable,
            // so the toggle state is tracked explicitly instead of comparing the current drawable
            private boolean upvoted = false;

            @Override
            public void onClick(View v) {
                upvoted = !upvoted;
                holder.ivUpvotes.setImageResource(upvoted
                        ? R.drawable.ic_arrow_up_blue
                        : R.drawable.ic_arrow_up);
                holder.tsUpvote.performClick();
            }
        });
//dismiss the dialog when user clicks the image
holder.ivImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dismiss();
}
});
return view;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Dialog dialog = super.onCreateDialog(savedInstanceState);
dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
dialog.getWindow().getAttributes().windowAnimations = R.style.dialog_animation_zoom;
return dialog;
}
@Override
public void onResume() {
super.onResume();
//error check
if (getDialog() == null) return;
//load image
loadImage();
//get a suitable width for the zoomed view
int width = ImageUtil.getAppropriateDialogWidth(model.getAspectRatio());
//set view size to fit the screen
        if (getResources().getConfiguration().orientation == android.content.res.Configuration.ORIENTATION_PORTRAIT) {
getDialog().getWindow().setLayout(width, WindowManager.LayoutParams.WRAP_CONTENT);
}
}
//load image into imageview
private void loadImage(){
//inflate the image
if(model.getAspectRatio() > 0){
int width = screenSize[0] *10/11;
int height = Float.valueOf(width / model.getAspectRatio()).intValue();
Glide.with(getActivity()).load(model.getLink())
.placeholder(R.drawable.avatar_loading)
.override(width, height)
.error(R.drawable.error)
.listener(new RequestListener<String, GlideDrawable>() {
@Override
public boolean onException(Exception e, String model, Target<GlideDrawable> target, boolean isFirstResource) {
holder.loadingIndicatorView.setVisibility(View.GONE);
return false;
}
@Override
public boolean onResourceReady(GlideDrawable resource, String model, Target<GlideDrawable> target, boolean isFromMemoryCache, boolean isFirstResource) {
holder.loadingIndicatorView.setVisibility(View.GONE);
float ratio = (float) resource.getIntrinsicWidth()/resource.getIntrinsicHeight();
//get a suitable width for the zoomed view
int width = ImageUtil.getAppropriateDialogWidth(ratio);
holder.ivImage.setMinimumHeight(new Float(width/ratio).intValue());
return false;
}
})
.thumbnail(0.1f)
.crossFade()
.into(holder.ivImage);
} else {
Glide.with(getActivity()).load(model.getLink())
.placeholder(R.drawable.avatar_loading)
.error(R.drawable.error)
.listener(new RequestListener<String, GlideDrawable>() {
@Override
public boolean onException(Exception e, String model, Target<GlideDrawable> target, boolean isFirstResource) {
holder.loadingIndicatorView.setVisibility(View.GONE);
return false;
}
@Override
public boolean onResourceReady(GlideDrawable resource, String model, Target<GlideDrawable> target, boolean isFromMemoryCache, boolean isFirstResource) {
holder.loadingIndicatorView.setVisibility(View.GONE);
return false;
}
})
.thumbnail(0.1f)
.crossFade()
.into(holder.ivImage);
}
}
protected class ViewHolder{
protected View view;
// UI reference
@Bind(R.id.ivImage_zoom) protected ImageView ivImage;
@Bind(R.id.btnSave_zoom) protected ImageButton btnSave;
@Bind(R.id.btnShare_zoom) protected ImageButton btnShare;
@Bind(R.id.btnComments_zoom) protected ImageButton btnComments;
@Bind(R.id.tvCommentCount_zoom) protected TextView tvCommentCount;
@Bind(R.id.tvFeedTitle_zoom) protected TextView tvTitle;
@Bind(R.id.tsUpvotesCounter_zoom) protected UpvoteTextSwitcher tsUpvote;
@Bind(R.id.ivUpvotes_zoom) protected ImageView ivUpvotes;
@Bind(R.id.loadingIndicator_zoom) protected LoadingIndicatorView loadingIndicatorView;
public ViewHolder(View view){
this.view = view;
ButterKnife.bind(this, view);
}
}
}
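
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the original app. It is a minimal, hypothetical usage sketch
// added for illustration, assuming the caller is a support-library FragmentActivity and already holds a
// populated FrontPageModel.
// --------------------------------------------------------------------------------------------------------------
class ZoomDialogUsageSketch {

    // Builds the dialog for one submission and shows it with the standard DialogFragment flow.
    static void show(android.support.v4.app.FragmentActivity activity, FrontPageModel model) {
        ZoomDialog.newInstance(model).show(activity.getSupportFragmentManager(), ZoomDialog.DIALOG_FLAG);
    }
}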
| haoguanqing/Subreddit_Reader | app/src/main/java/com/guanqing/subredditor/UI/Fragments/ZoomDialog.java | Java | apache-2.0 | 8,229 |
/**
* Paging.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201509.cm;
/**
* Specifies the page of results to return in the response. A page
* is specified
* by the result position to start at and the maximum number
* of results to
* return.
*/
public class Paging implements java.io.Serializable {
/* Index of the first result to return in this page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span> */
private java.lang.Integer startIndex;
/* Maximum number of results to return in this page. Set this
* to a reasonable value to limit
* the number of results returned per page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span> */
private java.lang.Integer numberResults;
public Paging() {
}
public Paging(
java.lang.Integer startIndex,
java.lang.Integer numberResults) {
this.startIndex = startIndex;
this.numberResults = numberResults;
}
/**
* Gets the startIndex value for this Paging.
*
* @return startIndex * Index of the first result to return in this page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span>
*/
public java.lang.Integer getStartIndex() {
return startIndex;
}
/**
* Sets the startIndex value for this Paging.
*
* @param startIndex * Index of the first result to return in this page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span>
*/
public void setStartIndex(java.lang.Integer startIndex) {
this.startIndex = startIndex;
}
/**
* Gets the numberResults value for this Paging.
*
* @return numberResults * Maximum number of results to return in this page. Set this
* to a reasonable value to limit
* the number of results returned per page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span>
*/
public java.lang.Integer getNumberResults() {
return numberResults;
}
/**
* Sets the numberResults value for this Paging.
*
* @param numberResults * Maximum number of results to return in this page. Set this
* to a reasonable value to limit
* the number of results returned per page.
* <span class="constraint InRange">This field must be
* greater than or equal to 0.</span>
*/
public void setNumberResults(java.lang.Integer numberResults) {
this.numberResults = numberResults;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof Paging)) return false;
Paging other = (Paging) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.startIndex==null && other.getStartIndex()==null) ||
(this.startIndex!=null &&
this.startIndex.equals(other.getStartIndex()))) &&
((this.numberResults==null && other.getNumberResults()==null) ||
(this.numberResults!=null &&
this.numberResults.equals(other.getNumberResults())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getStartIndex() != null) {
_hashCode += getStartIndex().hashCode();
}
if (getNumberResults() != null) {
_hashCode += getNumberResults().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(Paging.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "Paging"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("startIndex");
elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "startIndex"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("numberResults");
elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201509", "numberResults"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
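
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the generated Axis stub. It is a minimal, hypothetical sketch
// added for illustration of how a selector's paging is typically advanced page by page; the page size of 100 is
// an arbitrary example value.
// --------------------------------------------------------------------------------------------------------------
class PagingUsageSketch {

    // Returns the Paging object for the n-th page (0-based) when results are fetched 100 at a time.
    static Paging pageNumber(int n) {
        final int pageSize = 100;
        return new Paging(Integer.valueOf(n * pageSize), Integer.valueOf(pageSize));
    }
}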
| gawkermedia/googleads-java-lib | modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201509/cm/Paging.java | Java | apache-2.0 | 6,475 |
package org.wikipedia.nearby;
import org.wikipedia.page.PageActivityLongPressHandler;
import org.wikipedia.page.PageLongPressHandler;
import org.wikipedia.page.PageTitle;
import org.wikipedia.R;
import org.wikipedia.Site;
import org.wikipedia.Utils;
import org.wikipedia.WikipediaApp;
import org.wikipedia.history.HistoryEntry;
import org.wikipedia.page.PageActivity;
import org.wikipedia.util.ApiUtil;
import org.wikipedia.util.FeedbackUtil;
import org.wikipedia.util.log.L;
import com.mapbox.mapboxsdk.events.MapListener;
import com.mapbox.mapboxsdk.events.RotateEvent;
import com.mapbox.mapboxsdk.events.ScrollEvent;
import com.mapbox.mapboxsdk.events.ZoomEvent;
import com.mapbox.mapboxsdk.geometry.LatLng;
import com.mapbox.mapboxsdk.overlay.Icon;
import com.mapbox.mapboxsdk.overlay.Marker;
import com.mapbox.mapboxsdk.overlay.UserLocationOverlay;
import com.mapbox.mapboxsdk.tileprovider.tilesource.WebSourceTileLayer;
import com.mapbox.mapboxsdk.views.MapView;
import com.squareup.picasso.Picasso;
import android.annotation.TargetApi;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.hardware.GeomagneticField;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.location.Location;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.app.AlertDialog;
import android.text.TextUtils;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.MissingResourceException;
/**
* Displays a list of nearby pages.
*/
public class NearbyFragment extends Fragment implements SensorEventListener {
private static final String PREF_KEY_UNITS = "nearbyUnits";
private static final String NEARBY_LAST_RESULT = "lastRes";
private static final String NEARBY_CURRENT_LOCATION = "currentLoc";
private static final int ONE_THOUSAND = 1000;
private static final double METER_TO_FEET = 3.280839895;
private static final int ONE_MILE = 5280;
private final List<Marker> mMarkerList = new ArrayList<>();
private View nearbyListContainer;
private ListView nearbyList;
private MapView mapView;
private NearbyAdapter adapter;
private Icon mMarkerIconPassive;
private Icon mMarkerIconActive;
private WikipediaApp app;
private Site site;
private NearbyResult lastResult;
@Nullable private Location currentLocation;
private SensorManager mSensorManager;
private Sensor mAccelerometer;
private Sensor mMagnetometer;
//this holds the actual data from the accelerometer and magnetometer, and automatically
//maintains a moving average (low-pass filter) to reduce jitter.
private MovingAverageArray accelData;
private MovingAverageArray magneticData;
//The size with which we'll initialize our low-pass filters. This size seems like
//a good balance between effectively removing jitter, and good response speed.
//(Mimics a physical compass needle)
private static final int MOVING_AVERAGE_SIZE = 8;
//geomagnetic field data, to be updated whenever we update location.
//(will provide us with declination from true north)
private GeomagneticField geomagneticField;
//we'll maintain a list of CompassViews that are currently being displayed, and update them
//whenever we receive updates from sensors.
private List<NearbyCompassView> compassViews;
//whether to display distances in imperial units (feet/miles) instead of metric
private boolean showImperial = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
app = WikipediaApp.getInstance();
site = app.getPrimarySite();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_nearby, container, false);
rootView.setPadding(0, Utils.getContentTopOffsetPx(getActivity()), 0, 0);
nearbyListContainer = rootView.findViewById(R.id.nearby_list_container);
nearbyListContainer.setVisibility(View.GONE);
nearbyList = (ListView) rootView.findViewById(R.id.nearby_list);
mapView = (MapView) rootView.findViewById(R.id.mapview);
rootView.findViewById(R.id.user_location_button).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// don't change zoom level: https://github.com/mapbox/mapbox-android-sdk/issues/453
mapView.setUserLocationRequiredZoom(mapView.getZoomLevel());
mapView.goToUserLocation(true);
}
});
return rootView;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
setHasOptionsMenu(true);
adapter = new NearbyAdapter(getActivity(), new ArrayList<NearbyPage>());
nearbyList.setAdapter(adapter);
nearbyList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
NearbyPage nearbyPage = adapter.getItem(position);
PageTitle title = new PageTitle(nearbyPage.getTitle(), site, nearbyPage.getThumblUrl());
((PageActivity) getActivity()).showLinkPreview(title, HistoryEntry.SOURCE_NEARBY);
}
});
PageLongPressHandler.ListViewContextMenuListener contextMenuListener = new LongPressHandler((PageActivity) getActivity());
new PageLongPressHandler(getActivity(), nearbyList, HistoryEntry.SOURCE_NEARBY,
contextMenuListener);
mSensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE);
mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mMagnetometer = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
compassViews = new ArrayList<>();
if (!adapter.isEmpty()) {
setupGeomagneticField();
showNearbyPages(lastResult);
} else if (savedInstanceState != null) {
currentLocation = savedInstanceState.getParcelable(NEARBY_CURRENT_LOCATION);
if (currentLocation != null) {
lastResult = savedInstanceState.getParcelable(NEARBY_LAST_RESULT);
setupGeomagneticField();
showNearbyPages(lastResult);
}
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity());
//do we already have a preference for metric/imperial units?
if (prefs.contains(PREF_KEY_UNITS)) {
setImperialUnits(prefs.getBoolean(PREF_KEY_UNITS, false));
} else {
//if our locale is set to US, then use imperial units by default.
try {
if (Locale.getDefault().getISO3Country().equalsIgnoreCase(Locale.US.getISO3Country())) {
setImperialUnits(true);
}
} catch (MissingResourceException e) {
// getISO3Country can throw MissingResourceException: No 3-letter country code for locale: zz_ZZ
// Just ignore it.
}
}
mMarkerIconPassive = makeMarkerIcon(false);
mMarkerIconActive = makeMarkerIcon(true);
setRefreshingState(true);
initializeMap();
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (lastResult != null) {
outState.putParcelable(NEARBY_CURRENT_LOCATION, currentLocation);
outState.putParcelable(NEARBY_LAST_RESULT, lastResult);
}
}
@Override
public void onResume() {
super.onResume();
mapView.setUserLocationEnabled(true);
mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI);
mSensorManager.registerListener(this, mMagnetometer, SensorManager.SENSOR_DELAY_UI);
}
@Override
public void onPause() {
super.onPause();
mapView.setUserLocationEnabled(false);
mSensorManager.unregisterListener(this);
compassViews.clear();
}
private void initializeMap() {
WebSourceTileLayer tileSource = new WebSourceTileLayer(
"openstreetmap",
getString(R.string.map_tile_source_url),
true
);
mapView.setBubbleEnabled(false);
mapView.setDiskCacheEnabled(true);
mapView.setTileSource(tileSource);
mapView.setZoom(getResources().getInteger(R.integer.map_default_zoom));
mapView.setUserLocationTrackingMode(UserLocationOverlay.TrackingMode.FOLLOW_BEARING);
mapView.getUserLocationOverlay().runOnFirstFix(new Runnable() {
@Override
public void run() {
if (!isResumed()) {
return;
}
currentLocation = mapView.getUserLocationOverlay().getLastFix();
makeUseOfNewLocation(currentLocation);
fetchNearbyPages();
}
});
mapView.setMapViewListener(new DefaultMapViewListener() {
@Override
public void onTapMarker(MapView mapView, Marker marker) {
highlightMarker(marker);
int index = adapter.getPosition((NearbyPage) marker.getRelatedObject());
if (index == -1) {
return;
}
nearbyList.setSelection(index);
}
});
mapView.addListener(new MapListener() {
@Override
public void onScroll(ScrollEvent scrollEvent) {
fetchNearbyPages();
}
@Override
public void onZoom(ZoomEvent zoomEvent) {
fetchNearbyPages();
}
@Override
public void onRotate(RotateEvent rotateEvent) {
}
});
}
private Icon makeMarkerIcon(boolean isActive) {
int iconSize = (int) getResources().getDimension(R.dimen.map_marker_icon_size);
Bitmap bmp = Bitmap.createBitmap(iconSize, iconSize, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bmp);
Paint paint = new Paint();
paint.setAntiAlias(true);
Drawable d;
if (isActive) {
paint.setColor(getResources().getColor(R.color.blue_liberal));
int circleSize = bmp.getWidth() / 2;
canvas.drawCircle(circleSize, circleSize, circleSize, paint);
Drawable drawable = ContextCompat.getDrawable(getActivity(), R.drawable.ic_place_dark);
Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap();
d = new BitmapDrawable(getResources(), Bitmap.createScaledBitmap(bitmap,
iconSize, iconSize, true));
d = DrawableCompat.wrap(d).mutate();
DrawableCompat.setTint(d, getResources().getColor(R.color.blue_liberal));
} else {
paint.setColor(getResources().getColor(R.color.green_progressive));
int circleSize = bmp.getWidth() / 2;
canvas.drawCircle(circleSize, circleSize, circleSize / 2, paint);
d = new BitmapDrawable(getResources(), bmp);
}
return new Icon(d);
}
private void highlightMarker(Marker marker) {
for (Marker m : mMarkerList) {
if (m.equals(marker)) {
m.setIcon(mMarkerIconActive);
m.setHotspot(Marker.HotspotPlace.BOTTOM_CENTER);
} else {
m.setIcon(mMarkerIconPassive);
m.setHotspot(Marker.HotspotPlace.BOTTOM_CENTER);
}
}
}
private void showDialogForSettings() {
AlertDialog.Builder alert = new AlertDialog.Builder(getActivity());
alert.setMessage(R.string.nearby_dialog_goto_settings);
alert.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
try {
Intent myIntent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
startActivity(myIntent);
} catch (ActivityNotFoundException e) {
// it's highly unusual for a device not to have a Settings activity,
// but nevertheless, let's not crash in case it happens.
e.printStackTrace();
}
}
});
alert.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
}
});
alert.setCancelable(false);
AlertDialog ad = alert.create();
ad.show();
}
private void makeUseOfNewLocation(Location location) {
if (!isBetterLocation(location, currentLocation)) {
return;
}
currentLocation = location;
setupGeomagneticField();
updateDistances();
}
private void fetchNearbyPages() {
final int fetchTaskDelayMillis = 500;
mapView.removeCallbacks(fetchTaskRunnable);
mapView.postDelayed(fetchTaskRunnable, fetchTaskDelayMillis);
}
private Runnable fetchTaskRunnable = new Runnable() {
@Override
public void run() {
if (!isResumed()) {
return;
}
LatLng latLng = mapView.getCenter();
setRefreshingState(true);
new NearbyFetchTask(getActivity(), site, latLng.getLatitude(), latLng.getLongitude(), getMapRadius()) {
@Override
public void onFinish(NearbyResult result) {
if (!isResumed()) {
return;
}
lastResult = result;
showNearbyPages(result);
setRefreshingState(false);
}
@Override
public void onCatch(Throwable caught) {
if (!isResumed()) {
return;
}
L.e(caught);
FeedbackUtil.showError(getActivity(), caught);
setRefreshingState(false);
}
}.execute();
}
};
private double getMapRadius() {
LatLng leftTop = new LatLng(mapView.getBoundingBox().getLatNorth(), mapView.getBoundingBox().getLonWest());
LatLng rightTop = new LatLng(mapView.getBoundingBox().getLatNorth(), mapView.getBoundingBox().getLonEast());
LatLng leftBottom = new LatLng(mapView.getBoundingBox().getLatSouth(), mapView.getBoundingBox().getLonWest());
double width = leftTop.distanceTo(rightTop);
double height = leftTop.distanceTo(leftBottom);
return Math.min(width, height) / 2;
}
/** Updates geomagnetic field data, to give us our precise declination from true north. */
private void setupGeomagneticField() {
geomagneticField = new GeomagneticField((float)currentLocation.getLatitude(), (float)currentLocation.getLongitude(), 0, (new Date()).getTime());
}
/** Determines whether one Location reading is better than the current Location fix.
* lifted from http://developer.android.com/guide/topics/location/strategies.html
* @param location The new Location that you want to evaluate
* @param currentBestLocation The current Location fix, to which you want to compare the new one
*/
protected boolean isBetterLocation(Location location, Location currentBestLocation) {
if (currentBestLocation == null) {
// A new location is always better than no location
return true;
}
// Check whether the new location fix is newer or older
final int twoMinutes = 1000 * 60 * 2;
final int accuracyThreshold = 200;
long timeDelta = location.getTime() - currentBestLocation.getTime();
boolean isSignificantlyNewer = timeDelta > twoMinutes;
boolean isSignificantlyOlder = timeDelta < -twoMinutes;
boolean isNewer = timeDelta > 0;
// If it's been more than two minutes since the current location, use the new location
// because the user has likely moved
if (isSignificantlyNewer) {
return true;
// If the new location is more than two minutes older, it must be worse
} else if (isSignificantlyOlder) {
return false;
}
// Check whether the new location fix is more or less accurate
int accuracyDelta = (int) (location.getAccuracy() - currentBestLocation.getAccuracy());
boolean isLessAccurate = accuracyDelta > 0;
boolean isMoreAccurate = accuracyDelta < 0;
boolean isSignificantlyLessAccurate = accuracyDelta > accuracyThreshold;
// Check if the old and new location are from the same provider
boolean isFromSameProvider = isSameProvider(location.getProvider(),
currentBestLocation.getProvider());
// Determine location quality using a combination of timeliness and accuracy
if (isMoreAccurate) {
return true;
} else if (isNewer && !isLessAccurate) {
return true;
} else if (isNewer && !isSignificantlyLessAccurate && isFromSameProvider) {
return true;
}
return false;
}
/** Checks whether two providers are the same */
private boolean isSameProvider(String provider1, String provider2) {
if (provider1 == null) {
return provider2 == null;
}
return provider1.equals(provider2);
}
private void showNearbyPages(NearbyResult result) {
getActivity().invalidateOptionsMenu();
if (currentLocation != null) {
sortByDistance(result.getList());
}
adapter.clear();
addResultsToAdapter(result.getList());
compassViews.clear();
mMarkerList.clear();
mapView.clear();
nearbyListContainer.setVisibility(adapter.isEmpty() ? View.GONE : View.VISIBLE);
for (int i = 0; i < adapter.getCount(); i++) {
NearbyPage item = adapter.getItem(i);
Location location = item.getLocation();
Marker marker = new Marker(mapView, item.getTitle(), item.getDescription(),
new LatLng(location.getLatitude(), location.getLongitude()));
marker.setIcon(mMarkerIconPassive);
marker.setRelatedObject(item);
marker.setHotspot(Marker.HotspotPlace.BOTTOM_CENTER);
mMarkerList.add(marker);
}
mapView.addMarkers(mMarkerList);
}
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void addResultsToAdapter(List<NearbyPage> result) {
if (ApiUtil.hasHoneyComb()) {
adapter.addAll(result);
} else {
for (NearbyPage page : result) {
adapter.add(page);
}
}
}
private void setRefreshingState(boolean isRefreshing) {
((PageActivity)getActivity()).updateProgressBar(isRefreshing, true, 0);
}
private void sortByDistance(List<NearbyPage> nearbyPages) {
calcDistances(nearbyPages);
Collections.sort(nearbyPages, new Comparator<NearbyPage>() {
public int compare(NearbyPage a, NearbyPage b) {
return a.getDistance() - b.getDistance();
}
});
}
/**
* Calculates the distances from the origin to the given pages.
* This method should be called before sorting.
*/
private void calcDistances(List<NearbyPage> pages) {
for (NearbyPage page : pages) {
page.setDistance(getDistance(page.getLocation()));
}
}
private int getDistance(Location otherLocation) {
if (otherLocation == null) {
return Integer.MAX_VALUE;
} else {
return (int) currentLocation.distanceTo(otherLocation);
}
}
private String getDistanceLabel(Location otherLocation) {
final int meters = getDistance(otherLocation);
if (showImperial) {
final double feet = meters * METER_TO_FEET;
if (feet < ONE_THOUSAND) {
return getString(R.string.nearby_distance_in_feet, (int)feet);
} else {
return getString(R.string.nearby_distance_in_miles, feet / ONE_MILE);
}
} else {
if (meters < ONE_THOUSAND) {
return getString(R.string.nearby_distance_in_meters, meters);
} else {
return getString(R.string.nearby_distance_in_kilometers, meters / (double)ONE_THOUSAND);
}
}
}
private void updateDistances() {
adapter.notifyDataSetChanged();
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
if (!isAdded() || ((PageActivity)getActivity()).isSearching()) {
return;
}
inflater.inflate(R.menu.menu_nearby, menu);
menu.findItem(R.id.menu_metric_imperial).setTitle(showImperial
? getString(R.string.nearby_set_metric)
: getString(R.string.nearby_set_imperial));
}
@Override
public void onPrepareOptionsMenu(Menu menu) {
super.onPrepareOptionsMenu(menu);
if (!isAdded() || ((PageActivity)getActivity()).isSearching()) {
return;
}
menu.findItem(R.id.menu_metric_imperial).setTitle(showImperial
? getString(R.string.nearby_set_metric)
: getString(R.string.nearby_set_imperial));
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_metric_imperial:
setImperialUnits(!showImperial);
adapter.notifyDataSetInvalidated();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
private void setImperialUnits(boolean imperial) {
showImperial = imperial;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity());
prefs.edit().putBoolean(PREF_KEY_UNITS, showImperial).apply();
getActivity().supportInvalidateOptionsMenu();
}
private View.OnClickListener markerClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
Marker marker = findPageMarker((NearbyPage) v.getTag());
if (marker != null) {
highlightMarker(marker);
}
}
};
@Nullable
private Marker findPageMarker(NearbyPage nearbyPage) {
Marker result = null;
for (Marker marker : mMarkerList) {
if (marker.getRelatedObject().equals(nearbyPage)) {
result = marker;
break;
}
}
return result;
}
private View.OnLongClickListener markerLongClickListener = new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
int[] pos = new int[2];
v.getLocationInWindow(pos);
// display a toast that shows a tooltip based on the button's content description,
// like the standard ActionBar does.
Toast t = Toast.makeText(getActivity(), v.getContentDescription(), Toast.LENGTH_SHORT);
t.setGravity(Gravity.TOP | Gravity.END, 0, pos[1]);
t.show();
return true;
}
};
private class NearbyAdapter extends ArrayAdapter<NearbyPage> {
private static final int LAYOUT_ID = R.layout.item_nearby_entry;
public NearbyAdapter(Context context, ArrayList<NearbyPage> pages) {
super(context, LAYOUT_ID, pages);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
NearbyPage nearbyPage = getItem(position);
ViewHolder viewHolder;
if (convertView == null) {
viewHolder = new ViewHolder();
LayoutInflater inflater = LayoutInflater.from(getContext());
convertView = inflater.inflate(LAYOUT_ID, parent, false);
viewHolder.thumbnail = (NearbyCompassView) convertView.findViewById(R.id.nearby_thumbnail);
viewHolder.title = (TextView) convertView.findViewById(R.id.nearby_title);
viewHolder.description = (TextView) convertView.findViewById(R.id.nearby_description);
viewHolder.distance = (TextView) convertView.findViewById(R.id.nearby_distance);
viewHolder.markerButton = convertView.findViewById(R.id.nearby_marker);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
viewHolder.title.setText(nearbyPage.getTitle());
if (TextUtils.isEmpty(nearbyPage.getDescription())) {
viewHolder.description.setVisibility(View.GONE);
} else {
viewHolder.description.setText(nearbyPage.getDescription());
viewHolder.description.setVisibility(View.VISIBLE);
}
viewHolder.markerButton.setTag(nearbyPage);
viewHolder.markerButton.setOnClickListener(markerClickListener);
viewHolder.markerButton.setOnLongClickListener(markerLongClickListener);
viewHolder.thumbnail.setMaskColor(getResources().getColor(Utils.getThemedAttributeId(getActivity(), R.attr.page_background_color)));
if (currentLocation == null) {
viewHolder.distance.setVisibility(View.INVISIBLE);
viewHolder.thumbnail.setEnabled(false);
} else {
// set the calculated angle as the base angle for our compass view
viewHolder.thumbnail.setAngle((float) calculateAngle(nearbyPage.getLocation()));
viewHolder.thumbnail.setTickColor(getResources().getColor(R.color.button_light));
viewHolder.thumbnail.setEnabled(true);
if (!compassViews.contains(viewHolder.thumbnail)) {
compassViews.add(viewHolder.thumbnail);
}
viewHolder.distance.setText(getDistanceLabel(nearbyPage.getLocation()));
viewHolder.distance.setVisibility(View.VISIBLE);
}
if (app.isImageDownloadEnabled()) {
Picasso.with(getActivity())
.load(nearbyPage.getThumblUrl())
.placeholder(R.drawable.ic_pageimage_placeholder)
.error(R.drawable.ic_pageimage_placeholder)
.into(viewHolder.thumbnail);
} else {
Picasso.with(getActivity())
.load(R.drawable.ic_pageimage_placeholder)
.into(viewHolder.thumbnail);
}
return convertView;
}
private double calculateAngle(Location otherLocation) {
// simplified angle between two vectors...
// vector pointing towards north from our location = [0, 1]
// vector pointing towards destination from our location = [a1, a2]
double a1 = otherLocation.getLongitude() - currentLocation.getLongitude();
double a2 = otherLocation.getLatitude() - currentLocation.getLatitude();
// cos θ = (v1*a1 + v2*a2) / (√(v1²+v2²) * √(a1²+a2²))
double angle = Math.toDegrees(Math.acos(a2 / Math.sqrt(a1 * a1 + a2 * a2)));
// since the acos function only goes between 0 to 180 degrees, we'll manually
// negate the angle if the destination's longitude is on the opposite side.
if (a1 < 0f) {
angle = -angle;
}
return angle;
}
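
        // Worked example (editor's note, illustrative numbers only): if the destination lies one degree east and
        // one degree north of the current location, then a1 = 1 and a2 = 1, so
        // angle = toDegrees(acos(1 / sqrt(2))) = 45 degrees; since a1 > 0 the sign is kept and the needle points
        // north-east. If the destination were one degree west instead (a1 = -1), the angle would be negated to -45.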
private class ViewHolder {
private NearbyCompassView thumbnail;
private TextView title;
private TextView description;
private TextView distance;
private View markerButton;
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int i) { }
@Override
public void onSensorChanged(SensorEvent event) {
if (!isAdded()) {
return;
}
//acquire raw data from sensors...
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
if (accelData == null) {
accelData = new MovingAverageArray(event.values.length, MOVING_AVERAGE_SIZE);
}
accelData.addData(event.values);
} else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
if (magneticData == null) {
magneticData = new MovingAverageArray(event.values.length, MOVING_AVERAGE_SIZE);
}
magneticData.addData(event.values);
}
if (accelData == null || magneticData == null) {
return;
}
final int matrixSize = 9;
final int orientationSize = 3;
final int quarterTurn = 90;
float[] mR = new float[matrixSize];
//get the device's rotation matrix with respect to world coordinates, based on the sensor data
if (!SensorManager.getRotationMatrix(mR, null, accelData.getData(), magneticData.getData())) {
Log.e("NearbyActivity", "getRotationMatrix failed.");
return;
}
//get device's orientation with respect to world coordinates, based on the
//rotation matrix acquired above.
float[] orientation = new float[orientationSize];
SensorManager.getOrientation(mR, orientation);
// orientation[0] = azimuth
// orientation[1] = pitch
// orientation[2] = roll
float azimuth = (float) Math.toDegrees(orientation[0]);
//adjust for declination from magnetic north...
float declination = 0f;
if (geomagneticField != null) {
declination = geomagneticField.getDeclination();
}
azimuth += declination;
//adjust for device screen rotation
int rotation = ((WindowManager) getActivity().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_90:
azimuth += quarterTurn;
break;
case Surface.ROTATION_180:
azimuth += quarterTurn * 2;
break;
case Surface.ROTATION_270:
azimuth -= quarterTurn;
break;
default:
break;
}
//update views!
for (NearbyCompassView view : compassViews) {
view.setAzimuth(-azimuth);
}
}
private class LongPressHandler extends PageActivityLongPressHandler
implements PageLongPressHandler.ListViewContextMenuListener {
public LongPressHandler(@NonNull PageActivity activity) {
super(activity);
}
@Override
public PageTitle getTitleForListPosition(int position) {
NearbyPage page = adapter.getItem(position);
return new PageTitle(page.getTitle(), site, page.getThumblUrl());
}
}
}
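
// --------------------------------------------------------------------------------------------------------------
// Editor's note: the class below is NOT part of the original Wikipedia app. It is a minimal, hypothetical sketch
// of how a fragment like this is attached to a container; in the real app the fragment is hosted by PageActivity
// (several callbacks above cast getActivity() to PageActivity), so the hosting activity and 'containerId' are
// assumptions for illustration only.
// --------------------------------------------------------------------------------------------------------------
class NearbyFragmentUsageSketch {

    // Attaches a new NearbyFragment instance to the given container via a standard fragment transaction.
    static void attach(android.support.v4.app.FragmentActivity activity, int containerId) {
        activity.getSupportFragmentManager()
                .beginTransaction()
                .replace(containerId, new NearbyFragment())
                .commit();
    }
}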
| parvez3019/apps-android-wikipedia | app/src/main/java/org/wikipedia/nearby/NearbyFragment.java | Java | apache-2.0 | 32,945 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/resources/keyword_plan_campaign.proto
package com.google.ads.googleads.v8.resources;
/**
* <pre>
* A geo target.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.resources.KeywordPlanGeoTarget}
*/
public final class KeywordPlanGeoTarget extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.resources.KeywordPlanGeoTarget)
KeywordPlanGeoTargetOrBuilder {
private static final long serialVersionUID = 0L;
// Use KeywordPlanGeoTarget.newBuilder() to construct.
private KeywordPlanGeoTarget(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private KeywordPlanGeoTarget() {
geoTargetConstant_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new KeywordPlanGeoTarget();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private KeywordPlanGeoTarget(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
java.lang.String s = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
geoTargetConstant_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.resources.KeywordPlanCampaignProto.internal_static_google_ads_googleads_v8_resources_KeywordPlanGeoTarget_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.resources.KeywordPlanCampaignProto.internal_static_google_ads_googleads_v8_resources_KeywordPlanGeoTarget_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.class, com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.Builder.class);
}
private int bitField0_;
public static final int GEO_TARGET_CONSTANT_FIELD_NUMBER = 2;
private volatile java.lang.Object geoTargetConstant_;
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return Whether the geoTargetConstant field is set.
*/
@java.lang.Override
public boolean hasGeoTargetConstant() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return The geoTargetConstant.
*/
@java.lang.Override
public java.lang.String getGeoTargetConstant() {
java.lang.Object ref = geoTargetConstant_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
geoTargetConstant_ = s;
return s;
}
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return The bytes for geoTargetConstant.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getGeoTargetConstantBytes() {
java.lang.Object ref = geoTargetConstant_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
geoTargetConstant_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, geoTargetConstant_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, geoTargetConstant_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget)) {
return super.equals(obj);
}
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget other = (com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget) obj;
if (hasGeoTargetConstant() != other.hasGeoTargetConstant()) return false;
if (hasGeoTargetConstant()) {
if (!getGeoTargetConstant()
.equals(other.getGeoTargetConstant())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGeoTargetConstant()) {
hash = (37 * hash) + GEO_TARGET_CONSTANT_FIELD_NUMBER;
hash = (53 * hash) + getGeoTargetConstant().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A geo target.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.resources.KeywordPlanGeoTarget}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.resources.KeywordPlanGeoTarget)
com.google.ads.googleads.v8.resources.KeywordPlanGeoTargetOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.resources.KeywordPlanCampaignProto.internal_static_google_ads_googleads_v8_resources_KeywordPlanGeoTarget_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.resources.KeywordPlanCampaignProto.internal_static_google_ads_googleads_v8_resources_KeywordPlanGeoTarget_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.class, com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.Builder.class);
}
// Construct using com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
geoTargetConstant_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v8.resources.KeywordPlanCampaignProto.internal_static_google_ads_googleads_v8_resources_KeywordPlanGeoTarget_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget getDefaultInstanceForType() {
return com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget build() {
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget buildPartial() {
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget result = new com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
to_bitField0_ |= 0x00000001;
}
result.geoTargetConstant_ = geoTargetConstant_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget) {
return mergeFrom((com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget other) {
if (other == com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget.getDefaultInstance()) return this;
if (other.hasGeoTargetConstant()) {
bitField0_ |= 0x00000001;
geoTargetConstant_ = other.geoTargetConstant_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object geoTargetConstant_ = "";
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return Whether the geoTargetConstant field is set.
*/
public boolean hasGeoTargetConstant() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return The geoTargetConstant.
*/
public java.lang.String getGeoTargetConstant() {
java.lang.Object ref = geoTargetConstant_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
geoTargetConstant_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return The bytes for geoTargetConstant.
*/
public com.google.protobuf.ByteString
getGeoTargetConstantBytes() {
java.lang.Object ref = geoTargetConstant_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
geoTargetConstant_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @param value The geoTargetConstant to set.
* @return This builder for chaining.
*/
public Builder setGeoTargetConstant(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
geoTargetConstant_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @return This builder for chaining.
*/
public Builder clearGeoTargetConstant() {
bitField0_ = (bitField0_ & ~0x00000001);
geoTargetConstant_ = getDefaultInstance().getGeoTargetConstant();
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the geo target.
* </pre>
*
* <code>optional string geo_target_constant = 2 [(.google.api.resource_reference) = { ... }</code>
* @param value The bytes for geoTargetConstant to set.
* @return This builder for chaining.
*/
public Builder setGeoTargetConstantBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
bitField0_ |= 0x00000001;
geoTargetConstant_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.resources.KeywordPlanGeoTarget)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.resources.KeywordPlanGeoTarget)
private static final com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget();
}
public static com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<KeywordPlanGeoTarget>
PARSER = new com.google.protobuf.AbstractParser<KeywordPlanGeoTarget>() {
@java.lang.Override
public KeywordPlanGeoTarget parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new KeywordPlanGeoTarget(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<KeywordPlanGeoTarget> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<KeywordPlanGeoTarget> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v8.resources.KeywordPlanGeoTarget getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| googleads/google-ads-java | google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/resources/KeywordPlanGeoTarget.java | Java | apache-2.0 | 22,601 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.hive.udf.string;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
/**
* Returns true when the haystack string (first argument) begins with the
* needle string (second argument). If either argument is NULL then NULL is
* returned.
*
* @author jonchang
*/
@Description(name = "starts_with",
value = "_FUNC_(haystack, needle) - Return whether " +
"haystack begins with needle.")
public class UDFStartsWith extends UDF {
public Boolean evaluate(String haystack, String needle) {
if (haystack == null || needle == null) {
return null;
}
return haystack.startsWith(needle);
}
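    /*
     * Usage sketch (assuming the UDF is registered under the name given in the @Description above):
     *   CREATE TEMPORARY FUNCTION starts_with AS 'com.facebook.hive.udf.string.UDFStartsWith';
     *   SELECT starts_with('haystack', 'hay');   -- true
     *   SELECT starts_with('haystack', NULL);    -- NULL
     * Direct Java call, e.g. from a unit test:
     *   Boolean b = new UDFStartsWith().evaluate("haystack", "hay"); // Boolean.TRUE
     */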
}
| brndnmtthws/facebook-hive-udfs | src/main/java/com/facebook/hive/udf/UDFStartsWith.java | Java | apache-2.0 | 1,512 |
package org.yukthi.edutool.entity;
import java.util.Date;
import java.util.Set;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import org.hibernate.annotations.GenericGenerator;
@Entity
@NamedQueries(value={
@NamedQuery(name="findAdmissionByStudentId",query="from Admission a where a.student = :student")
})
public class Admission {
@GeneratedValue(generator = "uuid2")
@GenericGenerator(name = "uuid2", strategy = "uuid2")
@Column(columnDefinition = "BINARY(16)")
@Id
private UUID admissionId;
private boolean active = true;
private int acedemicYear;
private Date doj;
private double rteDiscount;
@ManyToOne(fetch = FetchType.LAZY)
private Student student;
@ManyToOne(fetch = FetchType.LAZY)
private Class associatedClass;
@OneToMany(mappedBy = "admission", fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
private Set<Payment> payments;
public enum AdmitType {
NEW, PROMOTED, REPEATED
}
public UUID getAdmissionId() {
return admissionId;
}
public void setAdmissionId(UUID admissionId) {
this.admissionId = admissionId;
}
public boolean isActive() {
return active;
}
public void setActive(boolean active) {
this.active = active;
}
public int getAcedemicYear() {
return acedemicYear;
}
public void setAcedemicYear(int acedemicYear) {
this.acedemicYear = acedemicYear;
}
public Date getDoj() {
return doj;
}
public void setDoj(Date doj) {
this.doj = doj;
}
public double getRteDiscount() {
return rteDiscount;
}
public void setRteDiscount(double rteDiscount) {
this.rteDiscount = rteDiscount;
}
public Student getStudent() {
return student;
}
public void setStudent(Student student) {
this.student = student;
}
public Class getAssociatedClass() {
return associatedClass;
}
public void setAssociatedClass(Class associatedClass) {
this.associatedClass = associatedClass;
}
public Set<Payment> getPayments() {
return payments;
}
public void setPayments(Set<Payment> payments) {
this.payments = payments;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((admissionId == null) ? 0 : admissionId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Admission other = (Admission) obj;
if (admissionId == null) {
if (other.admissionId != null)
return false;
} else if (!admissionId.equals(other.admissionId))
return false;
return true;
}
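    /*
     * Usage sketch for the named query declared above (the entityManager and student variables are
     * assumed to be supplied by the surrounding application code; they are not part of this entity):
     *   List<Admission> admissions = entityManager
     *       .createNamedQuery("findAdmissionByStudentId", Admission.class)
     *       .setParameter("student", student)
     *       .getResultList();
     */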
}
| yukthi-jayaram/EduTool | src/main/java/org/yukthi/edutool/entity/Admission.java | Java | apache-2.0 | 3,067 |
package vn.edu.usth.myapplication.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import vn.edu.usth.myapplication.R;
/**
* Created by linhtynny on 15/11/2016.
*/
public class NewsfeedFragment extends Fragment {
private static final String TAG = "NewsfeedFrag";
public NewsfeedFragment() {
}
public static NewsfeedFragment newInstance(String chosen) {
Bundle args = new Bundle();
args.putString("chosen", chosen);
NewsfeedFragment newsfeedfragment = new NewsfeedFragment();
newsfeedfragment.setArguments(args);
return newsfeedfragment;
}
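    /*
     * The "chosen" argument stored above can be read back once the fragment is created, for example
     * (illustrative only; the inflated layout below does not use it yet):
     *   String chosen = getArguments() != null ? getArguments().getString("chosen") : null;
     */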
@Nullable
@Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the fragment layout directly; the throwaway View created before inflation was redundant.
        View v = inflater.inflate(R.layout.test, container, false);
        return v;
}
}
| linhtynny/androiddev2017 | MyApplication/app/src/main/java/vn/edu/usth/myapplication/fragment/NewsfeedFragment.java | Java | apache-2.0 | 1,042 |
package darks.orm.test;
import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import darks.orm.app.SqlSession;
import darks.orm.core.factory.SqlSessionFactory;
import darks.orm.test.model.User;
public class QueryTest
{
SqlSession session = null;
@Before
public void before()
{
session = SqlSessionFactory.getSession();
Assert.assertNotNull(session);
}
@After
public void after()
{
session.close();
}
@Test
public void testQuery()
{
List<User> users = session.queryList(User.class, "select * from users");
        Assert.assertNotNull(users);
Assert.assertFalse(users.isEmpty());
}
}
| liulhdarks/darks-orm | test/darks/orm/test/QueryTest.java | Java | apache-2.0 | 826 |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.metric.v2;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.deidentifier.arx.ARXConfiguration;
import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.RowSet;
import org.deidentifier.arx.certificate.elements.ElementData;
import org.deidentifier.arx.framework.check.groupify.HashGroupify;
import org.deidentifier.arx.framework.check.groupify.HashGroupifyEntry;
import org.deidentifier.arx.framework.data.Data;
import org.deidentifier.arx.framework.data.DataManager;
import org.deidentifier.arx.framework.data.GeneralizationHierarchy;
import org.deidentifier.arx.framework.lattice.Transformation;
import org.deidentifier.arx.metric.MetricConfiguration;
/**
* This class implements the KL Divergence metric.
* Ashwin Machanavajjhala, Daniel Kifer, Johannes Gehrke, Muthuramakrishnan Venkitasubramaniam:
* L-diversity: Privacy beyond k-anonymity
* ACM Transactions on Knowledge Discovery from Data (TKDD), Volume 1 Issue 1, March 2007
*
* @author Fabian Prasser
*/
public class MetricSDNMKLDivergence extends AbstractMetricSingleDimensional {
/** Tuple wrapper*/
class TupleWrapper {
/** Field*/
private final int[] tuple;
/** Field*/
private final int hash;
/**
* Constructor
* @param tuple
*/
public TupleWrapper(int[] tuple) {
this.tuple = tuple;
this.hash = Arrays.hashCode(tuple);
}
@Override
public boolean equals(Object other) {
return Arrays.equals(this.tuple, ((TupleWrapper)other).tuple);
}
@Override
public int hashCode() {
return hash;
}
}
/** SUID. */
private static final long serialVersionUID = -4918601543733931921L;
/**
* Computes log 2.
*
* @param num
* @return
*/
static final double log2(final double num) {
return Math.log(num) / LOG2;
}
/** Total number of tuples, depends on existence of research subset. */
private Double tuples = null;
/** Domain shares for each dimension. */
private DomainShare[] shares;
/** Maximum value */
private Double max = null;
/** Tuple matcher */
private transient TupleMatcher matcher = null;
/** Distribution */
private double[] inputDistribution = null;
/** Log 2. */
private static final double LOG2 = Math.log(2);
/** Maximal area */
private double maximalArea = 0d;
/**
* Default constructor.
*/
public MetricSDNMKLDivergence(){
super(true, false, false);
}
@Override
public ILSingleDimensional createMaxInformationLoss() {
if (max == null) {
throw new IllegalStateException("Metric must be initialized first");
} else {
return new ILSingleDimensional(max);
}
}
@Override
public ILSingleDimensional createMinInformationLoss() {
return new ILSingleDimensional(0);
}
/**
* Returns the configuration of this metric.
*
* @return
*/
public MetricConfiguration getConfiguration() {
return new MetricConfiguration(false, // monotonic
0.5d, // gs-factor
false, // precomputed
0.0d, // precomputation threshold
AggregateFunction.SUM // aggregate function
);
}
@Override
public String getName() {
return "KL-Divergence";
}
@Override
public ElementData render(ARXConfiguration config) {
ElementData result = new ElementData("KL divergence");
result.addProperty("Monotonic", this.isMonotonic(config.getMaxOutliers()));
return result;
}
@Override
public String toString() {
return "KL-Divergence";
}
/**
* Returns the area
* @param output
* @param generalization
* @return
*/
private double getArea(int[] output, int[] generalization) {
double result = 1d;
for (int dimension = 0; dimension < output.length; dimension++) {
DomainShare share = this.shares[dimension];
result *= share.getShare(output[dimension], generalization[dimension]) * share.getDomainSize();
}
return result;
}
@Override
protected ILSingleDimensionalWithBound getInformationLossInternal(Transformation node, HashGroupify g) {
// Obtain number of outliers
double outliers = 0d;
HashGroupifyEntry m = g.getFirstEquivalenceClass();
while (m != null) {
outliers += !m.isNotOutlier ? m.count : 0d;
m = m.nextOrdered;
}
// Init
double result = 0d;
// For each tuple
for (int row = 0; row < this.inputDistribution.length; row++) {
// Obtain frequency
double inputFrequency = inputDistribution[row];
// Only if present
if (inputFrequency != 0d) {
int[] generalization = node.getGeneralization();
HashGroupifyEntry entry = this.matcher.getEntry(row, generalization, g);
double outputFrequency = entry.isNotOutlier ? entry.count : outliers;
outputFrequency /= this.tuples;
outputFrequency /= entry.isNotOutlier ? getArea(entry.key, generalization) : maximalArea;
// Compute KL-Divergence
result += inputFrequency * log2(inputFrequency / outputFrequency);
}
}
// Return
return new ILSingleDimensionalWithBound(result);
}
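    /*
     * The loop above evaluates the discrete Kullback-Leibler divergence
     *   KL(P || Q) = sum over records x of p(x) * log2( p(x) / q(x) )
     * where p(x) is the relative frequency of an input record and q(x) is the relative frequency of its
     * anonymized equivalence class, spread over the generalized area (or over the whole domain for
     * suppressed records). Purely illustrative numbers: a record with p = 0.02 whose class yields
     * q = 0.005 contributes 0.02 * log2(0.02 / 0.005) = 0.02 * 2 = 0.04 to the result.
     */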
@Override
protected ILSingleDimensionalWithBound getInformationLossInternal(Transformation node, HashGroupifyEntry entry) {
return new ILSingleDimensionalWithBound(entry.count, entry.count);
}
@Override
protected ILSingleDimensional getLowerBoundInternal(Transformation node) {
return null;
}
@Override
protected ILSingleDimensional getLowerBoundInternal(Transformation node,
HashGroupify g) {
return null;
}
@Override
protected void initializeInternal(final DataManager manager,
final DataDefinition definition,
final Data input,
final GeneralizationHierarchy[] hierarchies,
final ARXConfiguration config) {
// Prepare weights
super.initializeInternal(manager, definition, input, hierarchies, config);
// Compute domain shares
this.shares = new DomainShare[hierarchies.length];
for (int i = 0; i < shares.length; i++) {
// Extract info
String attribute = input.getHeader()[i];
String[][] hierarchy = definition.getHierarchy(attribute);
this.shares[i] = new DomainShareMaterialized(hierarchy,
input.getDictionary().getMapping()[i],
hierarchies[i].getArray());
}
// Determine total number of tuples
this.tuples = (double)super.getNumRecords(config, input);
RowSet subset = super.getSubset(config);
// Tuple matcher
this.matcher = new TupleMatcher(hierarchies, input.getArray());
        // Maximal area
this.maximalArea = 1d;
for (int dimension = 0; dimension < this.shares.length; dimension++) {
maximalArea *= this.shares[dimension].getDomainSize();
}
// Groupify
Map<TupleWrapper, Integer> groupify = new HashMap<TupleWrapper, Integer>();
for (int row = 0; row < input.getDataLength(); row++) {
if (subset == null || subset.contains(row)) {
TupleWrapper wrapper = new TupleWrapper(input.getArray()[row]);
Integer count = groupify.get(wrapper);
count = count == null ? 1 : count + 1;
groupify.put(wrapper, count);
}
}
// Build input distribution and compute max
this.max = 0d;
this.inputDistribution = new double[input.getArray().length];
for (int row = 0; row < input.getDataLength(); row++) {
if (subset == null || subset.contains(row)) {
TupleWrapper wrapper = new TupleWrapper(input.getArray()[row]);
double frequency = groupify.get(wrapper).doubleValue() / this.tuples;
                this.inputDistribution[row] = frequency;
this.max += frequency * log2(frequency * maximalArea);
}
}
}
}
| jgaupp/arx | src/main/org/deidentifier/arx/metric/v2/MetricSDNMKLDivergence.java | Java | apache-2.0 | 10,203 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.connect.impl;
import java.io.Closeable;
import java.io.IOException;
import org.camunda.connect.spi.CloseableConnectorResponse;
/**
* This class is a base class for implementing closeable connector responses
* based on {@link Closeable}.
*
* @author Daniel Meyer
*
*/
public abstract class AbstractCloseableConnectorResponse extends AbstractConnectorResponse implements CloseableConnectorResponse {
private final static ConnectCoreLogger LOG = ConnectLogger.CORE_LOGGER;
/**
* Implements the default close behavior
*/
public void close() {
Closeable closable = getClosable();
try {
LOG.closingResponse(this);
closable.close();
LOG.successfullyClosedResponse(this);
} catch (IOException e) {
throw LOG.exceptionWhileClosingResponse(e);
}
}
/**
* Allows subclasses to provide the closeable resource.
* @return the {@link Closeable} resource
*/
protected abstract Closeable getClosable();
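  /*
   * Minimal subclass sketch (class and field names are hypothetical, shown only to illustrate the
   * contract of getClosable(); any further abstract methods inherited from AbstractConnectorResponse
   * still have to be implemented):
   *   public class MyHttpConnectorResponse extends AbstractCloseableConnectorResponse {
   *     protected final java.io.Closeable rawResponse; // the underlying closeable resource
   *     public MyHttpConnectorResponse(java.io.Closeable rawResponse) { this.rawResponse = rawResponse; }
   *     protected Closeable getClosable() { return rawResponse; }
   *   }
   */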
}
| nagyistoce/camunda-connect | core/src/main/java/org/camunda/connect/impl/AbstractCloseableConnectorResponse.java | Java | apache-2.0 | 1,547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.runtime.distributed;
import org.apache.kafka.clients.ApiVersions;
import org.apache.kafka.clients.ClientDnsLookup;
import org.apache.kafka.clients.ClientUtils;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.Metadata;
import org.apache.kafka.clients.NetworkClient;
import org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient;
import org.apache.kafka.clients.GroupRebalanceConfig;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.internals.ClusterResourceListeners;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.apache.kafka.common.network.ChannelBuilder;
import org.apache.kafka.common.network.Selector;
import org.apache.kafka.common.utils.AppInfoParser;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.storage.ConfigBackingStore;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.slf4j.Logger;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* This class manages the coordination process with brokers for the Connect cluster group membership. It ties together
* the Coordinator, which implements the group member protocol, with all the other pieces needed to drive the connection
* to the group coordinator broker. This isolates all the networking to a single thread managed by this class, with
* higher level operations in response to group membership events being handled by the herder.
*/
public class WorkerGroupMember {
private static final String JMX_PREFIX = "kafka.connect";
private final Logger log;
private final Time time;
private final String clientId;
private final ConsumerNetworkClient client;
private final Metrics metrics;
private final Metadata metadata;
private final long retryBackoffMs;
private final WorkerCoordinator coordinator;
private boolean stopped = false;
public WorkerGroupMember(DistributedConfig config,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener,
Time time,
String clientId,
LogContext logContext) {
try {
this.time = time;
this.clientId = clientId;
this.log = logContext.logger(WorkerGroupMember.class);
Map<String, String> metricsTags = new LinkedHashMap<>();
metricsTags.put("client-id", clientId);
MetricConfig metricConfig = new MetricConfig().samples(config.getInt(CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG))
.timeWindow(config.getLong(CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS)
.tags(metricsTags);
List<MetricsReporter> reporters = config.getConfiguredInstances(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG,
MetricsReporter.class,
Collections.singletonMap(CommonClientConfigs.CLIENT_ID_CONFIG, clientId));
reporters.add(new JmxReporter(JMX_PREFIX));
this.metrics = new Metrics(metricConfig, reporters, time);
this.retryBackoffMs = config.getLong(CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG);
this.metadata = new Metadata(retryBackoffMs, config.getLong(CommonClientConfigs.METADATA_MAX_AGE_CONFIG),
logContext, new ClusterResourceListeners());
List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(
config.getList(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG),
config.getString(CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG));
this.metadata.bootstrap(addresses, time.milliseconds());
String metricGrpPrefix = "connect";
ChannelBuilder channelBuilder = ClientUtils.createChannelBuilder(config, time);
NetworkClient netClient = new NetworkClient(
new Selector(config.getLong(CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_CONFIG), metrics, time, metricGrpPrefix, channelBuilder, logContext),
this.metadata,
clientId,
100, // a fixed large enough value will suffice
config.getLong(CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG),
config.getLong(CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG),
config.getInt(CommonClientConfigs.SEND_BUFFER_CONFIG),
config.getInt(CommonClientConfigs.RECEIVE_BUFFER_CONFIG),
config.getInt(CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG),
ClientDnsLookup.forConfig(config.getString(CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG)),
time,
true,
new ApiVersions(),
logContext);
this.client = new ConsumerNetworkClient(
logContext,
netClient,
metadata,
time,
retryBackoffMs,
config.getInt(CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG),
Integer.MAX_VALUE);
this.coordinator = new WorkerCoordinator(
new GroupRebalanceConfig(config, GroupRebalanceConfig.ProtocolType.CONNECT),
logContext,
this.client,
metrics,
metricGrpPrefix,
this.time,
restUrl,
configStorage,
listener,
ConnectProtocolCompatibility.compatibility(config.getString(DistributedConfig.CONNECT_PROTOCOL_CONFIG)),
config.getInt(DistributedConfig.SCHEDULED_REBALANCE_MAX_DELAY_MS_CONFIG));
AppInfoParser.registerAppInfo(JMX_PREFIX, clientId, metrics, time.milliseconds());
log.debug("Connect group member created");
} catch (Throwable t) {
// call close methods if internal objects are already constructed
// this is to prevent resource leak. see KAFKA-2121
stop(true);
// now propagate the exception
throw new KafkaException("Failed to construct kafka consumer", t);
}
}
public void stop() {
if (stopped) return;
stop(false);
}
public void ensureActive() {
coordinator.poll(0);
}
public void poll(long timeout) {
if (timeout < 0)
throw new IllegalArgumentException("Timeout must not be negative");
coordinator.poll(timeout);
}
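    /*
     * Typical driving loop (sketch only; the real loop lives in the distributed herder, not in this class):
     *   member.ensureActive();      // join or re-join the group and process rebalances
     *   member.poll(pollTimeoutMs); // handle coordinator I/O for up to the given timeout
     * Another thread may call member.wakeup() to interrupt a blocking poll; pollTimeoutMs is a
     * placeholder name for whatever timeout the caller chooses.
     */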
/**
* Interrupt any running poll() calls, causing a WakeupException to be thrown in the thread invoking that method.
*/
public void wakeup() {
this.client.wakeup();
}
/**
* Get the member ID of this worker in the group of workers.
*
* This ID is the unique member ID automatically generated.
*
* @return the member ID
*/
public String memberId() {
return coordinator.memberId();
}
public void requestRejoin() {
coordinator.requestRejoin();
}
public void maybeLeaveGroup() {
coordinator.maybeLeaveGroup();
}
public String ownerUrl(String connector) {
return coordinator.ownerUrl(connector);
}
public String ownerUrl(ConnectorTaskId task) {
return coordinator.ownerUrl(task);
}
/**
* Get the version of the connect protocol that is currently active in the group of workers.
*
* @return the current connect protocol version
*/
public short currentProtocolVersion() {
return coordinator.currentProtocolVersion();
}
private void stop(boolean swallowException) {
log.trace("Stopping the Connect group member.");
AtomicReference<Throwable> firstException = new AtomicReference<>();
this.stopped = true;
Utils.closeQuietly(coordinator, "coordinator", firstException);
Utils.closeQuietly(metrics, "consumer metrics", firstException);
Utils.closeQuietly(client, "consumer network client", firstException);
AppInfoParser.unregisterAppInfo(JMX_PREFIX, clientId, metrics);
if (firstException.get() != null && !swallowException)
throw new KafkaException("Failed to stop the Connect group member", firstException.get());
else
log.debug("The Connect group member has stopped.");
}
}
| KevinLiLu/kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/WorkerGroupMember.java | Java | apache-2.0 | 9,850 |
package com.planet_ink.coffee_mud.Abilities.Skills;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2016-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Skill_FoulWeatherSailing extends StdSkill
{
@Override
public String ID()
{
return "Skill_FoulWeatherSailing";
}
private final static String localizedName = CMLib.lang().L("Foul Weather Sailing");
@Override
public String name()
{
return localizedName;
}
private final static String localizedStaticDisplay = CMLib.lang().L("(Foul Weather Sailing)");
private static final String[] triggerStrings = I(new String[] { "FOULWEATHERSAILING"});
@Override
public String[] triggerStrings()
{
return triggerStrings;
}
@Override
public String displayText()
{
return localizedStaticDisplay;
}
@Override
public int abstractQuality()
{
return Ability.QUALITY_INDIFFERENT;
}
@Override
protected int canAffectCode()
{
return CAN_MOBS;
}
@Override
public int classificationCode()
{
return Ability.ACODE_SKILL | Ability.DOMAIN_SEATRAVEL;
}
@Override
public int usageType()
{
return USAGE_MOVEMENT|USAGE_MANA;
}
@Override
public boolean okMessage(final Environmental myHost, final CMMsg msg)
{
if(!super.okMessage(myHost, msg))
return false;
if((msg.target() instanceof Boardable)
&&(msg.targetMinor()==CMMsg.TYP_WEATHER)
&&(msg.target() == affected)
&&(affected instanceof Item))
{
final MOB M=invoker();
if((M!=null)
&&(M.location()!=null)
&&(M.location().getArea() instanceof Boardable)
&&(((Boardable)M.location().getArea()).getBoardableItem() == msg.target())
&&(super.proficiencyCheck(M, 0, false)))
{
super.helpProficiency(M, 0);
final Room R=CMLib.map().roomLocation(msg.target());
if(R!=null)
{
					R.show(M, msg.target(), CMMsg.MSG_OK_VISUAL, L("<S-YOUPOSS> superior sailing skills keep <T-NAME> sailing."));
return false;
}
}
}
return true;
}
@Override
public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
{
if((CMLib.flags().isSitting(mob)||CMLib.flags().isSleeping(mob)))
{
mob.tell(L("You are on the floor!"));
return false;
}
if(!CMLib.flags().isAliveAwakeMobileUnbound(mob,false))
return false;
final Room R=mob.location();
if(R==null)
return false;
final Item target;
if((R.getArea() instanceof Boardable)
&&(((Boardable)R.getArea()).getBoardableItem() instanceof Boardable))
{
target=((Boardable)R.getArea()).getBoardableItem();
}
else
{
mob.tell(L("You must be on a ship to do rig for foul weather!"));
return false;
}
if(target.fetchEffect(ID())!=null)
{
mob.tell(L("Your ship is already rigged for foul weather!"));
return false;
}
final Room shipR=CMLib.map().roomLocation(target);
if((shipR==null)||(!CMLib.flags().isWaterySurfaceRoom(shipR))||(!target.subjectToWearAndTear()))
{
mob.tell(L("You must be on a sailing ship to rig for foul weather!"));
return false;
}
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,target,this,CMMsg.MASK_MALICIOUS|CMMsg.MSG_NOISYMOVEMENT,auto?L("<T-NAME> is rigged for bad weather!"):L("<S-NAME> rig(s) <T-NAME> for foul weather sailing!"));
if(mob.location().okMessage(mob,msg))
{
mob.location().send(mob,msg);
beneficialAffect(mob, target, asLevel, 0);
}
}
else
			return beneficialVisualFizzle(mob,null,L("<S-NAME> attempt(s) to rig the ship for foul weather sailing, but mess(es) it up."));
return success;
}
}
| bozimmerman/CoffeeMud | com/planet_ink/coffee_mud/Abilities/Skills/Skill_FoulWeatherSailing.java | Java | apache-2.0 | 5,179 |
package org.mengyun.tcctransaction.sample.http.capital.infrastructure.dao;
import org.mengyun.tcctransaction.sample.http.capital.domain.entity.CapitalAccount;
/**
* Created on 4/2/16.
*/
public interface CapitalAccountDao {
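    /**
     * Looks up the capital account owned by the given user.
     *
     * @param userId id of the account owner
     * @return the matching account, or null if the underlying mapper finds none
     */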
CapitalAccount findByUserId(long userId);
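    /**
     * Writes the given capital account back to the underlying store.
     *
     * @param capitalAccount the account whose state should be persisted
     */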
void update(CapitalAccount capitalAccount);
}
| tangdazhu/TCC | tcc-transaction-tutorial-sample/tcc-transaction-http-sample/tcc-transaction-http-capital/src/main/java/org/mengyun/tcctransaction/sample/http/capital/infrastructure/dao/CapitalAccountDao.java | Java | apache-2.0 | 327 |
package com.alibaba.otter.canal.parse.inbound;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import com.alibaba.otter.canal.common.AbstractCanalLifeCycle;
import com.alibaba.otter.canal.common.alarm.CanalAlarmHandler;
import com.alibaba.otter.canal.filter.CanalEventFilter;
import com.alibaba.otter.canal.parse.CanalEventParser;
import com.alibaba.otter.canal.parse.exception.CanalParseException;
import com.alibaba.otter.canal.parse.exception.TableIdNotFoundException;
import com.alibaba.otter.canal.parse.inbound.EventTransactionBuffer.TransactionFlushCallback;
import com.alibaba.otter.canal.parse.inbound.mysql.MysqlEventParser;
import com.alibaba.otter.canal.parse.index.CanalLogPositionManager;
import com.alibaba.otter.canal.parse.support.AuthenticationInfo;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.CanalEntry.Entry;
import com.alibaba.otter.canal.protocol.CanalEntry.EntryType;
import com.alibaba.otter.canal.protocol.CanalEntry.Header;
import com.alibaba.otter.canal.protocol.position.EntryPosition;
import com.alibaba.otter.canal.protocol.position.LogIdentity;
import com.alibaba.otter.canal.protocol.position.LogPosition;
import com.alibaba.otter.canal.sink.CanalEventSink;
import com.alibaba.otter.canal.sink.exception.CanalSinkException;
/**
 * Abstract EventParser that maximizes the code shared between the MySQL and Oracle implementations.
 *
 * @author jianghang 2013-1-20 08:10:25 PM
* @version 1.0.0
*/
public abstract class AbstractEventParser<EVENT> extends AbstractCanalLifeCycle implements CanalEventParser<EVENT> {
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
protected CanalLogPositionManager logPositionManager = null;
protected CanalEventSink<List<CanalEntry.Entry>> eventSink = null;
protected CanalEventFilter eventFilter = null;
protected CanalEventFilter eventBlackFilter = null;
private CanalAlarmHandler alarmHandler = null;
    // statistics parameters
    protected AtomicBoolean profilingEnabled = new AtomicBoolean(false); // switch for profiling statistics
protected AtomicLong receivedEventCount = new AtomicLong();
protected AtomicLong parsedEventCount = new AtomicLong();
protected AtomicLong consumedEventCount = new AtomicLong();
protected long parsingInterval = -1;
protected long processingInterval = -1;
    // authentication information
protected volatile AuthenticationInfo runningInfo;
protected String destination;
// binLogParser
protected BinlogParser binlogParser = null;
protected Thread parseThread = null;
protected Thread.UncaughtExceptionHandler handler = new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread t,
Throwable e) {
logger.error("parse events has an error",
e);
}
};
protected EventTransactionBuffer transactionBuffer;
protected int transactionSize = 1024;
protected AtomicBoolean needTransactionPosition = new AtomicBoolean(false);
protected long lastEntryTime = 0L;
    protected volatile boolean detectingEnable = true; // whether heartbeat detection is enabled
    protected Integer detectingIntervalInSeconds = 3; // detection interval
protected volatile Timer timer;
protected TimerTask heartBeatTimerTask;
protected Throwable exception = null;
    // update by yishun.chen
    // parameters for special-case exception handling
    protected int specialExceptionCount = 0;// counter of special-case exceptions
    protected int specialExceptionCountThreshold = 3;// threshold for the special-case exception counter
    protected boolean isFindEndPosition = false;// on reconnect, look up the newest position from the database
protected abstract BinlogParser buildParser();
protected abstract ErosaConnection buildErosaConnection();
protected abstract EntryPosition findStartPosition(ErosaConnection connection) throws IOException;
    // update by yishun.chen
    // find the newest binlog position in the database
protected abstract EntryPosition findEndPosition(ErosaConnection connection) throws IOException;
protected void preDump(ErosaConnection connection) {
}
protected void afterDump(ErosaConnection connection) {
}
public void sendAlarm(String destination, String msg) {
if (this.alarmHandler != null) {
this.alarmHandler.sendAlarm(destination, msg);
}
}
public AbstractEventParser(){
        // initialize the transaction buffer
transactionBuffer = new EventTransactionBuffer(new TransactionFlushCallback() {
public void flush(List<CanalEntry.Entry> transaction) throws InterruptedException {
boolean successed = consumeTheEventAndProfilingIfNecessary(transaction);
if (!running) {
return;
}
if (!successed) {
throw new CanalParseException("consume failed!");
}
LogPosition position = buildLastTransactionPosition(transaction);
                if (position != null) { // position may be null
logPositionManager.persistLogPosition(AbstractEventParser.this.destination, position);
}
}
});
}
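    /*
     * Subclass sketch (all names below are illustrative, not taken from a concrete implementation): a
     * database specific parser mainly supplies the abstract hooks declared above, roughly
     *   public class MyDatabaseEventParser extends AbstractEventParser<MyRawEvent> {
     *       protected BinlogParser buildParser()             { return new MyBinlogParser(); }
     *       protected ErosaConnection buildErosaConnection() { return new MyErosaConnection(runningInfo); }
     *       protected EntryPosition findStartPosition(ErosaConnection c) { return lastPersistedOrConfiguredPosition(); }
     *       protected EntryPosition findEndPosition(ErosaConnection c)   { return newestPositionOnServer(c); }
     *   }
     * The start() method below then drives the connect/dump/parse/sink loop against these hooks.
     */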
public void start() {
super.start();
MDC.put("destination", destination);
        // configure the transaction buffer
        // initialize the buffering queue
        transactionBuffer.setBufferSize(transactionSize);// set the buffer size
        transactionBuffer.start();
        // construct the binlog parser
        binlogParser = buildParser();// initialize the BinlogParser
        binlogParser.start();
        // start the worker thread
parseThread = new Thread(new Runnable() {
public void run() {
MDC.put("destination", String.valueOf(destination));
ErosaConnection erosaConnection = null;
while (running) {
try {
                        // start replication
                        // 1. build the Erosa connection
                        erosaConnection = buildErosaConnection();
                        // 2. start a heartbeat thread
                        startHeartBeat(erosaConnection);
                        // 3. do the preparation work before the dump
                        preDump(erosaConnection);
                        erosaConnection.connect();// connect
                        // 4. obtain the last position
                        EntryPosition position = findStartPosition(erosaConnection);
                        // update by yishun.chen
                        // consume from the newest position of the current database
                        // after binlog parsing has failed several times, skip the failing position and fetch a fresh one from the database
if(isFindEndPosition){
position = findEndPosition(erosaConnection);
specialExceptionCount = 0;
isFindEndPosition = false;
logger.error("special exception count>" + specialExceptionCountThreshold + ", find end position, maybe cause data loss!");
sendAlarm(destination, "special exception count>" + specialExceptionCountThreshold + ", find end position, maybe cause data loss!");
}
final EntryPosition startPosition = position;
if (startPosition == null) {
throw new CanalParseException("can't find start position for " + destination);
}
logger.info("find start position : {}", startPosition.toString());
                        // reconnect, because finding the position may have left state on the connection, so disconnect and rebuild it
erosaConnection.reconnect();
final SinkFunction sinkHandler = new SinkFunction<EVENT>() {
private LogPosition lastPosition;
public boolean sink(EVENT event) {
try {
CanalEntry.Entry entry = parseAndProfilingIfNecessary(event);
if (!running) {
return false;
}
if (entry != null) {
                                        exception = null; // normal data is flowing again, clear the exception
                                        transactionBuffer.add(entry);
                                        // record the corresponding positions
                                        this.lastPosition = buildLastPosition(entry);
                                        // record the last time data was received
                                        lastEntryTime = System.currentTimeMillis();
}
return running;
} catch (TableIdNotFoundException e) {
throw e;
} catch (Exception e) {
                                    // record the position information where the error occurred
                                    processError(e,
                                        this.lastPosition,
                                        startPosition.getJournalName(),
                                        startPosition.getPosition());
                                    throw new CanalParseException(e); // rethrow the exception so the upper layer is uniformly aware of it
}
}
};
                        // 5. start dumping data
if (StringUtils.isEmpty(startPosition.getJournalName()) && startPosition.getTimestamp() != null) {
erosaConnection.dump(startPosition.getTimestamp(), sinkHandler);
} else {
erosaConnection.dump(startPosition.getJournalName(),
startPosition.getPosition(),
sinkHandler);
}
} catch (TableIdNotFoundException e) {
exception = e;
                        // special handling for TableIdNotFoundException: one possible cause is that the start position sits inside a transaction,
                        // so the corresponding TableMap event has not been parsed yet
needTransactionPosition.compareAndSet(false, true);
logger.error(String.format("dump address %s has an error, retrying. caused by ",
runningInfo.getAddress().toString()), e);
} catch (Throwable e) {
                        // update by yishun.chen
                        // if dumping hits IOException three or more times in total, skip the current position
if(e instanceof IOException){
specialExceptionCount ++;
if(specialExceptionCount >= specialExceptionCountThreshold){
isFindEndPosition = true;
}
}
exception = e;
if (!running) {
if (!(e instanceof java.nio.channels.ClosedByInterruptException || e.getCause() instanceof java.nio.channels.ClosedByInterruptException)) {
throw new CanalParseException(String.format("dump address %s has an error, retrying. ",
runningInfo.getAddress().toString()), e);
}
} else {
logger.error(String.format("dump address %s has an error, retrying. caused by ",
runningInfo.getAddress().toString()), e);
sendAlarm(destination, ExceptionUtils.getFullStackTrace(e));
}
} finally {
                        // reset the thread's interrupted status
                        Thread.interrupted();
                        // close the connection
afterDump(erosaConnection);
try {
if (erosaConnection != null) {
erosaConnection.disconnect();
}
} catch (IOException e1) {
if (!running) {
throw new CanalParseException(String.format("disconnect address %s has an error, retrying. ",
runningInfo.getAddress().toString()),
e1);
} else {
logger.error("disconnect address {} has an error, retrying., caused by ",
runningInfo.getAddress().toString(),
e1);
}
}
}
                    // an exception occurred: interrupt the sink consumption and release its state
                    eventSink.interrupt();
                    transactionBuffer.reset();// reset the buffering queue and start recording data again
                    binlogParser.reset();// reset the parser
                    if (running) {
                        // sleep for a while before retrying
try {
Thread.sleep(10000 + RandomUtils.nextInt(10000));
} catch (InterruptedException e) {
}
}
}
MDC.remove("destination");
}
});
parseThread.setUncaughtExceptionHandler(handler);
parseThread.setName(String.format("destination = %s , address = %s , EventParser",
destination,
runningInfo == null ? null : runningInfo.getAddress().toString()));
parseThread.start();
}
public void stop() {
super.stop();
        stopHeartBeat(); // stop the heartbeat first
        parseThread.interrupt(); // attempt to interrupt
        eventSink.interrupt();
        try {
            parseThread.join();// wait for it to finish
} catch (InterruptedException e) {
// ignore
}
if (binlogParser.isStart()) {
binlogParser.stop();
}
if (transactionBuffer.isStart()) {
transactionBuffer.stop();
}
}
protected boolean consumeTheEventAndProfilingIfNecessary(List<CanalEntry.Entry> entrys) throws CanalSinkException,
InterruptedException {
long startTs = -1;
boolean enabled = getProfilingEnabled();
if (enabled) {
startTs = System.currentTimeMillis();
}
boolean result = eventSink.sink(entrys, (runningInfo == null) ? null : runningInfo.getAddress(), destination);
if (enabled) {
this.processingInterval = System.currentTimeMillis() - startTs;
}
if (consumedEventCount.incrementAndGet() < 0) {
consumedEventCount.set(0);
}
return result;
}
protected CanalEntry.Entry parseAndProfilingIfNecessary(EVENT bod) throws Exception {
long startTs = -1;
boolean enabled = getProfilingEnabled();
if (enabled) {
startTs = System.currentTimeMillis();
}
CanalEntry.Entry event = binlogParser.parse(bod);
if (enabled) {
this.parsingInterval = System.currentTimeMillis() - startTs;
}
if (parsedEventCount.incrementAndGet() < 0) {
parsedEventCount.set(0);
}
return event;
}
public Boolean getProfilingEnabled() {
return profilingEnabled.get();
}
    protected LogPosition buildLastTransactionPosition(List<CanalEntry.Entry> entries) { // initialization
        for (int i = entries.size() - 1; i > 0; i--) {
            CanalEntry.Entry entry = entries.get(i);
            if (entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONEND) {// prefer recording a transaction end as the position
return buildLastPosition(entry);
}
}
return null;
}
    protected LogPosition buildLastPosition(CanalEntry.Entry entry) { // initialization
LogPosition logPosition = new LogPosition();
EntryPosition position = new EntryPosition();
position.setJournalName(entry.getHeader().getLogfileName());
position.setPosition(entry.getHeader().getLogfileOffset());
position.setTimestamp(entry.getHeader().getExecuteTime());
logPosition.setPostion(position);
LogIdentity identity = new LogIdentity(runningInfo.getAddress(), -1L);
logPosition.setIdentity(identity);
return logPosition;
}
protected void processError(Exception e, LogPosition lastPosition, String startBinlogFile, long startPosition) {
if (lastPosition != null) {
logger.warn(String.format("ERROR ## parse this event has an error , last position : [%s]",
lastPosition.getPostion()),
e);
} else {
logger.warn(String.format("ERROR ## parse this event has an error , last position : [%s,%s]",
startBinlogFile,
startPosition), e);
}
}
protected void startHeartBeat(ErosaConnection connection) {
        lastEntryTime = 0L; // initialize
        if (timer == null) {// lazy initialization
String name = String.format("destination = %s , address = %s , HeartBeatTimeTask",
destination,
runningInfo == null ? null : runningInfo.getAddress().toString());
synchronized (MysqlEventParser.class) {
if (timer == null) {
timer = new Timer(name, true);
}
}
}
        if (heartBeatTimerTask == null) {// fixed issue #56, avoid creating the heartbeat thread repeatedly
heartBeatTimerTask = buildHeartBeatTimeTask(connection);
Integer interval = detectingIntervalInSeconds;
timer.schedule(heartBeatTimerTask, interval * 1000L, interval * 1000L);
logger.info("start heart beat.... ");
}
}
protected TimerTask buildHeartBeatTimeTask(ErosaConnection connection) {
return new TimerTask() {
public void run() {
try {
if (exception == null || lastEntryTime > 0) {
                        // if no exception has occurred, or at least one normal entry has been received
                        long now = System.currentTimeMillis();
                        long interval = (now - lastEntryTime) / 1000;
                        if (interval >= detectingIntervalInSeconds) {
Header.Builder headerBuilder = Header.newBuilder();
headerBuilder.setExecuteTime(now);
Entry.Builder entryBuilder = Entry.newBuilder();
entryBuilder.setHeader(headerBuilder.build());
entryBuilder.setEntryType(EntryType.HEARTBEAT);
Entry entry = entryBuilder.build();
                            // submit to the sink; it is currently not forwarded to the store and will be ignored by the sink
consumeTheEventAndProfilingIfNecessary(Arrays.asList(entry));
}
}
} catch (Throwable e) {
logger.warn("heartBeat run failed " + ExceptionUtils.getStackTrace(e));
}
}
};
}
protected void stopHeartBeat() {
        lastEntryTime = 0L; // reset to the initial value
if (timer != null) {
timer.cancel();
timer = null;
}
heartBeatTimerTask = null;
}
public void setEventFilter(CanalEventFilter eventFilter) {
this.eventFilter = eventFilter;
}
public void setEventBlackFilter(CanalEventFilter eventBlackFilter) {
this.eventBlackFilter = eventBlackFilter;
}
public Long getParsedEventCount() {
return parsedEventCount.get();
}
public Long getConsumedEventCount() {
return consumedEventCount.get();
}
public void setProfilingEnabled(boolean profilingEnabled) {
this.profilingEnabled = new AtomicBoolean(profilingEnabled);
}
public long getParsingInterval() {
return parsingInterval;
}
public long getProcessingInterval() {
return processingInterval;
}
public void setEventSink(CanalEventSink<List<CanalEntry.Entry>> eventSink) {
this.eventSink = eventSink;
}
public void setDestination(String destination) {
this.destination = destination;
}
public void setBinlogParser(BinlogParser binlogParser) {
this.binlogParser = binlogParser;
}
public BinlogParser getBinlogParser() {
return binlogParser;
}
public void setAlarmHandler(CanalAlarmHandler alarmHandler) {
this.alarmHandler = alarmHandler;
}
public CanalAlarmHandler getAlarmHandler() {
return this.alarmHandler;
}
public void setLogPositionManager(CanalLogPositionManager logPositionManager) {
this.logPositionManager = logPositionManager;
}
public void setTransactionSize(int transactionSize) {
this.transactionSize = transactionSize;
}
public CanalLogPositionManager getLogPositionManager() {
return logPositionManager;
}
public void setDetectingEnable(boolean detectingEnable) {
this.detectingEnable = detectingEnable;
}
public void setDetectingIntervalInSeconds(Integer detectingIntervalInSeconds) {
this.detectingIntervalInSeconds = detectingIntervalInSeconds;
}
public Throwable getException() {
return exception;
}
}
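// Editor's note: a standalone, hypothetical sketch of the retry policy visible above: IOExceptions hit while
// dumping are counted, and once the count reaches the threshold the parser jumps to the end position (accepting
// possible data loss) instead of retrying the same position forever. The class below is illustrative only and
// is not part of canal.
class DumpRetryPolicySketch {
    private int specialExceptionCount = 0;
    private final int specialExceptionCountThreshold = 3; // assumed default, mirrors the field used above
    private boolean findEndPosition = false;
    void onDumpError(Throwable e) {
        if (e instanceof java.io.IOException) {
            specialExceptionCount++;
            if (specialExceptionCount >= specialExceptionCountThreshold) {
                findEndPosition = true; // the next retry will skip to the end position
            }
        }
    }
    void onEndPositionTaken() {
        // once the end position has been taken, reset the counters for the next round
        specialExceptionCount = 0;
        findEndPosition = false;
    }
    boolean shouldFindEndPosition() {
        return findEndPosition;
    }
}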
| gewanbo/canalKafka | parse/src/main/java/com/alibaba/otter/canal/parse/inbound/AbstractEventParser.java | Java | apache-2.0 | 24,533 |
/*
* Copyright 2013-2014 Grzegorz Ligas <ligasgr@gmail.com> and other contributors
* (see the CONTRIBUTORS file).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.xquery.model;
/**
* User: ligasgr
* Date: 09/08/13
* Time: 13:08
*/
public class XQueryQName<T> {
private final String prefix;
private final String localName;
private final String namespace;
private final T namedObject;
public XQueryQName(String prefix, String localName, String namespace, T namedObject) {
this.prefix = prefix;
this.localName = localName;
this.namespace = namespace;
this.namedObject = namedObject;
}
public T getNamedObject() {
return namedObject;
}
public String getNamespace() {
return namespace;
}
@Override
public int hashCode() {
int result = prefix != null ? prefix.hashCode() : 0;
result = 31 * result + (localName != null ? localName.hashCode() : 0);
return result;
}
@Override
public boolean equals(Object obj) {
if (obj == null) return false;
if (!(obj instanceof XQueryQName)) return false;
XQueryQName that = (XQueryQName) obj;
        boolean prefixesAndLocalNamesMatch = prefix != null && prefix.equals(that.prefix)
                && localName != null && localName.equals(that.localName);
boolean prefixesAreEmptyAndLocalNamesMatch = prefix == null && that.prefix == null && localName != null &&
localName.equals(that.localName);
boolean namespacesAndLocalNamesMatch = namespace != null && namespace.equals(that.namespace) &&
localName != null && localName.equals(that.localName);
return prefixesAndLocalNamesMatch || prefixesAreEmptyAndLocalNamesMatch || namespacesAndLocalNamesMatch;
}
public String getTextRepresentation() {
if (prefix != null) {
return prefix + ":" + localName;
} else {
return localName;
}
}
}
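// Editor's note: a minimal, hypothetical usage sketch of the equality rules implemented above; the demo class
// below is not part of the original plugin and relies only on the public constructor and equals(Object) shown
// in this file.
class XQueryQNameEqualityDemo {
    public static void main(String[] args) {
        XQueryQName<String> a = new XQueryQName<String>("fn", "concat", "http://www.w3.org/2005/xpath-functions", null);
        XQueryQName<String> b = new XQueryQName<String>("fn", "concat", null, null);
        System.out.println(a.equals(b)); // true: prefixes and local names match
        XQueryQName<String> c = new XQueryQName<String>("f", "concat", "http://www.w3.org/2005/xpath-functions", null);
        System.out.println(a.equals(c)); // true: namespaces and local names match, despite different prefixes
        XQueryQName<String> d = new XQueryQName<String>("fn", "substring", "http://www.w3.org/2005/xpath-functions", null);
        System.out.println(a.equals(d)); // false: local names differ
    }
}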
| ligasgr/intellij-xquery | src/main/java/org/intellij/xquery/model/XQueryQName.java | Java | apache-2.0 | 2,532 |
/*
* Copyright 2017 The Bazel Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.idea.blaze.python.resolve.provider;
import com.google.idea.blaze.base.settings.Blaze.BuildSystem;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.codeInsight.imports.AutoImportQuickFix;
import com.jetbrains.python.psi.resolve.PyQualifiedNameResolveContext;
import javax.annotation.Nullable;
/** A strategy for conversion between import strings and blaze artifacts. */
public interface PyImportResolverStrategy {
ExtensionPointName<PyImportResolverStrategy> EP_NAME =
ExtensionPointName.create("com.google.idea.blaze.PyImportResolverStrategy");
/**
* Find a python source somewhere in the Blaze workspace, corresponding to the given import
* string. Not limited to .blazeproject source roots.
*/
@Nullable
PsiElement resolveToWorkspaceSource(QualifiedName name, PyQualifiedNameResolveContext context);
/** Find a python source corresponding to the given name, available during the last blaze sync. */
@Nullable
PsiElement resolveFromSyncData(QualifiedName name, PyQualifiedNameResolveContext context);
/**
* Add quick fix import suggestions for a given symbol, searching a symbol index built up during
* the last blaze sync.
*/
void addImportCandidates(PsiReference reference, String name, AutoImportQuickFix quickFix);
/** Whether this import resolver strategy is applicable to the given build system */
boolean appliesToBuildSystem(BuildSystem buildSystem);
}
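// Editor's note: a hypothetical, minimal implementation sketch illustrating the contract above; the class below
// is not part of the plugin, it simply resolves nothing, suggests nothing, and applies to every build system.
class NoOpPyImportResolverStrategy implements PyImportResolverStrategy {
    @Nullable
    @Override
    public PsiElement resolveToWorkspaceSource(QualifiedName name, PyQualifiedNameResolveContext context) {
        return null; // no workspace source found for the import string
    }
    @Nullable
    @Override
    public PsiElement resolveFromSyncData(QualifiedName name, PyQualifiedNameResolveContext context) {
        return null; // nothing recorded during the last sync matches
    }
    @Override
    public void addImportCandidates(PsiReference reference, String name, AutoImportQuickFix quickFix) {
        // intentionally adds no quick-fix candidates
    }
    @Override
    public boolean appliesToBuildSystem(BuildSystem buildSystem) {
        return true; // a real strategy would typically check the concrete build system here
    }
}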
| brendandouglas/intellij | python/src/com/google/idea/blaze/python/resolve/provider/PyImportResolverStrategy.java | Java | apache-2.0 | 2,211 |
package pojos;
public class SimpleGrade {
private int lectureID;
private int gradeID;
private String name;
private int affect;
private float average;
private float grade;
public SimpleGrade( int lectureID, int gradeID, String name, int affect, float average) {
super();
this.lectureID = lectureID;
this.gradeID = gradeID;
this.name = name;
this.affect = affect;
this.average = average;
}
public SimpleGrade(int lectureID, int gradeID, String name, int affect, float average, float grade) {
super();
this.lectureID = lectureID;
this.gradeID = gradeID;
this.name = name;
this.affect = affect;
this.average = average;
this.grade = grade;
}
public int getLectureID() {
return lectureID;
}
public int getGradeID() {
return gradeID;
}
public String getName() {
return name;
}
public int getAffect() {
return affect;
}
public float getAverage() {
return average;
}
}
| koryOzyurt/AIUBoard | src/pojos/SimpleGrade.java | Java | apache-2.0 | 950 |
package com.arthurivanets.graphalgorithmsvisualization.graph.model;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Point;
import com.arthurivanets.graphalgorithmsvisualization.core.Constants;
import com.arthurivanets.graphalgorithmsvisualization.util.CSVConvertable;
import com.arthurivanets.graphalgorithmsvisualization.util.Drawable;
import com.arthurivanets.graphalgorithmsvisualization.util.MathUtils;
import com.arthurivanets.graphalgorithmsvisualization.util.Taggable;
import com.arthurivanets.graphalgorithmsvisualization.util.Unique;
public class Edge implements Drawable, Unique<String>, Taggable<Edge>, CSVConvertable<Edge> {
public static final int FLAG_NONE = 0;
public static final int FLAG_DIRECTION_IMPORTANT = 1;
public static final int FLAG_DIRECTION_NOT_IMPORTANT = 2;
public static final Color DEFAULT_COLOR = Color.GREEN;
public static final Color DEFAULT_SELECTED_COLOR = Color.RED;
public static final Color DEFAULT_TEXT_COLOR = Color.WHITE;
public static final int DEFAULT_TEXT_SIZE = 14;
private int textSize;
private int weight;
private Vertex startVertex;
private Vertex endVertex;
private Color color;
private Color selectedColor;
private Color textColor;
private Object tag;
private boolean isSelected;
private boolean isWeightVisible;
private boolean shouldBeFilled;
public Edge() {
this(null, null);
}
public Edge(Vertex startVertex, Vertex endVertex) {
this.startVertex = startVertex;
this.endVertex = endVertex;
this.weight = -1;
this.textSize = DEFAULT_TEXT_SIZE;
this.color = DEFAULT_COLOR;
this.selectedColor = DEFAULT_SELECTED_COLOR;
this.textColor = DEFAULT_TEXT_COLOR;
this.isSelected = false;
this.isWeightVisible = true;
this.shouldBeFilled = false;
}
public Edge setStartVertex(Vertex startVertex) {
this.startVertex = startVertex;
return this;
}
public Vertex getStartVertex() {
return this.startVertex;
}
public Edge setEndVertex(Vertex endVertex) {
this.endVertex = endVertex;
return this;
}
public Vertex getEndVertex() {
return this.endVertex;
}
public Edge setWeight(int weight) {
this.weight = weight;
return this;
}
public int getWeight() {
return ((this.weight < 0) ? ((int) getLength()) : (this.weight + getWeightCompensation()));
}
public int getWeightCompensation() {
if(startVertex.getType() == Vertex.Type.OBSTACLE && endVertex.getType() == Vertex.Type.OBSTACLE) {
return Constants.OBSTACLE_WEIGHT_COMPENSATION;
} else {
return 0;
}
}
public Edge setTextSize(int textSize) {
this.textSize = textSize;
return this;
}
public int getTextSize() {
return this.textSize;
}
public Edge setColor(int color) {
return setColor(new Color(color));
}
public Edge setColor(Color color) {
this.color = color;
return this;
}
public Color getColor() {
return this.color;
}
public Edge setSelectedColor(int color) {
return setSelectedColor(new Color(color));
}
public Edge setSelectedColor(Color color) {
this.selectedColor = color;
return this;
}
public Color getSelectedColor() {
return this.selectedColor;
}
public Edge setTextColor(int color) {
return setTextColor(new Color(color));
}
public Edge setTextColor(Color color) {
this.textColor = color;
return this;
}
public Color getTextColor() {
return this.textColor;
}
public Edge setSelected(boolean isSelected) {
this.isSelected = isSelected;
return this;
}
public boolean isSelected() {
return this.isSelected;
}
public Edge setWeightVisible(boolean isWeightVisible) {
this.isWeightVisible = isWeightVisible;
return this;
}
public boolean isWeightVisible() {
return this.isWeightVisible;
}
public Edge setFill(boolean shouldBeFilled) {
this.shouldBeFilled = shouldBeFilled;
return this;
}
public boolean shouldBeFilled() {
return this.shouldBeFilled;
}
/**
*
* <br>
* Determines whether the current Edge has the given Vertices as a base,
* in either direction(doesn't consider a particular direction).
* <br>
*
* @param vertex1
* @param vertex2
*
* @return
*
*/
public boolean equalsVertexwise(Vertex vertex1, Vertex vertex2) {
return equalsVertexwise(vertex1, vertex2, FLAG_DIRECTION_NOT_IMPORTANT);
}
/**
*
* <br>
* Determines whether the current Edge has the given Vertices as a base,
* in a direction(-s) specified via the flag.
* <br>
*
     * @param vertex1
     * @param vertex2
     * @param flag either FLAG_DIRECTION_IMPORTANT or FLAG_DIRECTION_NOT_IMPORTANT
*
* @return
*
*/
public boolean equalsVertexwise(Vertex vertex1, Vertex vertex2, int flag) {
return equalsVertexwise(new Edge(vertex1, vertex2), flag);
}
/**
*
* <br>
* Determines whether the current Edge has the given Edge's Vertices as a base,
* in either direction(doesn't consider a particular direction).
* <br>
*
     * @param edge the edge whose vertices are compared against this edge's vertices
*
* @return
*
*/
public boolean equalsVertexwise(Edge edge) {
return equalsVertexwise(edge, FLAG_DIRECTION_NOT_IMPORTANT);
}
/**
*
* <br>
* Determines whether the current Edge has the given Edge's Vertices as a base,
* in a direction(-s) specified via the flag.
* <br>
*
     * @param edge the edge whose vertices are compared against this edge's vertices
     * @param flag either FLAG_DIRECTION_IMPORTANT or FLAG_DIRECTION_NOT_IMPORTANT
*
* @return
*
*/
public boolean equalsVertexwise(Edge edge, int flag) {
if(edge == null || edge.getStartVertex() == null || edge.getEndVertex() == null) {
return false;
}
if(flag == FLAG_DIRECTION_IMPORTANT) {
return (startVertex.equalsKeywise(edge.getStartVertex()) && endVertex.equalsKeywise(edge.getEndVertex()));
} else {
return ((startVertex.equalsKeywise(edge.getStartVertex()) && endVertex.equalsKeywise(edge.getEndVertex()))
|| (startVertex.equalsKeywise(edge.getEndVertex()) && endVertex.equalsKeywise(edge.getStartVertex())));
}
}
@Override
public Edge setTag(Object tag) {
this.tag = tag;
return this;
}
@Override
public Object getTag() {
return this.tag;
}
@Override
public String getUniqueKey() {
return composeKey(createKeyPart(startVertex, endVertex), createKeyPart(endVertex, startVertex));
}
@Override
public Edge fromCSV(String csvData) {
String[] csvDataChunks = csvData.split(";");
//edge part
String[] dataChunk = csvDataChunks[0].split(",");
setTextSize(Integer.parseInt(dataChunk[0]));
setWeight(Integer.parseInt(dataChunk[1]));
setColor(Integer.parseInt(dataChunk[2]));
setSelectedColor(Integer.parseInt(dataChunk[3]));
setTextColor(Integer.parseInt(dataChunk[4]));
setSelected(Boolean.parseBoolean(dataChunk[5]));
setWeightVisible(Boolean.parseBoolean(dataChunk[6]));
setFill(Boolean.parseBoolean(dataChunk[7]));
//vertices
setStartVertex(new Vertex().fromCSV(csvDataChunks[1]));
setEndVertex(new Vertex().fromCSV(csvDataChunks[2]));
return this;
}
@Override
public String toCSV() {
StringBuilder edgeCSVBuilder = new StringBuilder();
edgeCSVBuilder.append(this.textSize).append(",");
edgeCSVBuilder.append(this.weight).append(",");
edgeCSVBuilder.append(this.color.getRGB()).append(",");
edgeCSVBuilder.append(this.selectedColor.getRGB()).append(",");
edgeCSVBuilder.append(this.textColor.getRGB()).append(",");
edgeCSVBuilder.append(this.isSelected).append(",");
edgeCSVBuilder.append(this.isWeightVisible).append(",");
edgeCSVBuilder.append(this.shouldBeFilled);
return String.format("%s;%s;%s", edgeCSVBuilder.toString(), this.startVertex.toCSV(), this.endVertex.toCSV());
}
public static String composeKey(String firstPart, String secondPart) {
return (firstPart + "|" + secondPart);
}
public static String createKeyPart(Vertex startVertex, Vertex endVertex) {
return ("[{" + startVertex.getUniqueKey() + "},{" + endVertex.getUniqueKey() + "}]");
}
public Point getMidPoint() {
Point point = new Point(
((startVertex.getX() + endVertex.getX()) / 2),
((startVertex.getY() + endVertex.getY()) / 2)
);
return point;
}
public double getLength() {
return MathUtils.distance(startVertex.getX(), startVertex.getY(), endVertex.getX(), endVertex.getY());
}
/**
*
* <br>
     * Finds the intersection point of the current Edge with the given Vertex, treating the vertex
     * as a circle of radius vertex.getSize(). (If they indeed intersect with each other)
* <br>
*
* @param vertex
*
* @return an intersection Point(if the objects intersect), null(if they don't intersect with each other)
*
*/
public Point getIntersectionPoint(Vertex vertex) {
float dX;
float dY;
float a;
float b;
float c;
float determinant;
float t;
Point firstIntersectionPoint;
Point secondIntersectionPoint;
dX = (endVertex.getX() - startVertex.getX());
dY = (endVertex.getY() - startVertex.getY());
a = (dX * dX + dY * dY);
b = (2 * (dX * (startVertex.getX() - vertex.getX()) + dY * (startVertex.getY() - vertex.getY())));
c = ((startVertex.getX() - vertex.getX()) * (startVertex.getX() - vertex.getX()) + (startVertex.getY() - vertex.getY()) * (startVertex.getY() - vertex.getY()) - vertex.getSize() * vertex.getSize());
determinant = (b * b - 4 * a * c);
if((a <= 0.0000001) || (determinant < 0)) {
// No real solutions.
return null;
} else if (determinant == 0) {
// One solution.
t = (-b / (2 * a));
firstIntersectionPoint = new Point(
(int) (startVertex.getX() + t * dX),
(int) (startVertex.getY() + t * dY)
);
return firstIntersectionPoint;
} else {
// Two solutions.
t = (float) ((-b + Math.sqrt(determinant)) / (2 * a));
firstIntersectionPoint = new Point(
(int) (startVertex.getX() + t * dX),
(int) (startVertex.getY() + t * dY)
);
t = (float) ((-b - Math.sqrt(determinant)) / (2 * a));
secondIntersectionPoint = new Point(
(int) (startVertex.getX() + t * dX),
(int) (startVertex.getY() + t * dY)
);
return MathUtils.midPoint(firstIntersectionPoint, secondIntersectionPoint);
}
}
/**
*
* <br>
     * Used to determine whether the current Edge can be considered as a "mostly" horizontal line.
     * (The condition is satisfied when the vertical extent is smaller than half of the horizontal extent,
     * i.e. <strong>|deltaY| &lt; |deltaX| / 2</strong>, which corresponds to a slope magnitude below 0.5
     * relative to the Ox axis.)
* <br>
*
* @return
*
*/
public boolean isMostlyHorizontal() {
int deltaX = Math.abs(endVertex.getX() - startVertex.getX());
int deltaY = Math.abs(endVertex.getY() - startVertex.getY());
return (deltaY < (deltaX / 2));
}
public boolean isHorizontal() {
return (startVertex.getY() == endVertex.getY());
}
public boolean isMostlyVertical() {
int deltaX = Math.abs(endVertex.getX() - startVertex.getX());
int deltaY = Math.abs(endVertex.getY() - startVertex.getY());
return (deltaX < (deltaY / 2));
}
public boolean isVertical() {
return (startVertex.getX() == endVertex.getX());
}
public boolean isDiagonal() {
return (Math.abs(endVertex.getX() - startVertex.getX()) == Math.abs(endVertex.getY() - startVertex.getY()));
}
public boolean isMostlyDiagonal() {
int deltaX = Math.abs(endVertex.getX() - startVertex.getX());
int deltaY = Math.abs(endVertex.getY() - startVertex.getY());
return (((deltaY >= (deltaX / 2)) && (deltaY <= deltaX)) || ((deltaX >= (deltaY / 2)) && (deltaX <= deltaY)));
}
public boolean isContainedByTheCurrentCoordinateRange(Vertex vertex) {
if(isMostlyHorizontal()) {
return (vertex.getX() >= Math.min(startVertex.getX(), endVertex.getX())
&& vertex.getX() <= Math.max(startVertex.getX(), endVertex.getX()));
} else {
return (vertex.getY() >= Math.min(startVertex.getY(), endVertex.getY())
&& vertex.getY() <= Math.max(startVertex.getY(), endVertex.getY()));
}
}
@Override
public void draw(Graphics2D g) {
//drawing the edge
g.setColor(isSelected ? selectedColor : color);
g.drawLine(
startVertex.getX(),
startVertex.getY(),
endVertex.getX(),
endVertex.getY()
);
//drawing the vertices
startVertex.setColor(isSelected ? selectedColor : color);
startVertex.setTextColor(textColor);
startVertex.setTextSize(textSize);
startVertex.setFill(shouldBeFilled);
startVertex.draw(g);
endVertex.setColor(isSelected ? selectedColor : color);
endVertex.setTextColor(textColor);
endVertex.setTextSize(textSize);
endVertex.setFill(shouldBeFilled);
endVertex.draw(g);
//drawing the text which represents the approximate weight of the edge
//(will be drawn if the visibility of the weight is set to true)
if(!isWeightVisible) {
return;
}
Point midPoint = getMidPoint();
g.setColor(textColor);
g.setFont(new Font(Font.SERIF, Font.BOLD, textSize));
g.drawString(
("~" + Math.round(getLength())),
midPoint.x,
midPoint.y
);
}
}
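// Editor's note: a self-contained numeric sketch of the line/circle intersection math used in
// Edge#getIntersectionPoint above, kept independent of the Vertex API; the class name and the sample
// coordinates are hypothetical and not part of the original project.
class EdgeIntersectionMathDemo {
    public static void main(String[] args) {
        // segment from (0,0) to (10,0); "vertex" treated as a circle centered at (5,3) with radius 5
        double startX = 0, startY = 0, endX = 10, endY = 0;
        double centerX = 5, centerY = 3, radius = 5;
        double dX = endX - startX;
        double dY = endY - startY;
        // quadratic a*t^2 + b*t + c = 0 in the segment parameter t, exactly as in getIntersectionPoint
        double a = dX * dX + dY * dY;
        double b = 2 * (dX * (startX - centerX) + dY * (startY - centerY));
        double c = (startX - centerX) * (startX - centerX) + (startY - centerY) * (startY - centerY) - radius * radius;
        double determinant = b * b - 4 * a * c;
        if (a <= 0.0000001 || determinant < 0) {
            System.out.println("no intersection");
        } else {
            double t1 = (-b + Math.sqrt(determinant)) / (2 * a);
            double t2 = (-b - Math.sqrt(determinant)) / (2 * a);
            // Edge#getIntersectionPoint returns the midpoint of the two crossing points
            double midX = ((startX + t1 * dX) + (startX + t2 * dX)) / 2;
            double midY = ((startY + t1 * dY) + (startY + t2 * dY)) / 2;
            System.out.println("midpoint of the chord: (" + midX + ", " + midY + ")"); // prints (5.0, 0.0)
        }
    }
}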
| arthur3486/graphpathfindingalgorithmsvisualization | GraphPathfindingAlgorithmsVisualization/src/com/arthurivanets/graphalgorithmsvisualization/graph/model/Edge.java | Java | apache-2.0 | 13,266 |
package de.turban.deadlock.tracer.runtime.serdata;
import javax.annotation.Nullable;
public interface ISerializationSnapshotCreator {
@Nullable
ISerializableData createSerializationSnapshot(int revision);
}
| AndreasTu/jctrace | subprojects/jctrace-core/src/main/java/de/turban/deadlock/tracer/runtime/serdata/ISerializationSnapshotCreator.java | Java | apache-2.0 | 227 |
/**
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4art.domain;
import java.util.HashMap;
import java.util.Map;
import org.neo4art.graphdb.Neo4ArtLabel;
import org.neo4art.graphdb.Neo4ArtNode;
import org.neo4j.graphdb.Label;
/**
* @author Lorenzo Speranzoni
* @since 4 Apr 2015
*/
public class Country implements Neo4ArtNode
{
private static final Label[] LABELS = new Label[] { Neo4ArtLabel.Country };
private Long nodeId;
// private String micronation;
private String conventionalLongName;
private String nativeName;
private String commonName;
// private String status;
// private String imageFlag;
// private String altFlag;
// private String flagBorder;
// private String imageFlag2;
// private String altFlag2;
// private String flag2Border;
// private String imageCoat;
// private String altCoat;
// private String symbolType;
// private String nationalMotto;
// private String englishmotto;
// private String nationalAnthem;
// private String royalAnthem;
// private String otherSymbolType;
// private String otherSymbol;
// private String imageMap;
// private String loctext;
// private String altMap;
// private String mapCaption;
// private String imageMap2;
// private String altMap2;
// private String mapCaption2;
// private String capital;
// private String capital2;
private Coordinate coordinate;
// private String largestCity;
// private String largestSettlementType;
// private String largestSettlement;
// private String officialLanguages;
// private String nationalLanguages;
// private String regionalLanguages;
// private String languagesType;
// private String languages;
// private String languagesSub;
// private String languages2Type;
// private String languages2;
// private String languages2Sub;
// private String ethnicGroups;
// private String ethnicGroupsYear;
// private String nationalities;
// private String religion;
// private String demonym;
// private String governmentType;
// private String leaderTitle1;
// private String leaderName1;
// private String leaderTitle2;
// private String leaderName2;
// private String leaderTitle3;
// private String leaderName3;
// private String leaderTitle4;
// private String leaderName4;
// private String leaderTitle5;
// private String leaderName5;
// private String leaderTitle6;
// private String leaderName6;
// private String legislature;
// private String upperHouse;
// private String lowerHouse;
// private String sovereigntyType;
// private String sovereigntyNote;
// private String establishedEvent1;
// private String establishedDate1;
// private String establishedEvent2;
// private String establishedDate2;
// private String establishedEvent3;
// private String establishedDate3;
// private String establishedEvent4;
// private String establishedDate4;
// private String establishedEvent5;
// private String establishedDate5;
// private String establishedEvent6;
// private String establishedDate6;
// private String establishedEvent7;
// private String establishedDate7;
// private String establishedEvent8;
// private String establishedDate8;
// private String establishedEvent9;
// private String establishedDate9;
// private String areaRank;
// private String areaMagnitude;
// private String area;
// private String areaKm2;
// private String areaSqMi;
// private String areaFootnote;
// private String percentWater;
// private String areaLabel;
// private String areaLabel2;
// private String areaData2;
// private String populationEstimate;
// private String populationEstimateRank;
// private String populationEstimateYear;
// private String populationCensus;
// private String populationCensusYear;
// private String populationDensityKm2;
// private String populationDensitySqMi;
// private String populationDensityRank;
// private String nummembers;
// private String GDPPPP;
// private String GDPPPPRank;
// private String GDPPPPYear;
// private String GDPPPPPerCapita;
// private String GDPPPPPerCapitaRank;
// private String GDPNominal;
// private String GDPNominalRank;
// private String GDPNominalYear;
// private String GDPNominalPerCapita;
// private String GDPNominalPerCapitaRank;
// private String Gini;
// private String GiniRef;
// private String GiniRank;
// private String GiniYear;
// private String HDIYear;
// private String HDI;
// private String HDIChange;
// private String HDIRank;
// private String HDIRef;
// private String currency;
// private String currencyCode;
// private String timeZone;
// private String utcOffset;
// private String timeZoneDST;
// private String utcOffsetDST;
// private String DSTNote;
// private String antipodes;
// private String dateFormat;
// private String drivesOn;
// private String cctld;
// private String iso3166code;
// private String callingCode;
// private String imageMap3;
// private String altMap3;
// private String footnoteA;
// private String footnoteB;
// private String footnoteC;
// private String footnoteD;
// private String footnoteE;
// private String footnoteF;
// private String footnoteG;
// private String footnoteH;
// private String footnotes;
private String type;
public String getType()
{
return type;
}
public void setType(String type)
{
this.type = type;
}
public Country()
{
}
public String getConventionalLongName()
{
return conventionalLongName;
}
public void setConventionalLongName(String conventionalLongName)
{
this.conventionalLongName = conventionalLongName;
}
public String getNativeName()
{
return nativeName;
}
public void setNativeName(String nativeName)
{
this.nativeName = nativeName;
}
public String getCommonName()
{
return commonName;
}
public void setCommonName(String commonName)
{
this.commonName = commonName;
}
public Coordinate getCoordinate()
{
return coordinate;
}
public void setCoordinate(Coordinate coordinate)
{
this.coordinate = coordinate;
}
@Override
public Long getNodeId()
{
return this.nodeId;
}
@Override
public void setNodeId(long nodeId)
{
this.nodeId = nodeId;
}
@Override
public Map<String, Object> getProperties()
{
Map<String, Object> properties = new HashMap<String, Object>();
if (this.commonName != null)
{
properties.put("commonName", this.commonName);
}
return properties;
}
@Override
public Label[] getLabels()
{
return LABELS;
}
} | MZaratin-Larus/neo4art | neo4art-commons/src/main/java/org/neo4art/domain/Country.java | Java | apache-2.0 | 7,330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package de.akquinet.gomobile.deployment.rp.autoconf;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.felix.metatype.AD;
import org.apache.felix.metatype.OCD;
import org.osgi.service.metatype.AttributeDefinition;
import org.osgi.service.metatype.ObjectClassDefinition;
public class ObjectClassDefinitionImpl implements ObjectClassDefinition {
private final OCD m_ocd;
public ObjectClassDefinitionImpl(OCD ocd) {
m_ocd = ocd;
}
public AttributeDefinition[] getAttributeDefinitions(int filter) {
if (m_ocd.getAttributeDefinitions() == null) {
return null;
}
if (filter != ObjectClassDefinition.OPTIONAL && filter != ObjectClassDefinition.REQUIRED && filter != ObjectClassDefinition.ALL) {
return null;
}
List result = new ArrayList();
for (Iterator i = m_ocd.getAttributeDefinitions().values().iterator(); i.hasNext();) {
AD ad = (AD) i.next();
if (filter != ObjectClassDefinition.ALL) {
if (ad.isRequired() && filter == ObjectClassDefinition.REQUIRED) {
result.add(new AttributeDefinitionImpl(ad));
}
else if (!ad.isRequired() && filter == ObjectClassDefinition.OPTIONAL) {
result.add(new AttributeDefinitionImpl(ad));
}
} else {
result.add(new AttributeDefinitionImpl(ad));
}
}
return (AttributeDefinition[]) result.toArray(new AttributeDefinition[result.size()]);
}
public InputStream getIcon(int size) throws IOException {
// TODO Auto-generated method stub
return null;
}
public String getDescription() {
return m_ocd.getDescription();
}
public String getID() {
return m_ocd.getID();
}
public String getName() {
return m_ocd.getName();
}
}
| akquinet/osgi-deployment-admin | autoconf-resource-processor/src/main/java/de/akquinet/gomobile/deployment/rp/autoconf/ObjectClassDefinitionImpl.java | Java | apache-2.0 | 2,686 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.relational;
import com.facebook.presto.common.function.OperatorType;
import com.facebook.presto.common.function.QualifiedFunctionName;
import com.facebook.presto.common.type.CharType;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.metadata.FunctionManager;
import com.facebook.presto.spi.function.FunctionHandle;
import com.facebook.presto.spi.function.StandardFunctionResolution;
import com.facebook.presto.sql.tree.ArithmeticBinaryExpression;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Optional;
import static com.facebook.presto.common.function.OperatorType.ADD;
import static com.facebook.presto.common.function.OperatorType.BETWEEN;
import static com.facebook.presto.common.function.OperatorType.DIVIDE;
import static com.facebook.presto.common.function.OperatorType.EQUAL;
import static com.facebook.presto.common.function.OperatorType.GREATER_THAN;
import static com.facebook.presto.common.function.OperatorType.GREATER_THAN_OR_EQUAL;
import static com.facebook.presto.common.function.OperatorType.IS_DISTINCT_FROM;
import static com.facebook.presto.common.function.OperatorType.LESS_THAN;
import static com.facebook.presto.common.function.OperatorType.LESS_THAN_OR_EQUAL;
import static com.facebook.presto.common.function.OperatorType.MODULUS;
import static com.facebook.presto.common.function.OperatorType.MULTIPLY;
import static com.facebook.presto.common.function.OperatorType.NEGATION;
import static com.facebook.presto.common.function.OperatorType.NOT_EQUAL;
import static com.facebook.presto.common.function.OperatorType.SUBSCRIPT;
import static com.facebook.presto.common.function.OperatorType.SUBTRACT;
import static com.facebook.presto.common.type.BooleanType.BOOLEAN;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static com.facebook.presto.metadata.BuiltInFunctionNamespaceManager.DEFAULT_NAMESPACE;
import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes;
import static com.facebook.presto.sql.tree.ArrayConstructor.ARRAY_CONSTRUCTOR;
import static com.facebook.presto.type.LikePatternType.LIKE_PATTERN;
import static com.google.common.base.Preconditions.checkArgument;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
public final class FunctionResolution
implements StandardFunctionResolution
{
private final FunctionManager functionManager;
public FunctionResolution(FunctionManager functionManager)
{
this.functionManager = requireNonNull(functionManager, "functionManager is null");
}
@Override
public FunctionHandle notFunction()
{
return functionManager.lookupFunction("not", fromTypes(BOOLEAN));
}
public boolean isNotFunction(FunctionHandle functionHandle)
{
return notFunction().equals(functionHandle);
}
@Override
public FunctionHandle likeVarcharFunction()
{
return functionManager.lookupFunction("LIKE", fromTypes(VARCHAR, LIKE_PATTERN));
}
@Override
public FunctionHandle likeCharFunction(Type valueType)
{
checkArgument(valueType instanceof CharType, "Expected CHAR value type");
return functionManager.lookupFunction("LIKE", fromTypes(valueType, LIKE_PATTERN));
}
public boolean isLikeFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "LIKE"));
}
public FunctionHandle likePatternFunction()
{
return functionManager.lookupFunction("LIKE_PATTERN", fromTypes(VARCHAR, VARCHAR));
}
@Override
public boolean isCastFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getOperatorType().equals(Optional.of(OperatorType.CAST));
}
public boolean isArrayConstructor(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, ARRAY_CONSTRUCTOR));
}
@Override
public FunctionHandle betweenFunction(Type valueType, Type lowerBoundType, Type upperBoundType)
{
return functionManager.lookupFunction(BETWEEN.getFunctionName().getFunctionName(), fromTypes(valueType, lowerBoundType, upperBoundType));
}
@Override
public boolean isBetweenFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getOperatorType().equals(Optional.of(BETWEEN));
}
@Override
public FunctionHandle arithmeticFunction(OperatorType operator, Type leftType, Type rightType)
{
checkArgument(operator.isArithmeticOperator(), format("unexpected arithmetic type %s", operator));
return functionManager.resolveOperator(operator, fromTypes(leftType, rightType));
}
public FunctionHandle arithmeticFunction(ArithmeticBinaryExpression.Operator operator, Type leftType, Type rightType)
{
OperatorType operatorType;
switch (operator) {
case ADD:
operatorType = ADD;
break;
case SUBTRACT:
operatorType = SUBTRACT;
break;
case MULTIPLY:
operatorType = MULTIPLY;
break;
case DIVIDE:
operatorType = DIVIDE;
break;
case MODULUS:
operatorType = MODULUS;
break;
default:
throw new IllegalStateException("Unknown arithmetic operator: " + operator);
}
return arithmeticFunction(operatorType, leftType, rightType);
}
@Override
public boolean isArithmeticFunction(FunctionHandle functionHandle)
{
Optional<OperatorType> operatorType = functionManager.getFunctionMetadata(functionHandle).getOperatorType();
return operatorType.isPresent() && operatorType.get().isArithmeticOperator();
}
@Override
public FunctionHandle negateFunction(Type type)
{
return functionManager.lookupFunction(NEGATION.getFunctionName().getFunctionName(), fromTypes(type));
}
@Override
public boolean isNegateFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getOperatorType().equals(Optional.of(NEGATION));
}
@Override
public FunctionHandle arrayConstructor(List<? extends Type> argumentTypes)
{
return functionManager.lookupFunction(ARRAY_CONSTRUCTOR, fromTypes(argumentTypes));
}
@Override
public FunctionHandle comparisonFunction(OperatorType operator, Type leftType, Type rightType)
{
checkArgument(operator.isComparisonOperator(), format("unexpected comparison type %s", operator));
return functionManager.resolveOperator(operator, fromTypes(leftType, rightType));
}
public FunctionHandle comparisonFunction(ComparisonExpression.Operator operator, Type leftType, Type rightType)
{
OperatorType operatorType;
switch (operator) {
case EQUAL:
operatorType = EQUAL;
break;
case NOT_EQUAL:
operatorType = NOT_EQUAL;
break;
case LESS_THAN:
operatorType = LESS_THAN;
break;
case LESS_THAN_OR_EQUAL:
operatorType = LESS_THAN_OR_EQUAL;
break;
case GREATER_THAN:
operatorType = GREATER_THAN;
break;
case GREATER_THAN_OR_EQUAL:
operatorType = GREATER_THAN_OR_EQUAL;
break;
case IS_DISTINCT_FROM:
operatorType = IS_DISTINCT_FROM;
break;
default:
throw new IllegalStateException("Unsupported comparison operator type: " + operator);
}
return comparisonFunction(operatorType, leftType, rightType);
}
@Override
public boolean isComparisonFunction(FunctionHandle functionHandle)
{
Optional<OperatorType> operatorType = functionManager.getFunctionMetadata(functionHandle).getOperatorType();
return operatorType.isPresent() && operatorType.get().isComparisonOperator();
}
@Override
public FunctionHandle subscriptFunction(Type baseType, Type indexType)
{
return functionManager.lookupFunction(SUBSCRIPT.getFunctionName().getFunctionName(), fromTypes(baseType, indexType));
}
@Override
public boolean isSubscriptFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getOperatorType().equals(Optional.of(SUBSCRIPT));
}
public FunctionHandle tryFunction(Type returnType)
{
return functionManager.lookupFunction("$internal$try", fromTypes(returnType));
}
public boolean isTryFunction(FunctionHandle functionHandle)
{
        return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "$internal$try"));
}
public boolean isFailFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "fail"));
}
@Override
public boolean isCountFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "count"));
}
@Override
public FunctionHandle countFunction()
{
return functionManager.lookupFunction("count", ImmutableList.of());
}
@Override
public FunctionHandle countFunction(Type valueType)
{
return functionManager.lookupFunction("count", fromTypes(valueType));
}
@Override
public boolean isMaxFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "max"));
}
@Override
public FunctionHandle maxFunction(Type valueType)
{
return functionManager.lookupFunction("max", fromTypes(valueType));
}
@Override
public boolean isMinFunction(FunctionHandle functionHandle)
{
return functionManager.getFunctionMetadata(functionHandle).getName().equals(QualifiedFunctionName.of(DEFAULT_NAMESPACE, "min"));
}
@Override
public FunctionHandle minFunction(Type valueType)
{
return functionManager.lookupFunction("min", fromTypes(valueType));
}
}
| twitter-forks/presto | presto-main/src/main/java/com/facebook/presto/sql/relational/FunctionResolution.java | Java | apache-2.0 | 11,331 |
package io.github.pascalgrimaud.qualitoast.service;
import io.github.pascalgrimaud.qualitoast.config.audit.AuditEventConverter;
import io.github.pascalgrimaud.qualitoast.repository.PersistenceAuditEventRepository;
import org.springframework.boot.actuate.audit.AuditEvent;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.Optional;
/**
* Service for managing audit events.
* <p>
* This is the default implementation to support SpringBoot Actuator AuditEventRepository
*/
@Service
@Transactional
public class AuditEventService {
private final PersistenceAuditEventRepository persistenceAuditEventRepository;
private final AuditEventConverter auditEventConverter;
public AuditEventService(
PersistenceAuditEventRepository persistenceAuditEventRepository,
AuditEventConverter auditEventConverter) {
this.persistenceAuditEventRepository = persistenceAuditEventRepository;
this.auditEventConverter = auditEventConverter;
}
public Page<AuditEvent> findAll(Pageable pageable) {
return persistenceAuditEventRepository.findAll(pageable)
.map(auditEventConverter::convertToAuditEvent);
}
public Page<AuditEvent> findByDates(Instant fromDate, Instant toDate, Pageable pageable) {
return persistenceAuditEventRepository.findAllByAuditEventDateBetween(fromDate, toDate, pageable)
.map(auditEventConverter::convertToAuditEvent);
}
public Page<AuditEvent> findByDatesDesc(Instant fromDate, Instant toDate, Pageable pageable) {
return persistenceAuditEventRepository.findAllByAuditEventDateBetweenOrderByAuditEventDateDesc(fromDate, toDate, pageable)
.map(auditEventConverter::convertToAuditEvent);
}
public Optional<AuditEvent> find(Long id) {
        return Optional.ofNullable(persistenceAuditEventRepository.findOne(id))
            .map(auditEventConverter::convertToAuditEvent);
}
}
| pascalgrimaud/qualitoast | src/main/java/io/github/pascalgrimaud/qualitoast/service/AuditEventService.java | Java | apache-2.0 | 2,127 |
package com.sequenceiq.periscope.monitor.event;
import org.springframework.context.ApplicationEvent;
import com.sequenceiq.periscope.domain.BaseAlert;
public class ScalingEvent extends ApplicationEvent {
public ScalingEvent(BaseAlert alert) {
super(alert);
}
public BaseAlert getAlert() {
return (BaseAlert) getSource();
}
}
| sequenceiq/cloudbreak | autoscale/src/main/java/com/sequenceiq/periscope/monitor/event/ScalingEvent.java | Java | apache-2.0 | 363 |
package com.ts.entity.pdss.pdss.RSBeans;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import com.ts.entity.pdss.pdss.RSBeans.DrugUserAuth.TDrugUserAuthResult;
import com.ts.entity.pdss.pdss.RSBeans.ias.TAntiDrugCheckResult;
public class TCheckResult
{
private String enter = "#A#D";
public String getCheckResult()
{
String checkResult = "";
checkResult += "给药途径审查结果 -红灯:" + admRedCount + ", 黄灯:" + admYellowCount + enter;
checkResult += "过敏审查结果 -红灯:" + dagRedCount + ", 黄灯:" + dagYellowCount + enter;
checkResult += "禁忌审查结果 -红灯:" + ddiRedCount + ", 黄灯:" + ddiYellowCount + enter;
checkResult += "剂量审查结果 -红灯:" + ddgRedCount + ", 黄灯:" + ddgYellowCount + enter;
checkResult += "重复成份审查结果 -红灯:" + didRedCount + ", 黄灯:" + didYellowCount + enter;
checkResult += "相互作用审查结果 -红灯:" + diaRedCount + ", 黄灯:" + diaYellowCount + enter;
checkResult += "配伍审查结果 -红灯:" + dieRedCount + ", 黄灯:" + dieYellowCount + enter;
checkResult += "特殊人群审查结果 -红灯:" + dspRedCount + ", 黄灯:" + dspYellowCount + enter;
checkResult += "不良反应审查结果 -红灯:" + dhfRedCount + ", 黄灯:" + dhfYellowCount + enter;
return checkResult;
}
public String getAlertLevel()
{
if ((diaRedCount + dieRedCount + admRedCount + dagRedCount + ddgRedCount +
ddiRedCount + dhfRedCount + didRedCount + dspRedCount) > 0)
return "R";
else
if ((diaYellowCount + dieYellowCount + admYellowCount + dagYellowCount + ddgYellowCount +
ddiYellowCount + dhfYellowCount + didYellowCount + dspYellowCount) > 0)
return "Y";
else
return "-";
}
    //administration route check result objects
private List<TAdministrationRslt> admList = new ArrayList<TAdministrationRslt>();
public void addAdminRslt(TAdministrationRslt adminr)
{
        // TODO: count the red/yellow alerts
admList.add(adminr);
if("R".equals(adminr.alertLevel))
this.admRedCount++;
else if("Y".equals(adminr.alertLevel))
this.admYellowCount++;
}
private int admRedCount = 0;
private int admYellowCount = 0;
public void CopyADMRsltTo(TCheckResult cr)
{
cr.admList = this.admList;
cr.admRedCount = this.admRedCount;
cr.admYellowCount = this.admYellowCount;
}
/**
     * administration route check result objects
* @return
*/
@XmlElement(name="getAdministrationRslt")
public TAdministrationRslt[] getAdministrationRslt()
{
return admList.toArray(new TAdministrationRslt[0]);
}
    //drug allergy check result objects
private List<TDrugAllergenRslt> dagList = new ArrayList<TDrugAllergenRslt>();
public void addDrugAllergenRslt(TDrugAllergenRslt dag)
{
        // TODO: count the red/yellow alerts
dagList.add(dag);
if("R".equals(dag.alertLevel))
this.dagRedCount++;
else if("Y".equals(dag.alertLevel))
this.dagYellowCount++;
}
private int dagRedCount = 0;
private int dagYellowCount = 0;
public void CopyDAGRsltTo(TCheckResult cr)
{
cr.dagList = this.dagList;
cr.dagRedCount = this.dagRedCount;
cr.dagYellowCount = this.dagYellowCount;
}
/**
     * drug allergy check result objects
* @return
*/
@XmlElement(name="getDrugAllergenRslt")
public TDrugAllergenRslt[] getDrugAllergenRslt()
{
return dagList.toArray(new TDrugAllergenRslt[0]);
}
    //drug contraindication check result objects
private List<TDrugDiagRslt> ddiList = new ArrayList<TDrugDiagRslt>();
public void addDrugDiagRslt(TDrugDiagRslt ddi)
{
        // TODO: count the red/yellow alerts
ddiList.add(ddi);
if("R".equals(ddi.alertLevel))
this.ddiRedCount++;
else if("Y".equals(ddi.alertLevel))
this.ddiYellowCount++;
}
private int ddiRedCount = 0;
private int ddiYellowCount = 0;
public void CopyDDIRsltTo(TCheckResult cr)
{
cr.ddiList = this.ddiList;
cr.ddiRedCount = this.ddiRedCount;
cr.ddiYellowCount = this.ddiYellowCount;
}
/**
     * drug contraindication check result objects
* @return
*/
@XmlElement(name="getDrugDiagRslt")
public TDrugDiagRslt[] getDrugDiagRslt()
{
return ddiList.toArray(new TDrugDiagRslt[0]);
}
    //drug dosage check result objects
private List<TDrugDosageRslt> ddgList = new ArrayList<TDrugDosageRslt>();
public void addDrugDosageRslt(TDrugDosageRslt ddg)
{
        // TODO: count the red/yellow alerts
ddgList.add(ddg);
if("R".equals(ddg.alertLevel))
this.ddgRedCount++;
else if("Y".equals(ddg.alertLevel))
this.ddgYellowCount++;
}
private int ddgRedCount = 0;
private int ddgYellowCount = 0;
public void CopyDDGRsltTo(TCheckResult cr)
{
cr.ddgList = this.ddgList;
cr.ddgRedCount = this.ddgRedCount;
cr.ddgYellowCount = this.ddgYellowCount ;
}
/**
     * drug dosage check result objects
* @return
*/
@XmlElement(name="getDrugDosageRslt")
public TDrugDosageRslt[] getDrugDosageRslt()
{
return ddgList.toArray(new TDrugDosageRslt[0]);
}
    //duplicate ingredient check result objects
private List<TDrugIngredientRslt> didList = new ArrayList<TDrugIngredientRslt>();
public void addDrugIngredientRslt(TDrugIngredientRslt did)
{
        // TODO: count the red/yellow alerts
didList.add(did);
if("R".equals(did.alertLevel))
this.didRedCount++;
else if("Y".equals(did.alertLevel))
this.didYellowCount++;
}
private int didRedCount = 0;
private int didYellowCount = 0;
public void CopyDIDRsltTo(TCheckResult cr)
{
cr.didList = this.didList;
cr.didRedCount = this.didRedCount;
cr.didYellowCount = this.didYellowCount;
}
/**
     * duplicate ingredient check result objects
* @return
*/
@XmlElement(name="getDrugIngredientRslt")
public TDrugIngredientRslt[] getDrugIngredientRslt()
{
return didList.toArray(new TDrugIngredientRslt[0]);
}
    //drug interaction check result objects
private List<TDrugInteractionRslt> diaList = new ArrayList<TDrugInteractionRslt>();
public void addInteractionRslt(TDrugInteractionRslt dir)
{
if (dir.getAlertLevel().equals("R"))
diaRedCount++;
else
if (dir.getAlertLevel().equals("Y"))
diaYellowCount++;
diaList.add(dir);
}
public void CopyIARsltTo(TCheckResult cr)
{
cr.diaList = this.diaList;
cr.diaRedCount = this.diaRedCount;
cr.diaYellowCount = this.diaYellowCount;
}
/**
     * drug interaction check result objects
* @return
*/
@XmlElement(name="getDrugInteractionRslt")
public TDrugInteractionRslt[] getDrugInteractionRslt()
{
return diaList.toArray(new TDrugInteractionRslt[0]);
}
private int diaRedCount = 0;
private int diaYellowCount = 0;
@XmlElement(name="getDiaRedCount")
public int getDiaRedCount()
{
return diaRedCount;
}
@XmlElement(name="getDiaYellowCount")
public int getDiaYellowCount()
{
return diaYellowCount;
}
    //IV compatibility check result objects
private List<TDrugIvEffectRslt> dieList = new ArrayList<TDrugIvEffectRslt>();
public void addDrugIvEffectRslt(TDrugIvEffectRslt die)
{
        // TODO: count the red/yellow alerts
dieList.add(die);
if("R".equals(die.alertLevel))
this.dieRedCount++;
else if("Y".equals(die.alertLevel))
this.dieYellowCount++;
}
public void CopyIERsltTo(TCheckResult cr)
{
cr.dieList = this.dieList;
cr.dieRedCount = this.dieRedCount ;
cr.dieYellowCount = this.dieYellowCount ;
}
private int dieRedCount = 0;
private int dieYellowCount = 0;
/**
     * IV compatibility check result objects
* @return
*/
@XmlElement(name="getDrugIvEffectRslt")
public TDrugIvEffectRslt[] getDrugIvEffectRslt()
{
return dieList.toArray(new TDrugIvEffectRslt[0]);
}
    //special population check result objects
private List<TDrugSpecPeopleRslt> dspList = new ArrayList<TDrugSpecPeopleRslt>();
public void addDrugSpecPeopleRslt(TDrugSpecPeopleRslt dsp)
{
        // TODO: count the red/yellow alerts
dspList.add(dsp);
if("R".equals(dsp.alertLevel))
this.dspRedCount++;
else if("Y".equals(dsp.alertLevel))
this.dspYellowCount++;
}
private int dspRedCount = 0;
private int dspYellowCount = 0;
public void CopyDSPRsltTo(TCheckResult cr)
{
cr.dspList = this.dspList;
cr.dspRedCount = this.dspRedCount ;
cr.dspYellowCount = this.dspYellowCount ;
}
/**
     * special population check result objects
* @return
*/
@XmlElement(name="getDrugSpecPeopleRslt")
public TDrugSpecPeopleRslt[] getDrugSpecPeopleRslt()
{
return dspList.toArray(new TDrugSpecPeopleRslt[0]);
}
    //adverse reaction check result objects
private List<TDrugHarmfulRslt> dhfList = new ArrayList<TDrugHarmfulRslt>();
public void addDrugHarmfulRslt(TDrugHarmfulRslt dhf)
{
        // TODO: count the red/yellow alerts
dhfList.add(dhf);
if("R".equals(dhf.alertLevel))
this.dhfRedCount++;
else if("Y".equals(dhf.alertLevel))
this.dhfYellowCount++;
}
private int dhfRedCount = 0;
private int dhfYellowCount = 0;
public void CopyDHRsltTo(TCheckResult cr)
{
cr.dhfList = this.dhfList;
cr.dhfRedCount = this.dhfRedCount;
cr.dhfYellowCount = this.dhfYellowCount ;
}
/**
     * adverse reaction check result objects
* @return
*/
@XmlElement(name="getSideRslt")
public TDrugHarmfulRslt[] getSideRslt()
{
return dhfList.toArray(new TDrugHarmfulRslt[0]);
}
    // antimicrobial drugs
private List<TAntiDrugCheckResult> adcrList = new ArrayList<TAntiDrugCheckResult>();
public void addTAntiDrugCheckResult(TAntiDrugCheckResult adcr)
{
this.adcrList.add(adcr);
if("R".equals(adcr.alertLevel))
this.adcrRedCount++;
else if("Y".equals(adcr.alertLevel))
this.adcrYellowCount++;
}
private int adcrRedCount = 0;
private int adcrYellowCount = 0;
public void CopyADCRRsltTo(TCheckResult cr)
{
cr.adcrList = this.adcrList;
cr.adcrRedCount = this.adcrRedCount;
cr.adcrYellowCount = this.adcrYellowCount ;
}
/**
     * antimicrobial drug check result objects
* @return
*/
@XmlElement(name="getAdcrRslt")
public TAntiDrugCheckResult[] getAdcrRslt()
{
return adcrList.toArray(new TAntiDrugCheckResult[0]);
}
private List<TDrugUserAuthResult> duAuthList = new ArrayList<TDrugUserAuthResult>();
public void addDrugUserAuthResult(TDrugUserAuthResult dua)
{
this.duAuthList.add(dua);
if("R".equals(dua.alertLevel))
this.duAuthRedCount++;
else if("Y".equals(dua.alertLevel))
this.duAuthYellowCount++;
}
private int duAuthRedCount = 0;
private int duAuthYellowCount = 0;
public void CopyDuAuthRsltTo(TCheckResult cr)
{
cr.duAuthList = this.duAuthList;
cr.duAuthRedCount = this.duAuthRedCount;
cr.duAuthYellowCount = this.duAuthYellowCount ;
}
/**
     * drug user authorization check result objects
* @return
*/
@XmlElement(name="getDuAuthRslt")
public TDrugUserAuthResult[] getDuAuthRslt()
{
return duAuthList.toArray(new TDrugUserAuthResult[0]);
}
    //medical insurance check
private List<TMedicareRslt> mcareList = new ArrayList<TMedicareRslt>();
public void addMedicareRslt(TMedicareRslt mcare)
{
mcareList.add(mcare);
}
public void CopyMCRsltTo(TCheckResult cr)
{
cr.mcareList = this.mcareList;
}
/**
     * medical insurance check
* @return
*/
@XmlElement(name="getMedicareRslt")
public TMedicareRslt[] getMedicareRslt()
{
return (TMedicareRslt[])mcareList.toArray(new TMedicareRslt[0]);
}
@XmlElement(name="getDuAuthRedCount")
public int getDuAuthRedCount()
{
return duAuthRedCount;
}
@XmlElement(name="getDuAuthYellowCount")
public int getDuAuthYellowCount()
{
return duAuthYellowCount;
}
@XmlElement(name="getAdcrRedCount")
public int getAdcrRedCount()
{
return adcrRedCount;
}
@XmlElement(name="getAdcrYellowCount")
public int getAdcrYellowCount()
{
return adcrYellowCount;
}
@XmlElement(name="getAdmRedCount")
public int getAdmRedCount() {
return admRedCount;
}
@XmlElement(name="getAdmYellowCount")
public int getAdmYellowCount() {
return admYellowCount;
}
@XmlElement(name="getDagRedCount")
public int getDagRedCount() {
return dagRedCount;
}
@XmlElement(name="getDagYellowCount")
public int getDagYellowCount() {
return dagYellowCount;
}
@XmlElement(name="getDdiRedCount")
public int getDdiRedCount() {
return ddiRedCount;
}
@XmlElement(name="getDdiYellowCount")
public int getDdiYellowCount() {
return ddiYellowCount;
}
@XmlElement(name="getDdgRedCount")
public int getDdgRedCount() {
return ddgRedCount;
}
@XmlElement(name="getDdgYellowCount")
public int getDdgYellowCount() {
return ddgYellowCount;
}
@XmlElement(name="getDidRedCount")
public int getDidRedCount() {
return didRedCount;
}
@XmlElement(name="getDidYellowCount")
public int getDidYellowCount() {
return didYellowCount;
}
@XmlElement(name="getDieRedCount")
public int getDieRedCount() {
return dieRedCount;
}
@XmlElement(name="getDieYellowCount")
public int getDieYellowCount() {
return dieYellowCount;
}
@XmlElement(name="getDspRedCount")
public int getDspRedCount() {
return dspRedCount;
}
@XmlElement(name="getDspYellowCount")
public int getDspYellowCount() {
return dspYellowCount;
}
@XmlElement(name="getDhfRedCount")
public int getDhfRedCount() {
return dhfRedCount;
}
@XmlElement(name="getDhfYellowCount")
public int getDhfYellowCount() {
return dhfYellowCount;
}
}
| ljcservice/autumnprogram | src/main/java/com/ts/entity/pdss/pdss/RSBeans/TCheckResult.java | Java | apache-2.0 | 14,045 |
/*
* Copyright 2003 - 2016 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.ci;
import org.efaps.util.cache.CacheReloadException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Each class that extends this abstract class represents a configuration
 * item for an eFaps form. It gives typed access to configuration items
 * while coding esjps etc., instead of error-prone string lookups. The
 * classes are generated automatically by a Maven target.
*
* @author The eFaps Team
*
*/
//CHECKSTYLE:OFF
public abstract class CIForm
extends CICollection
{
//CHECKSTYLE:ON
/**
* Logging instance used in this class.
*/
    private static final Logger LOG = LoggerFactory.getLogger(CIForm.class);
/**
* Constructor setting the uuid.
* @param _uuid UUID of this type
*/
protected CIForm(final String _uuid)
{
super(_uuid);
}
/**
* Get the type this Configuration item represents.
* @return Form
*/
public org.efaps.admin.ui.Form getType()
{
org.efaps.admin.ui.Form ret = null;
try {
ret = org.efaps.admin.ui.Form.get(this.uuid);
} catch (final CacheReloadException e) {
            CIForm.LOG.error("Error on retrieving Form for CIForm with uuid: {}", this.uuid);
}
return ret;
}
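    // Illustrative usage sketch: the concrete subclass and UUID below are placeholders for
    // what the code-generating Maven target would normally produce.
    //   public class CIFormExample extends CIForm
    //   {
    //       public static final CIFormExample FORM = new CIFormExample("00000000-0000-0000-0000-000000000000");
    //       private CIFormExample(final String _uuid) { super(_uuid); }
    //   }
    //   org.efaps.admin.ui.Form form = CIFormExample.FORM.getType();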
}
| eFaps/eFaps-Kernel | src/main/java/org/efaps/ci/CIForm.java | Java | apache-2.0 | 1,941 |
package org.openqa.grid.selenium.utils;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;
import org.openqa.grid.common.exception.GridException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.net.NetworkUtils;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.server.RemoteControlConfiguration;
import org.openqa.selenium.server.cli.RemoteControlLauncher;
public class GridConfiguration {
private GridRole role = GridRole.NOT_GRID;
private int timeout = 30;
private int maxConcurrent = 5;
private URL registrationURL;
private int port = 4444;
private String host;
private boolean throwOnCapabilityNotPresent = true;
private String[] seleniumServerargs = new String[0];
private RemoteControlConfiguration nodeConfig = new RemoteControlConfiguration();
private NetworkUtils networkUtils = new NetworkUtils();
private List<String> servlets = new ArrayList<String>();
private List<DesiredCapabilities> capabilities = new ArrayList<DesiredCapabilities>();
private String file;
public static GridConfiguration parse(String[] args) {
List<String> leftOver = new ArrayList<String>();
GridConfiguration config = new GridConfiguration();
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if ("-role".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
if ("hub".equalsIgnoreCase(v)) {
config.setRole(GridRole.HUB);
} else if ("remotecontrol".equalsIgnoreCase(v) || "remote-control".equalsIgnoreCase(v) || "rc".equalsIgnoreCase(v)) {
config.setRole(GridRole.REMOTE_CONTROL);
} else if ("webdriver".equalsIgnoreCase(v) || "wd".equalsIgnoreCase(v)) {
config.setRole(GridRole.WEBDRIVER);
} else {
config.setRole(GridRole.NOT_GRID);
printHelpAndDie("wrong role");
}
} else if ("-hub".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
try {
config.setRegistrationURL(new URL(v));
} catch (MalformedURLException e) {
printHelpAndDie("invalid url : " + v);
}
} else if ("-port".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setPort(Integer.parseInt(v));
// -port is common for Grid and SeleniumServer
leftOver.add(arg);
leftOver.add(v);
} else if ("-host".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setHost(v);
} else if ("-nodeTimeout".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setNodeTimeoutInSec(Integer.parseInt(v));
} else if ("-maxConcurrent".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setMaxConcurrentTests(Integer.parseInt(v));
} else if ("-browser".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.addCapabilityFromString(v);
} else if ("-servlet".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.addServlet(v);
} else if ("-throwCapabilityNotPresent".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setThrowOnCapabilityNotPresent(Boolean.parseBoolean(v));
} else if ("-file".equalsIgnoreCase(arg)) {
i++;
String v = getArgValue(args, i);
config.setFile(v);
} else {
leftOver.add(arg);
}
}
config.setSeleniumServerArgs(leftOver);
try {
config.validate();
} catch (InvalidParameterException e) {
printHelpAndDie(e.getMessage());
}
return config;
}
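  // Illustrative usage sketch (the argument values are placeholders):
  //   GridConfiguration config = GridConfiguration.parse(new String[] {
  //       "-role", "remotecontrol",
  //       "-hub", "http://localhost:4444/grid/register",
  //       "-port", "5555"});
  //   config.getRole();            // GridRole.REMOTE_CONTROL
  //   config.getRegistrationURL(); // http://localhost:4444/grid/register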
private void setFile(String v) {
this.file = v;
}
public boolean isThrowOnCapabilityNotPresent() {
return throwOnCapabilityNotPresent;
}
public void setThrowOnCapabilityNotPresent(boolean throwOnCapabilityNotPresent) {
this.throwOnCapabilityNotPresent = throwOnCapabilityNotPresent;
}
/**
* To get the list of extra servlet the hub should register.
*
* @return
*/
public List<String> getServlets() {
return servlets;
}
private void addServlet(String v) {
servlets.add(v);
}
public List<DesiredCapabilities> getCapabilities() {
return capabilities;
}
private void addCapabilityFromString(String capability) {
String[] s = capability.split(",");
if (s.length == 0) {
throw new InvalidParameterException("-browser must be followed by a browser description");
}
DesiredCapabilities res = new DesiredCapabilities();
for (int i = 0; i < s.length; i++) {
if (s[i].split("=").length != 2) {
throw new InvalidParameterException("-browser format is key1=value1,key2=value2 " + s[i] + " deosn't follow that format.");
}
String key = s[i].split("=")[0];
String value = s[i].split("=")[1];
res.setCapability(key, value);
}
if (res.getBrowserName() == null){
throw new GridException("You need to specify a browserName using browserName=XXX");
}
capabilities.add(res);
}
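  // Illustrative sketch of the -browser value format this method expects (values are placeholders):
  //   addCapabilityFromString("browserName=firefox,version=3.6,platform=LINUX");
  //   // -> a DesiredCapabilities entry with browserName=firefox, version=3.6, platform=LINUX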
/**
* returns the value of the argument indexed i.
*
* @param args
* @param i
* @return
*/
private static String getArgValue(String[] args, int i) {
if (i >= args.length) {
printHelpAndDie("expected a value after " + args[i]);
}
return args[i];
}
private static void printHelpAndDie(String msg) {
String INDENT = " ";
RemoteControlLauncher.printWrappedErrorLine("", "Error with the parameters :" + msg);
RemoteControlLauncher.printWrappedErrorLine("", "To use as a grid, specify a role and its arguments.");
RemoteControlLauncher
.printWrappedErrorLine(
INDENT,
"-role <hub|remotecontrol|webdriver> (default is no grid -- just run an RC server). When launching a node for webdriver"
+ " or remotecontrol, the parameters will be forwarded to the server on the node, so you can use something like -role remotecontrol -trustAllSSLCertificates."
+ " In that case, the SeleniumServer will be launch with the trustallCertificats option.");
RemoteControlLauncher.printWrappedErrorLine(INDENT,
"-hub <http://localhost:4444/grid/register> : the url that will be used to post the registration request.");
RemoteControlLauncher.printWrappedErrorLine(INDENT,
"-host <IP | hostname> : usually not needed and determined automatically. For exotic network configuration, network with VPN, "
+ "specifying the host might be necessary.");
RemoteControlLauncher.printWrappedErrorLine(INDENT, "-port <xxxx> : the port the remote/hub will listen on.Default to 4444.");
RemoteControlLauncher.printWrappedErrorLine(INDENT,
"-nodeTimeout <xxxx> : the timeout in seconds before the hub automatically releases a node that hasn't received any requests for more than XX sec."
+ " The browser will be released for another test to use.This tupically takes care of the client crashes.");
RemoteControlLauncher
.printWrappedErrorLine(
INDENT,
"-maxConcurrent <x> : Defaults to 5. The maximum number of tests that can run at the same time on the node. "
+ "Different from the supported browsers.For a node that supports firefox 3.6, firefox 4.0 and IE8 for instance,maxConccurent=1 "
+ "will ensure that you never have more than 1 browserrunning. With maxConcurrent=2 you can have 2 firefox tests at the same time, or 1 IE and 1 FF. ");
RemoteControlLauncher
.printWrappedErrorLine(INDENT,
"-servlet <com.mycompany.MyServlet> to register a new servlet on the hub. The servlet will accessible under the path /grid/admin/MyServlet");
RemoteControlLauncher
.printWrappedErrorLine(
INDENT,
"-throwCapabilityNotPresent <true | false> default to true. If true, the hub will reject test request right away if no proxy is currently registered that can host that capability.");
// -browser
// browserName=firefox,version=3.6,firefox_binary=/Users/freynaud
System.exit(-1);
}
public String getHost() {
if (host == null) {
host = networkUtils.getIp4NonLoopbackAddressOfThisMachine().getHostAddress();
}
return host;
}
public void setHost(String host) {
this.host = host;
}
public URL getRegistrationURL() {
return registrationURL;
}
public void setRegistrationURL(URL registrationURL) {
this.registrationURL = registrationURL;
}
public GridRole getRole() {
return role;
}
public int getPort() {
return port;
}
public void setRole(GridRole role) {
this.role = role;
}
public void setPort(int port) {
this.port = port;
getNodeRemoteControlConfiguration().setPort(port);
}
/**
* Validate the current config
*
* @throws InvalidParameterException
* if the CLA are wrong
*/
public void validate() {
if (role == GridRole.WEBDRIVER || role == GridRole.REMOTE_CONTROL) {
if (registrationURL == null) {
throw new InvalidParameterException("registration url cannot be null");
}
      // TODO freyanud : validation should also check that the selenium server
      // params passed to the node do not contain anything that doesn't make
      // sense in a grid environment. For instance launching a node with
// -interactive.
      if (getNodeRemoteControlConfiguration().isInteractive()) {
throw new InvalidParameterException("no point launching the node in interactive mode");
}
}
}
public void setSeleniumServerArgs(List<String> leftOver) {
seleniumServerargs = leftOver.toArray(new String[leftOver.size()]);
nodeConfig = RemoteControlLauncher.parseLauncherOptions(seleniumServerargs);
}
public RemoteControlConfiguration getNodeRemoteControlConfiguration() {
return nodeConfig;
}
public void setNodeTimeoutInSec(int sec) {
this.timeout = sec;
}
public int getNodeTimeoutInSec() {
return timeout;
}
public int getMaxConcurrentTests() {
return maxConcurrent;
}
public void setMaxConcurrentTests(int maxConcurrent) {
this.maxConcurrent = maxConcurrent;
}
public String getFile() {
return file;
}
}
| akiellor/selenium | java/server/src/org/openqa/grid/selenium/utils/GridConfiguration.java | Java | apache-2.0 | 9,894 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.producer.internals;
import org.apache.kafka.clients.ApiVersions;
import org.apache.kafka.clients.Metadata;
import org.apache.kafka.clients.MockClient;
import org.apache.kafka.clients.NodeApiVersions;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.internals.ClusterResourceListeners;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.requests.AbstractRequest;
import org.apache.kafka.common.requests.ApiVersionsResponse;
import org.apache.kafka.common.requests.ProduceRequest;
import org.apache.kafka.common.requests.InitPidRequest;
import org.apache.kafka.common.requests.InitPidResponse;
import org.apache.kafka.common.requests.ProduceResponse;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class SenderTest {
private static final int MAX_REQUEST_SIZE = 1024 * 1024;
private static final short ACKS_ALL = -1;
private static final int MAX_RETRIES = 0;
private static final String CLIENT_ID = "clientId";
private static final String METRIC_GROUP = "producer-metrics";
private static final double EPS = 0.0001;
private static final int MAX_BLOCK_TIMEOUT = 1000;
private static final int REQUEST_TIMEOUT = 1000;
private TopicPartition tp0 = new TopicPartition("test", 0);
private TopicPartition tp1 = new TopicPartition("test", 1);
private MockTime time = new MockTime();
private MockClient client = new MockClient(time);
private int batchSize = 16 * 1024;
private Metadata metadata = new Metadata(0, Long.MAX_VALUE, true, new ClusterResourceListeners());
private ApiVersions apiVersions = new ApiVersions();
private Cluster cluster = TestUtils.singletonCluster("test", 2);
private Metrics metrics = null;
private RecordAccumulator accumulator = null;
private Sender sender = null;
@Before
public void setup() {
setupWithTransactionState(null);
}
@After
public void tearDown() {
this.metrics.close();
}
@Test
public void testSimple() throws Exception {
long offset = 0;
Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds()); // connect
sender.run(time.milliseconds()); // send produce request
assertEquals("We should have a single produce request in flight.", 1, client.inFlightRequestCount());
assertTrue(client.hasInFlightRequests());
client.respond(produceResponse(tp0, offset, Errors.NONE, 0));
sender.run(time.milliseconds());
assertEquals("All requests completed.", 0, client.inFlightRequestCount());
assertFalse(client.hasInFlightRequests());
sender.run(time.milliseconds());
assertTrue("Request should be completed", future.isDone());
assertEquals(offset, future.get().offset());
}
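    // Sketch of the drive pattern used throughout this file: each sender.run(now) call performs
    // one iteration of the I/O loop, and MockClient plays the broker side of the exchange.
    //   accumulator.append(...);                                  // queue a record
    //   sender.run(time.milliseconds());                          // connect
    //   sender.run(time.milliseconds());                          // send the produce request
    //   client.respond(produceResponse(tp0, 0, Errors.NONE, 0));  // fake the broker response
    //   sender.run(time.milliseconds());                          // handle the response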
@Test
public void testMessageFormatDownConversion() throws Exception {
// this test case verifies the behavior when the version of the produce request supported by the
// broker changes after the record set is created
long offset = 0;
// start off support produce request v3
apiVersions.update("0", NodeApiVersions.create());
Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(),
null, null, MAX_BLOCK_TIMEOUT).future;
// now the partition leader supports only v2
apiVersions.update("0", NodeApiVersions.create(Collections.singleton(
new ApiVersionsResponse.ApiVersion(ApiKeys.PRODUCE.id, (short) 0, (short) 2))));
client.prepareResponse(new MockClient.RequestMatcher() {
@Override
public boolean matches(AbstractRequest body) {
ProduceRequest request = (ProduceRequest) body;
if (request.version() != 2)
return false;
MemoryRecords records = request.partitionRecordsOrFail().get(tp0);
return records != null &&
records.sizeInBytes() > 0 &&
records.hasMatchingMagic(RecordBatch.MAGIC_VALUE_V1);
}
}, produceResponse(tp0, offset, Errors.NONE, 0));
sender.run(time.milliseconds()); // connect
sender.run(time.milliseconds()); // send produce request
assertTrue("Request should be completed", future.isDone());
assertEquals(offset, future.get().offset());
}
@Test
public void testDownConversionForMismatchedMagicValues() throws Exception {
// it can happen that we construct a record set with mismatching magic values (perhaps
// because the partition leader changed after the record set was initially constructed)
// in this case, we down-convert record sets with newer magic values to match the oldest
// created record set
long offset = 0;
// start off support produce request v3
apiVersions.update("0", NodeApiVersions.create());
Future<RecordMetadata> future1 = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(),
null, null, MAX_BLOCK_TIMEOUT).future;
// now the partition leader supports only v2
apiVersions.update("0", NodeApiVersions.create(Collections.singleton(
new ApiVersionsResponse.ApiVersion(ApiKeys.PRODUCE.id, (short) 0, (short) 2))));
Future<RecordMetadata> future2 = accumulator.append(tp1, 0L, "key".getBytes(), "value".getBytes(),
null, null, MAX_BLOCK_TIMEOUT).future;
// start off support produce request v3
apiVersions.update("0", NodeApiVersions.create());
ProduceResponse.PartitionResponse resp = new ProduceResponse.PartitionResponse(Errors.NONE, offset, RecordBatch.NO_TIMESTAMP);
Map<TopicPartition, ProduceResponse.PartitionResponse> partResp = new HashMap<>();
partResp.put(tp0, resp);
partResp.put(tp1, resp);
ProduceResponse produceResponse = new ProduceResponse(partResp, 0);
client.prepareResponse(new MockClient.RequestMatcher() {
@Override
public boolean matches(AbstractRequest body) {
ProduceRequest request = (ProduceRequest) body;
if (request.version() != 2)
return false;
Map<TopicPartition, MemoryRecords> recordsMap = request.partitionRecordsOrFail();
if (recordsMap.size() != 2)
return false;
for (MemoryRecords records : recordsMap.values()) {
if (records == null || records.sizeInBytes() == 0 || !records.hasMatchingMagic(RecordBatch.MAGIC_VALUE_V1))
return false;
}
return true;
}
}, produceResponse);
sender.run(time.milliseconds()); // connect
sender.run(time.milliseconds()); // send produce request
assertTrue("Request should be completed", future1.isDone());
assertTrue("Request should be completed", future2.isDone());
}
/*
* Send multiple requests. Verify that the client side quota metrics have the right values
*/
@Test
public void testQuotaMetrics() throws Exception {
final long offset = 0;
for (int i = 1; i <= 3; i++) {
accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT);
sender.run(time.milliseconds()); // send produce request
client.respond(produceResponse(tp0, offset, Errors.NONE, 100 * i));
sender.run(time.milliseconds());
}
Map<MetricName, KafkaMetric> allMetrics = metrics.metrics();
KafkaMetric avgMetric = allMetrics.get(metrics.metricName("produce-throttle-time-avg", METRIC_GROUP, ""));
KafkaMetric maxMetric = allMetrics.get(metrics.metricName("produce-throttle-time-max", METRIC_GROUP, ""));
assertEquals(200, avgMetric.value(), EPS);
assertEquals(300, maxMetric.value(), EPS);
}
@Test
public void testRetries() throws Exception {
// create a sender with retries = 1
int maxRetries = 1;
Metrics m = new Metrics();
try {
Sender sender = new Sender(client,
metadata,
this.accumulator,
false,
MAX_REQUEST_SIZE,
ACKS_ALL,
maxRetries,
m,
time,
REQUEST_TIMEOUT,
50,
null,
apiVersions
);
// do a successful retry
Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds()); // connect
sender.run(time.milliseconds()); // send produce request
String id = client.requests().peek().destination();
Node node = new Node(Integer.parseInt(id), "localhost", 0);
assertEquals(1, client.inFlightRequestCount());
assertTrue(client.hasInFlightRequests());
assertTrue("Client ready status should be true", client.isReady(node, 0L));
client.disconnect(id);
assertEquals(0, client.inFlightRequestCount());
assertFalse(client.hasInFlightRequests());
assertFalse("Client ready status should be false", client.isReady(node, 0L));
sender.run(time.milliseconds()); // receive error
sender.run(time.milliseconds()); // reconnect
sender.run(time.milliseconds()); // resend
assertEquals(1, client.inFlightRequestCount());
assertTrue(client.hasInFlightRequests());
long offset = 0;
client.respond(produceResponse(tp0, offset, Errors.NONE, 0));
sender.run(time.milliseconds());
assertTrue("Request should have retried and completed", future.isDone());
assertEquals(offset, future.get().offset());
// do an unsuccessful retry
future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds()); // send produce request
for (int i = 0; i < maxRetries + 1; i++) {
client.disconnect(client.requests().peek().destination());
sender.run(time.milliseconds()); // receive error
sender.run(time.milliseconds()); // reconnect
sender.run(time.milliseconds()); // resend
}
sender.run(time.milliseconds());
completedWithError(future, Errors.NETWORK_EXCEPTION);
} finally {
m.close();
}
}
@Test
public void testSendInOrder() throws Exception {
int maxRetries = 1;
Metrics m = new Metrics();
try {
Sender sender = new Sender(client,
metadata,
this.accumulator,
true,
MAX_REQUEST_SIZE,
ACKS_ALL,
maxRetries,
m,
time,
REQUEST_TIMEOUT,
50,
null,
apiVersions
);
// Create a two broker cluster, with partition 0 on broker 0 and partition 1 on broker 1
Cluster cluster1 = TestUtils.clusterWith(2, "test", 2);
metadata.update(cluster1, Collections.<String>emptySet(), time.milliseconds());
// Send the first message.
TopicPartition tp2 = new TopicPartition("test", 1);
accumulator.append(tp2, 0L, "key1".getBytes(), "value1".getBytes(), null, null, MAX_BLOCK_TIMEOUT);
sender.run(time.milliseconds()); // connect
sender.run(time.milliseconds()); // send produce request
String id = client.requests().peek().destination();
assertEquals(ApiKeys.PRODUCE, client.requests().peek().requestBuilder().apiKey());
Node node = new Node(Integer.parseInt(id), "localhost", 0);
assertEquals(1, client.inFlightRequestCount());
assertTrue(client.hasInFlightRequests());
assertTrue("Client ready status should be true", client.isReady(node, 0L));
time.sleep(900);
// Now send another message to tp2
accumulator.append(tp2, 0L, "key2".getBytes(), "value2".getBytes(), null, null, MAX_BLOCK_TIMEOUT);
// Update metadata before sender receives response from broker 0. Now partition 2 moves to broker 0
Cluster cluster2 = TestUtils.singletonCluster("test", 2);
metadata.update(cluster2, Collections.<String>emptySet(), time.milliseconds());
// Sender should not send the second message to node 0.
sender.run(time.milliseconds());
assertEquals(1, client.inFlightRequestCount());
assertTrue(client.hasInFlightRequests());
} finally {
m.close();
}
}
/**
* Tests that topics are added to the metadata list when messages are available to send
* and expired if not used during a metadata refresh interval.
*/
@Test
public void testMetadataTopicExpiry() throws Exception {
long offset = 0;
metadata.update(Cluster.empty(), Collections.<String>emptySet(), time.milliseconds());
Future<RecordMetadata> future = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds());
assertTrue("Topic not added to metadata", metadata.containsTopic(tp0.topic()));
metadata.update(cluster, Collections.<String>emptySet(), time.milliseconds());
sender.run(time.milliseconds()); // send produce request
client.respond(produceResponse(tp0, offset++, Errors.NONE, 0));
sender.run(time.milliseconds());
assertEquals("Request completed.", 0, client.inFlightRequestCount());
assertFalse(client.hasInFlightRequests());
sender.run(time.milliseconds());
assertTrue("Request should be completed", future.isDone());
assertTrue("Topic not retained in metadata list", metadata.containsTopic(tp0.topic()));
time.sleep(Metadata.TOPIC_EXPIRY_MS);
metadata.update(Cluster.empty(), Collections.<String>emptySet(), time.milliseconds());
assertFalse("Unused topic has not been expired", metadata.containsTopic(tp0.topic()));
future = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds());
assertTrue("Topic not added to metadata", metadata.containsTopic(tp0.topic()));
metadata.update(cluster, Collections.<String>emptySet(), time.milliseconds());
sender.run(time.milliseconds()); // send produce request
client.respond(produceResponse(tp0, offset++, Errors.NONE, 0));
sender.run(time.milliseconds());
assertEquals("Request completed.", 0, client.inFlightRequestCount());
assertFalse(client.hasInFlightRequests());
sender.run(time.milliseconds());
assertTrue("Request should be completed", future.isDone());
}
@Test
public void testInitPidRequest() throws Exception {
final long producerId = 343434L;
TransactionManager transactionManager = new TransactionManager();
setupWithTransactionState(transactionManager);
client.setNode(new Node(1, "localhost", 33343));
client.prepareResponse(new MockClient.RequestMatcher() {
@Override
public boolean matches(AbstractRequest body) {
return body instanceof InitPidRequest;
}
}, new InitPidResponse(Errors.NONE, producerId, (short) 0));
sender.run(time.milliseconds());
assertTrue(transactionManager.hasPid());
assertEquals(producerId, transactionManager.pidAndEpoch().producerId);
assertEquals((short) 0, transactionManager.pidAndEpoch().epoch);
}
@Test
public void testSequenceNumberIncrement() throws InterruptedException {
final long producerId = 343434L;
TransactionManager transactionManager = new TransactionManager();
transactionManager.setPidAndEpoch(producerId, (short) 0);
setupWithTransactionState(transactionManager);
client.setNode(new Node(1, "localhost", 33343));
int maxRetries = 10;
Metrics m = new Metrics();
Sender sender = new Sender(client,
metadata,
this.accumulator,
true,
MAX_REQUEST_SIZE,
ACKS_ALL,
maxRetries,
m,
time,
REQUEST_TIMEOUT,
50,
transactionManager,
apiVersions
);
Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
client.prepareResponse(new MockClient.RequestMatcher() {
@Override
public boolean matches(AbstractRequest body) {
if (body instanceof ProduceRequest) {
ProduceRequest request = (ProduceRequest) body;
MemoryRecords records = request.partitionRecordsOrFail().get(tp0);
Iterator<MutableRecordBatch> batchIterator = records.batches().iterator();
assertTrue(batchIterator.hasNext());
RecordBatch batch = batchIterator.next();
assertFalse(batchIterator.hasNext());
assertEquals(0, batch.baseSequence());
assertEquals(producerId, batch.producerId());
assertEquals(0, batch.producerEpoch());
return true;
}
return false;
}
}, produceResponse(tp0, 0, Errors.NONE, 0));
sender.run(time.milliseconds()); // connect.
sender.run(time.milliseconds()); // send.
sender.run(time.milliseconds()); // receive response
assertTrue(responseFuture.isDone());
assertEquals((long) transactionManager.sequenceNumber(tp0), 1L);
}
@Test
public void testAbortRetryWhenPidChanges() throws InterruptedException {
final long producerId = 343434L;
TransactionManager transactionManager = new TransactionManager();
transactionManager.setPidAndEpoch(producerId, (short) 0);
setupWithTransactionState(transactionManager);
client.setNode(new Node(1, "localhost", 33343));
int maxRetries = 10;
Metrics m = new Metrics();
Sender sender = new Sender(client,
metadata,
this.accumulator,
true,
MAX_REQUEST_SIZE,
ACKS_ALL,
maxRetries,
m,
time,
REQUEST_TIMEOUT,
50,
transactionManager,
apiVersions
);
Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds()); // connect.
sender.run(time.milliseconds()); // send.
String id = client.requests().peek().destination();
Node node = new Node(Integer.valueOf(id), "localhost", 0);
assertEquals(1, client.inFlightRequestCount());
assertTrue("Client ready status should be true", client.isReady(node, 0L));
client.disconnect(id);
assertEquals(0, client.inFlightRequestCount());
assertFalse("Client ready status should be false", client.isReady(node, 0L));
transactionManager.setPidAndEpoch(producerId + 1, (short) 0);
sender.run(time.milliseconds()); // receive error
sender.run(time.milliseconds()); // reconnect
sender.run(time.milliseconds()); // nothing to do, since the pid has changed. We should check the metrics for errors.
assertEquals("Expected requests to be aborted after pid change", 0, client.inFlightRequestCount());
KafkaMetric recordErrors = m.metrics().get(m.metricName("record-error-rate", METRIC_GROUP, ""));
assertTrue("Expected non-zero value for record send errors", recordErrors.value() > 0);
assertTrue(responseFuture.isDone());
assertEquals((long) transactionManager.sequenceNumber(tp0), 0L);
}
@Test
public void testResetWhenOutOfOrderSequenceReceived() throws InterruptedException {
final long producerId = 343434L;
TransactionManager transactionManager = new TransactionManager();
transactionManager.setPidAndEpoch(producerId, (short) 0);
setupWithTransactionState(transactionManager);
client.setNode(new Node(1, "localhost", 33343));
int maxRetries = 10;
Metrics m = new Metrics();
Sender sender = new Sender(client,
metadata,
this.accumulator,
true,
MAX_REQUEST_SIZE,
ACKS_ALL,
maxRetries,
m,
time,
REQUEST_TIMEOUT,
50,
transactionManager,
apiVersions
);
Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future;
sender.run(time.milliseconds()); // connect.
sender.run(time.milliseconds()); // send.
assertEquals(1, client.inFlightRequestCount());
client.respond(produceResponse(tp0, 0, Errors.OUT_OF_ORDER_SEQUENCE_NUMBER, 0));
sender.run(time.milliseconds());
assertTrue(responseFuture.isDone());
assertFalse("Expected transaction state to be reset upon receiving an OutOfOrderSequenceException", transactionManager.hasPid());
}
private void completedWithError(Future<RecordMetadata> future, Errors error) throws Exception {
assertTrue("Request should be completed", future.isDone());
try {
future.get();
fail("Should have thrown an exception.");
} catch (ExecutionException e) {
assertEquals(error.exception().getClass(), e.getCause().getClass());
}
}
private ProduceResponse produceResponse(TopicPartition tp, long offset, Errors error, int throttleTimeMs) {
ProduceResponse.PartitionResponse resp = new ProduceResponse.PartitionResponse(error, offset, RecordBatch.NO_TIMESTAMP);
Map<TopicPartition, ProduceResponse.PartitionResponse> partResp = Collections.singletonMap(tp, resp);
return new ProduceResponse(partResp, throttleTimeMs);
}
private void setupWithTransactionState(TransactionManager transactionManager) {
Map<String, String> metricTags = new LinkedHashMap<>();
metricTags.put("client-id", CLIENT_ID);
MetricConfig metricConfig = new MetricConfig().tags(metricTags);
this.metrics = new Metrics(metricConfig, time);
this.accumulator = new RecordAccumulator(batchSize, 1024 * 1024, CompressionType.NONE, 0L, 0L, metrics, time, apiVersions, transactionManager);
this.sender = new Sender(this.client,
this.metadata,
this.accumulator,
true,
MAX_REQUEST_SIZE,
ACKS_ALL,
MAX_RETRIES,
this.metrics,
this.time,
REQUEST_TIMEOUT,
50,
transactionManager,
apiVersions);
this.metadata.update(this.cluster, Collections.<String>emptySet(), time.milliseconds());
}
}
| rhauch/kafka | clients/src/test/java/org/apache/kafka/clients/producer/internals/SenderTest.java | Java | apache-2.0 | 26,164 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query.internal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.FunctionDomainException;
import org.apache.geode.cache.query.NameResolutionException;
import org.apache.geode.cache.query.QueryInvocationTargetException;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.TypeMismatchException;
import org.apache.geode.cache.query.internal.index.IndexProtocol;
import org.apache.geode.cache.query.internal.parse.OQLLexerTokenTypes;
public class CompiledComparisonTest {
private CompiledComparison compiledComparison;
private QueryExecutionContext queryExecutionContext;
@Before
public void setUp() {
CompiledValue aliasId = new CompiledID("Batman");
CompiledValue clarkLiteral = new CompiledLiteral("BruceWayne");
compiledComparison =
spy(new CompiledComparison(aliasId, clarkLiteral, OQLLexerTokenTypes.TOK_EQ));
queryExecutionContext = mock(QueryExecutionContext.class);
}
@Test
public void getSizeEstimateShouldReturnZeroWhenBothFieldsAreIndexed()
throws NameResolutionException, TypeMismatchException, FunctionDomainException,
QueryInvocationTargetException {
IndexInfo[] indexInfos = new IndexInfo[] {mock(IndexInfo.class), mock(IndexInfo.class)};
doReturn(indexInfos).when(compiledComparison).getIndexInfo(queryExecutionContext);
assertThat(compiledComparison.getSizeEstimate(queryExecutionContext)).isEqualTo(0);
}
@Test
public void getSizeEstimateShouldReturnZeroWhenTheIndexKeyIsUndefined()
throws NameResolutionException, TypeMismatchException, FunctionDomainException,
QueryInvocationTargetException {
IndexInfo indexInfo = mock(IndexInfo.class);
when(indexInfo.evaluateIndexKey(queryExecutionContext)).thenReturn(QueryService.UNDEFINED);
IndexInfo[] indexInfos = new IndexInfo[] {indexInfo};
doReturn(indexInfos).when(compiledComparison).getIndexInfo(queryExecutionContext);
assertThat(compiledComparison.getSizeEstimate(queryExecutionContext)).isEqualTo(0);
}
@Test
public void getSizeEstimateShouldReturnOneWhenThereAreNoIndexes() throws NameResolutionException,
TypeMismatchException, FunctionDomainException, QueryInvocationTargetException {
assertThat(compiledComparison.getSizeEstimate(queryExecutionContext)).isEqualTo(1);
}
@Test
public void getSizeEstimateShouldReturnHintSizeWhenTheIndexIsHinted()
throws NameResolutionException, TypeMismatchException, FunctionDomainException,
QueryInvocationTargetException {
String indexName = "MyIndex";
when(queryExecutionContext.isHinted(indexName)).thenReturn(true);
when(queryExecutionContext.getHintSize(indexName)).thenReturn(10);
IndexProtocol indexProtocol = mock(IndexProtocol.class);
when(indexProtocol.getName()).thenReturn(indexName);
IndexInfo indexInfo = spy(new IndexInfo(null, null, indexProtocol, 0, null, 0));
doReturn("Key1").when(indexInfo).evaluateIndexKey(queryExecutionContext);
IndexInfo[] indexInfos = new IndexInfo[] {indexInfo};
doReturn(indexInfos).when(compiledComparison).getIndexInfo(queryExecutionContext);
assertThat(compiledComparison.getSizeEstimate(queryExecutionContext)).isEqualTo(10);
}
@Test
public void getSizeEstimateShouldReturnIndexSizeEstimate() throws NameResolutionException,
TypeMismatchException, FunctionDomainException, QueryInvocationTargetException {
String indexName = "MyIndex";
IndexProtocol indexProtocol = mock(IndexProtocol.class);
when(indexProtocol.getName()).thenReturn(indexName);
when(indexProtocol.getSizeEstimate(any(), anyInt(), anyInt())).thenReturn(15);
IndexInfo indexInfo = spy(new IndexInfo(null, null, indexProtocol, 0, null, 0));
doReturn("Key1").when(indexInfo).evaluateIndexKey(queryExecutionContext);
IndexInfo[] indexInfos = new IndexInfo[] {indexInfo};
doReturn(indexInfos).when(compiledComparison).getIndexInfo(queryExecutionContext);
assertThat(compiledComparison.getSizeEstimate(queryExecutionContext)).isEqualTo(15);
}
@Test
public void evaluateHandlesEntryDestroyedExceptionThrownByRegionEntryGetValue()
throws NameResolutionException, TypeMismatchException, QueryInvocationTargetException,
FunctionDomainException {
CompiledValue left = mock(CompiledValue.class);
CompiledValue right = mock(CompiledValue.class);
ExecutionContext context = mock(ExecutionContext.class);
when(context.isCqQueryContext()).thenReturn(true);
Region.Entry<?, ?> leftEntry = mock(Region.Entry.class);
when(left.evaluate(context)).thenReturn(leftEntry);
when(leftEntry.getValue()).thenThrow(new EntryDestroyedException());
Region.Entry<?, ?> rightEntry = mock(Region.Entry.class);
when(right.evaluate(context)).thenReturn(rightEntry);
when(rightEntry.getValue()).thenThrow(new EntryDestroyedException());
CompiledComparison comparison =
spy(new CompiledComparison(left, right, OQLLexerTokenTypes.TOK_EQ));
comparison.evaluate(context);
}
}
| davinash/geode | geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledComparisonTest.java | Java | apache-2.0 | 6,278 |
/*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mypackage.pipeline;
import com.google.gson.Gson;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.schemas.JavaFieldSchema;
import org.apache.beam.sdk.schemas.annotations.DefaultSchema;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MyPipeline {
/**
* The logger to output status messages to.
*/
private static final Logger LOG = LoggerFactory.getLogger(MyPipeline.class);
/**
* The {@link Options} class provides the custom execution options passed by the
* executor at the command-line.
*/
    public interface Options extends DataflowPipelineOptions {
        // Custom options assumed by this template; the option names below are illustrative.
        String getInputPath();
        void setInputPath(String inputPath);

        String getTableName();
        void setTableName(String tableName);
    }
/**
* The main entry-point for pipeline execution. This method will start the
     * pipeline but will not wait for its execution to finish. If blocking
* execution is required, use the {@link MyPipeline#run(Options)} method to
* start the pipeline and invoke {@code result.waitUntilFinish()} on the
* {@link PipelineResult}.
*
* @param args The command-line args passed by the executor.
*/
public static void main(String[] args) {
Options options = PipelineOptionsFactory.fromArgs(args).as(Options.class);
run(options);
}
/**
* A class used for parsing JSON web server events
     * Annotated with {@code @DefaultSchema} to allow the use of Beam schemas and the {@code Row} object
*/
@DefaultSchema(JavaFieldSchema.class)
public static class CommonLog {
String user_id;
String ip;
Double lat;
Double lng;
String timestamp;
String http_request;
String user_agent;
Long http_response;
Long num_bytes;
}
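    // Illustrative example of a JSON event CommonLog is meant to parse (all values are made up):
    //   {"user_id":"1234","ip":"203.0.113.7","lat":40.71,"lng":-74.0,
    //    "timestamp":"2019-06-19T16:06:45Z","http_request":"GET /index.html HTTP/1.0",
    //    "user_agent":"Mozilla/5.0","http_response":200,"num_bytes":1024}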
/**
     * A DoFn accepting JSON strings and outputting CommonLog elements with a Beam schema
*/
static class JsonToCommonLog extends DoFn<String, CommonLog> {
@ProcessElement
public void processElement(@Element String json, OutputReceiver<CommonLog> r) throws Exception {
Gson gson = new Gson();
CommonLog commonLog = gson.fromJson(json, CommonLog.class);
r.output(commonLog);
}
}
/**
* Runs the pipeline to completion with the specified options. This method does
* not wait until the pipeline is finished before returning. Invoke
* {@code result.waitUntilFinish()} on the result object to block until the
* pipeline is finished running if blocking programmatic execution is required.
*
* @param options The execution options.
* @return The pipeline result.
*/
public static PipelineResult run(Options options) {
// Create the pipeline
Pipeline pipeline = Pipeline.create(options);
options.setJobName("my-pipeline-" + System.currentTimeMillis());
        /*
         * Steps:
         * 1) Read JSON event lines from Cloud Storage
         * 2) Parse them into CommonLog elements
         * 3) Write them to BigQuery
         */
pipeline.apply("ReadFromGCS", TextIO.read().from(input))
.apply("ParseJson", ParDo.of(new JsonToCommonLog()))
.apply("WriteToBQ",
BigQueryIO.<CommonLog>write().to(output).useBeamSchema()
.withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE)
.withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
LOG.info("Building pipeline...");
return pipeline.run();
}
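    // Illustrative invocation sketch; the option names match the illustrative Options interface
    // above, and the project/bucket/table values are placeholders:
    //   mvn compile exec:java -Dexec.mainClass=com.mypackage.pipeline.MyPipeline \
    //     -Dexec.args="--project=my-project --runner=DataflowRunner --region=us-central1 \
    //       --inputPath=gs://my-bucket/events.json --tableName=my-project:logs.logs"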
} | GoogleCloudPlatform/training-data-analyst | quests/dataflow/2_Branching_Pipelines/labs/src/main/java/com/mypackage/pipeline/MyPipeline.java | Java | apache-2.0 | 4,472 |
package com.juncoder.aggregationnews.utils;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.telephony.TelephonyManager;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2016/8/2
 *     desc  : network-related utility methods
* </pre>
*/
public class NetworkUtils {
private NetworkUtils() {
throw new UnsupportedOperationException("u can't fuck me...");
}
private static final int NETWORK_WIFI = 1; // wifi network
private static final int NETWORK_4G = 4; // "4G" networks
private static final int NETWORK_3G = 3; // "3G" networks
private static final int NETWORK_2G = 2; // "2G" networks
private static final int NETWORK_UNKNOWN = 5; // unknown network
private static final int NETWORK_NO = -1; // no network
private static final int NETWORK_TYPE_GSM = 16;
private static final int NETWORK_TYPE_TD_SCDMA = 17;
private static final int NETWORK_TYPE_IWLAN = 18;
/**
     * Open the network settings screen
     * <p>On API 11+ (Android 3.0+) this opens the general settings screen; below that, the wireless settings screen</p>
     *
     * @param context the context
*/
public static void openWirelessSettings(Context context) {
if (android.os.Build.VERSION.SDK_INT > 10) {
context.startActivity(new Intent(android.provider.Settings.ACTION_SETTINGS));
} else {
context.startActivity(new Intent(android.provider.Settings.ACTION_WIRELESS_SETTINGS));
}
}
/**
     * Get the active network info
     *
     * @param context the context
* @return NetworkInfo
*/
private static NetworkInfo getActiveNetworkInfo(Context context) {
ConnectivityManager cm = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
return cm.getActiveNetworkInfo();
}
/**
     * Check whether the network is available
     * <p>Requires the permission {@code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>}</p>
     *
     * @param context the context
     * @return {@code true}: available<br>{@code false}: not available
*/
public static boolean isAvailable(Context context) {
NetworkInfo info = getActiveNetworkInfo(context);
return info != null && info.isAvailable();
}
/**
     * Check whether the network is connected
     * <p>Requires the permission {@code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>}</p>
     *
     * @param context the context
     * @return {@code true}: connected<br>{@code false}: not connected
*/
public static boolean isConnected(Context context) {
NetworkInfo info = getActiveNetworkInfo(context);
return info != null && info.isConnected();
}
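    // Illustrative usage sketch (context is any Context, e.g. an Activity):
    //   if (!NetworkUtils.isConnected(context)) {
    //       NetworkUtils.openWirelessSettings(context);
    //   }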
/**
     * Check whether the current network is 4G
     * <p>Requires the permission {@code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>}</p>
     *
     * @param context the context
     * @return {@code true}: yes<br>{@code false}: no
*/
public static boolean is4G(Context context) {
NetworkInfo info = getActiveNetworkInfo(context);
return info != null && info.isAvailable() && info.getSubtype() == TelephonyManager.NETWORK_TYPE_LTE;
}
/**
     * Check whether Wi-Fi is connected
     * <p>Requires the permission {@code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>}</p>
     *
     * @param context the context
     * @return {@code true}: connected<br>{@code false}: not connected
*/
public static boolean isWifiConnected(Context context) {
ConnectivityManager cm = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
return cm != null && cm.getActiveNetworkInfo() != null
&& cm.getActiveNetworkInfo().getType() == ConnectivityManager.TYPE_WIFI;
}
/**
     * Get the name of the mobile network operator
     * <p>e.g. China Unicom, China Mobile, China Telecom</p>
     *
     * @param context the context
     * @return the mobile network operator name
*/
public static String getNetworkOperatorName(Context context) {
TelephonyManager tm = (TelephonyManager) context
.getSystemService(Context.TELEPHONY_SERVICE);
return tm != null ? tm.getNetworkOperatorName() : null;
}
/**
     * Get the phone radio type
     *
     * @param context the context
     * @return the phone type
     * <ul>
     * <li>{@link TelephonyManager#PHONE_TYPE_NONE } : 0 unknown phone type</li>
     * <li>{@link TelephonyManager#PHONE_TYPE_GSM  } : 1 GSM (China Mobile and China Unicom)</li>
     * <li>{@link TelephonyManager#PHONE_TYPE_CDMA } : 2 CDMA (China Telecom)</li>
     * <li>{@link TelephonyManager#PHONE_TYPE_SIP  } : 3 SIP</li>
* </ul>
*/
public static int getPhoneType(Context context) {
TelephonyManager tm = (TelephonyManager) context
.getSystemService(Context.TELEPHONY_SERVICE);
return tm != null ? tm.getPhoneType() : -1;
}
/**
     * Get the current network type (WIFI, 2G, 3G, 4G)
     * <p>Requires the permission {@code <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>}</p>
     *
     * @param context the context
     * @return the network type
* <ul>
* <li>{@link #NETWORK_WIFI } = 1;</li>
* <li>{@link #NETWORK_4G } = 4;</li>
* <li>{@link #NETWORK_3G } = 3;</li>
* <li>{@link #NETWORK_2G } = 2;</li>
* <li>{@link #NETWORK_UNKNOWN} = 5;</li>
* <li>{@link #NETWORK_NO } = -1;</li>
* </ul>
*/
public static int getNetWorkType(Context context) {
int netType = NETWORK_NO;
NetworkInfo info = getActiveNetworkInfo(context);
if (info != null && info.isAvailable()) {
if (info.getType() == ConnectivityManager.TYPE_WIFI) {
netType = NETWORK_WIFI;
} else if (info.getType() == ConnectivityManager.TYPE_MOBILE) {
switch (info.getSubtype()) {
case NETWORK_TYPE_GSM:
case TelephonyManager.NETWORK_TYPE_GPRS:
case TelephonyManager.NETWORK_TYPE_CDMA:
case TelephonyManager.NETWORK_TYPE_EDGE:
case TelephonyManager.NETWORK_TYPE_1xRTT:
case TelephonyManager.NETWORK_TYPE_IDEN:
netType = NETWORK_2G;
break;
case NETWORK_TYPE_TD_SCDMA:
case TelephonyManager.NETWORK_TYPE_EVDO_A:
case TelephonyManager.NETWORK_TYPE_UMTS:
case TelephonyManager.NETWORK_TYPE_EVDO_0:
case TelephonyManager.NETWORK_TYPE_HSDPA:
case TelephonyManager.NETWORK_TYPE_HSUPA:
case TelephonyManager.NETWORK_TYPE_HSPA:
case TelephonyManager.NETWORK_TYPE_EVDO_B:
case TelephonyManager.NETWORK_TYPE_EHRPD:
case TelephonyManager.NETWORK_TYPE_HSPAP:
netType = NETWORK_3G;
break;
case NETWORK_TYPE_IWLAN:
case TelephonyManager.NETWORK_TYPE_LTE:
netType = NETWORK_4G;
break;
default:
String subtypeName = info.getSubtypeName();
if (subtypeName.equalsIgnoreCase("TD-SCDMA")
|| subtypeName.equalsIgnoreCase("WCDMA")
|| subtypeName.equalsIgnoreCase("CDMA2000")) {
netType = NETWORK_3G;
} else {
netType = NETWORK_UNKNOWN;
}
break;
}
} else {
netType = NETWORK_UNKNOWN;
}
}
return netType;
}
/**
     * Get the name of the current network type (WIFI, 2G, 3G, 4G)
     * <p>Relies on {@link #getNetWorkType(Context)}</p>
     *
     * @param context the context
     * @return the network type name
* <ul>
* <li>NETWORK_WIFI </li>
* <li>NETWORK_4G </li>
* <li>NETWORK_3G </li>
* <li>NETWORK_2G </li>
* <li>NETWORK_UNKNOWN</li>
* <li>NETWORK_NO </li>
* </ul>
*/
public static String getNetWorkTypeName(Context context) {
switch (getNetWorkType(context)) {
case NETWORK_WIFI:
return "NETWORK_WIFI";
case NETWORK_4G:
return "NETWORK_4G";
case NETWORK_3G:
return "NETWORK_3G";
case NETWORK_2G:
return "NETWORK_2G";
case NETWORK_NO:
return "NETWORK_NO";
default:
return "NETWORK_UNKNOWN";
}
}
} | zhongjuncoder/AggregationNews | app/src/main/java/com/juncoder/aggregationnews/utils/NetworkUtils.java | Java | apache-2.0 | 9,087 |
/*******************************************************************************
* Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
import static org.junit.Assert.assertTrue;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opennebula.client.Client;
import org.opennebula.client.OneResponse;
import org.opennebula.client.OneSystem;
import org.opennebula.client.group.Group;
import org.opennebula.client.group.GroupPool;
import org.w3c.dom.Node;
public class GroupTest
{
private static Group group;
private static GroupPool groupPool;
private static Client client;
private static OneResponse res;
private static String group_name = "test_group";
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception
{
client = new Client();
groupPool = new GroupPool(client);
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception
{
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception
{
res = Group.allocate(client, group_name);
int group_id = res.isError() ? -1 : Integer.parseInt(res.getMessage());
group = new Group(group_id, client);
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception
{
group.delete();
}
@Test
public void allocate()
{
group.delete();
res = Group.allocate(client, group_name);
assertTrue( res.getErrorMessage(), !res.isError() );
int group_id = res.isError() ? -1 : Integer.parseInt(res.getMessage());
group = new Group(group_id, client);
groupPool.info();
boolean found = false;
for(Group img : groupPool)
{
found = found || img.getName().equals(group_name);
}
assertTrue( found );
}
@Test
public void info()
{
res = group.info();
assertTrue( res.getErrorMessage(), !res.isError() );
assertTrue( group.id() >= 100 );
assertTrue( group.getName().equals(group_name) );
}
@Test
public void delete()
{
res = group.delete();
assertTrue( res.getErrorMessage(), !res.isError() );
res = group.info();
assertTrue( res.isError() );
res = groupPool.info();
assertTrue( res.getErrorMessage(), !res.isError() );
boolean found = false;
for(Group g : groupPool)
{
found = found || g.getName().equals(group_name);
}
assertTrue( !found );
}
@Test
    public void defaultQuotas()
{
OneSystem system = new OneSystem(client);
res = system.getGroupQuotas();
assertTrue( res.getErrorMessage(), !res.isError() );
res = system.setGroupQuotas("VM = [ VMS = 7, MEMORY = 0, CPU = 3, VOLATILE_SIZE = 1 ]");
assertTrue( res.getErrorMessage(), !res.isError() );
Node node = system.getGroupQuotasXML();
XPathFactory factory = XPathFactory.newInstance();
XPath xpath = factory.newXPath();
try
{
assertTrue( xpath.evaluate("VM_QUOTA/VM/VMS", node).equals("7") );
} catch (XPathExpressionException e)
{
assertTrue(e.getMessage(), false);
}
}
// Commented out, secondary groups do not exist any more
/*
@Test
public void userGroupRelations()
{
Hashtable<String, User> users = new Hashtable<String, User>();
Hashtable<String, Group> groups = new Hashtable<String, Group>();
// Create all users and groups. Add user_* to corresponding group_*
String[] names = {"a", "b", "c", "d"};
for(String name : names)
{
res = User.allocate(client, "user_"+name, "password");
assertTrue( res.getErrorMessage(), !res.isError() );
users.put( name,
new User(Integer.parseInt(res.getMessage()), client )
);
res = Group.allocate(client, "group_"+name);
assertTrue( res.getErrorMessage(), !res.isError() );
groups.put( name,
new Group(Integer.parseInt(res.getMessage()), client )
);
users.get(name).addgroup( groups.get(name).id() );
}
// Add all users to group_b
for( User u : users.values() )
{
u.addgroup( groups.get("b").id() );
}
// Change user_c & _d main group
users.get("c").chgrp( groups.get("d").id() );
users.get("d").chgrp( groups.get("c").id() );
// Check cross-references so far
for( User u : users.values() )
{
assertTrue( !u.info().isError() );
}
for( Group g : groups.values() )
{
assertTrue( !g.info().isError() );
}
assertTrue( users.get("a").isPartOf( groups.get("a").id() ) );
assertTrue( users.get("a").isPartOf( groups.get("b").id() ) );
assertFalse( users.get("a").isPartOf( groups.get("c").id() ) );
assertFalse( users.get("a").isPartOf( groups.get("d").id() ) );
assertFalse( users.get("b").isPartOf( groups.get("a").id() ) );
assertTrue( users.get("b").isPartOf( groups.get("b").id() ) );
assertFalse( users.get("b").isPartOf( groups.get("c").id() ) );
assertFalse( users.get("b").isPartOf( groups.get("d").id() ) );
assertFalse( users.get("c").isPartOf( groups.get("a").id() ) );
assertTrue( users.get("c").isPartOf( groups.get("b").id() ) );
assertTrue( users.get("c").isPartOf( groups.get("c").id() ) );
assertTrue( users.get("c").isPartOf( groups.get("d").id() ) );
assertFalse( users.get("d").isPartOf( groups.get("a").id() ) );
assertTrue( users.get("d").isPartOf( groups.get("b").id() ) );
assertTrue( users.get("d").isPartOf( groups.get("c").id() ) );
assertTrue( users.get("d").isPartOf( groups.get("d").id() ) );
assertTrue( groups.get("a").contains( users.get("a").id() ) );
assertFalse( groups.get("a").contains( users.get("b").id() ) );
assertFalse( groups.get("a").contains( users.get("c").id() ) );
assertFalse( groups.get("a").contains( users.get("d").id() ) );
assertTrue( groups.get("b").contains( users.get("a").id() ) );
assertTrue( groups.get("b").contains( users.get("b").id() ) );
assertTrue( groups.get("b").contains( users.get("c").id() ) );
assertTrue( groups.get("b").contains( users.get("d").id() ) );
assertFalse( groups.get("c").contains( users.get("a").id() ) );
assertFalse( groups.get("c").contains( users.get("b").id() ) );
assertTrue( groups.get("c").contains( users.get("c").id() ) );
assertTrue( groups.get("c").contains( users.get("d").id() ) );
assertFalse( groups.get("d").contains( users.get("a").id() ) );
assertFalse( groups.get("d").contains( users.get("b").id() ) );
assertTrue( groups.get("d").contains( users.get("c").id() ) );
assertTrue( groups.get("d").contains( users.get("d").id() ) );
}
*/
}
| Terradue/one | src/oca/java/test/GroupTest.java | Java | apache-2.0 | 8,209 |
/**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.http.handler;
import org.yx.annotation.Bean;
@Bean
public class ReqToStringHandler implements HttpHandler {
@Override
public int order() {
return 1700;
}
@Override
public void handle(WebContext ctx) throws Exception {
Object obj = ctx.data();
if (obj == null) {
return;
}
if (!(obj instanceof byte[])) {
return;
}
byte[] bs = (byte[]) obj;
String data = new String(bs, ctx.charset());
ctx.data(data);
}
}
| youtongluan/sumk | src/main/java/org/yx/http/handler/ReqToStringHandler.java | Java | apache-2.0 | 1,061 |
package com.coolweather.android.db;
import org.litepal.crud.DataSupport;
/**
* Created by xiaoY on 2017/2/9.
*/
public class Province extends DataSupport {
private int id;
private String provinceName;
private int provinceCode;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getProvinceName() {
return provinceName;
}
public void setProvinceName(String provinceName) {
this.provinceName = provinceName;
}
public int getProvinceCode() {
return provinceCode;
}
public void setProvinceCode(int provinceCode) {
this.provinceCode = provinceCode;
}
}
| KeithHongYu/coolweather | app/src/main/java/com/coolweather/android/db/Province.java | Java | apache-2.0 | 710 |
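A minimal usage sketch for the Province model above, assuming LitePal's standard DataSupport API (save() and findAll(Class)) and an already-configured litepal.xml; the province values and class name here are illustrative only.
import java.util.List;
import org.litepal.crud.DataSupport;
import com.coolweather.android.db.Province;

public class ProvinceUsageSketch {
    public static void cacheAndQuery() {
        // Persist one row through the save() method inherited from DataSupport
        Province province = new Province();
        province.setProvinceName("Guangdong");
        province.setProvinceCode(9);
        province.save();

        // Read every cached province back out of the local LitePal database
        List<Province> provinces = DataSupport.findAll(Province.class);
        for (Province p : provinces) {
            System.out.println(p.getProvinceCode() + " - " + p.getProvinceName());
        }
    }
}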
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.inspector2.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.inspector2.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* EcrContainerImageMetadata JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EcrContainerImageMetadataJsonUnmarshaller implements Unmarshaller<EcrContainerImageMetadata, JsonUnmarshallerContext> {
public EcrContainerImageMetadata unmarshall(JsonUnmarshallerContext context) throws Exception {
EcrContainerImageMetadata ecrContainerImageMetadata = new EcrContainerImageMetadata();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("tags", targetDepth)) {
context.nextToken();
ecrContainerImageMetadata.setTags(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))
.unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return ecrContainerImageMetadata;
}
private static EcrContainerImageMetadataJsonUnmarshaller instance;
public static EcrContainerImageMetadataJsonUnmarshaller getInstance() {
if (instance == null)
instance = new EcrContainerImageMetadataJsonUnmarshaller();
return instance;
}
}
| aws/aws-sdk-java | aws-java-sdk-inspector2/src/main/java/com/amazonaws/services/inspector2/model/transform/EcrContainerImageMetadataJsonUnmarshaller.java | Java | apache-2.0 | 2,906 |
package org.echoice.ums.plugins;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.echoice.ums.dao.EcGroupDao;
import org.echoice.ums.domain.EcGroup;
import org.echoice.ums.domain.EcObjects;
import org.echoice.ums.domain.EcRole;
import org.echoice.ums.domain.EcUser;
import org.echoice.ums.service.UmsServiceFactory;
public class OAUserAassingRoleFilterCmd implements Command<Boolean,String>{
	private final String FILTER_ROLE_OBJECT_ALIAS="FILTER_ROLE_OBJECT_ALIAS";//role ids that require the OA job-number check
	private final String FILTER_ROLE_OBJECT_ALIAS2="FILTER_ROLE_OBJECT_ALIAS2";//role ids that may be assigned when the group level is greater than X and the group is a leaf node
	private final String FILTER_GTROUP_LEVEL_ALIAS="FILTER_GTROUP_LEVEL_ALIAS";//user group level must be greater than X
private Long userIdArr[];
private Long roleIdsArr[];
	private String msg="Non-OA job numbers cannot be assigned leadership roles";
public OAUserAassingRoleFilterCmd(Long[] userIdArr, Long[] roleIdsArr) {
this.userIdArr = userIdArr;
this.roleIdsArr = roleIdsArr;
}
public Boolean execute(String obj) {
// TODO Auto-generated method stub
EcObjects objects=UmsServiceFactory.getEcObjectsDao().getObjectsByAlias(FILTER_ROLE_OBJECT_ALIAS);
String ids=StringUtils.join(userIdArr, ",");
if(objects!=null){
if(StringUtils.isNotBlank(objects.getNote())){
String roleNameStr=findRoleNameStr(objects.getNote());
if(StringUtils.isNotBlank(roleNameStr)){
String configRoleArr[]=StringUtils.splitByWholeSeparator(objects.getNote(), ",");
					//check whether any of the roles being assigned are among those that require the OA job-number check
boolean isCheckRole=false;
String tmpId=null;
for (int i = 0; i < roleIdsArr.length; i++) {
tmpId=String.valueOf(roleIdsArr[i]);
for (int k = 0; k < configRoleArr.length; k++) {
if(tmpId.equals(configRoleArr[k])){
isCheckRole=true;
break;
}
}
}
if(isCheckRole){
						//fetch the user records for the given user ids
List<EcUser> userList=UmsServiceFactory.getEcUserDao().findUserListByIds(ids);
String jobNumberArr[]=new String[userList.size()];
int j=0;
for (EcUser ecUser : userList) {
jobNumberArr[j]=ecUser.getAlias();
j++;
}
int count=UmsServiceFactory.getAppPluginDao().findOAUserList("'"+StringUtils.join(jobNumberArr,"','")+"'");
if(count!=jobNumberArr.length){
msg="<div style='word-wrap: break-word;width:300px'>" +
"·ÇOA¹¤ºÅ£¬²»¿É·ÖÅ䣨"+roleNameStr+"£©½ÇÉ«" +
"</div>";
return false;
}
}
}
}
}
EcObjects objects2=UmsServiceFactory.getEcObjectsDao().getObjectsByAlias(FILTER_ROLE_OBJECT_ALIAS2);
if(objects2!=null){
if(StringUtils.isNotBlank(objects2.getNote())){
				//look up the role names for the given role ids
String roleNameStr=findRoleNameStr(objects2.getNote());
if(StringUtils.isNotBlank(roleNameStr)){
					//check whether the selected roles are among the restricted roles
String configRoleArr[]=StringUtils.splitByWholeSeparator(objects2.getNote(), ",");
					//check whether any of the roles being assigned are configured to be filtered
boolean isCheckRole=false;
String tmpId=null;
for (int i = 0; i < roleIdsArr.length; i++) {
tmpId=String.valueOf(roleIdsArr[i]);
for (int k = 0; k < configRoleArr.length; k++) {
if(tmpId.equals(configRoleArr[k])){
isCheckRole=true;
break;
}
}
}
if(isCheckRole){
						//get the user's group level; some roles may skip the OA job-number check,
						//e.g. when the level is greater than 3 and the group is a leaf node
List<EcGroup> groupList=UmsServiceFactory.getAppPluginDao().findUserGroupNotOA(ids);
if(groupList!=null&&groupList.size()>0){
EcGroupDao ecGroupDao=UmsServiceFactory.getEcGroupDao();
List<Long> parentIdList=ecGroupDao.findGroupTreeParent();
StringBuffer bf=new StringBuffer();
bf.append("|");
for (Long temp : parentIdList) {
bf.append(temp);
bf.append("|");
}
String strParentTree=bf.toString();
							//determine whether the group is a leaf node
for (EcGroup ecGroup : groupList) {
if(strParentTree.indexOf("|"+ecGroup.getGroupId()+"|")!=-1){
//msg="·ÇOA¹¤ºÅ¼°Ä©¼¶¿âµãÏÂÓû§£¬²»¿É·ÖÅäÁìµ¼ÉóºË½ÇÉ«";
msg="<div style='word-wrap: break-word;width:300px'>" +
"OA¹¤ºÅÓû§»òÄ©¼¶¿âµãÏ·ÇOA¹¤ºÅÓû§£¬²Å¿É·ÖÅ䣨"+roleNameStr+"£©½ÇÉ«" +
"</div>";
return false;
}
}
							//read the configured level threshold for comparison
EcObjects objectsLevel=UmsServiceFactory.getEcObjectsDao().getObjectsByAlias(FILTER_GTROUP_LEVEL_ALIAS);
if(objectsLevel!=null&&StringUtils.isNotBlank(objectsLevel.getNote())){
int level=Integer.parseInt(objectsLevel.getNote());
int groupLevel=0;
for (EcGroup ecGroup : groupList) {
groupLevel=StringUtils.splitByWholeSeparator(ecGroup.getAlias(), "-").length;
if(groupLevel<level){
//msg="·ÇOA¹¤ºÅ¼°¿âµã²ã¼¶´óÓÚ"+level+"µÄÄ©¼¶¿âµãÏÂÓû§£¬²»¿É·ÖÅäÁìµ¼ÉóºË½ÇÉ«";
msg="<div style='word-wrap: break-word;width:300px'>" +
"OA¹¤ºÅÓû§»ò²ã¼¶´óÓÚ"+level+"µÄÄ©¼¶¿âµãÏ·ÇOA¹¤ºÅÓû§£¬<br />²Å¿É·ÖÅä["+roleNameStr+"]½ÇÉ«" +
"<div>";
return false;
}
}
}
}
}
}
}
}
return true;
}
private String findRoleNameStr(String roleIds){
List<EcRole> roleList=UmsServiceFactory.getEcRoleDao().findRoleByIDs(roleIds);
if(roleList!=null&&roleList.size()>0){
String roleNameArr[]=new String[roleList.size()];
int j=0;
for (EcRole ecRole : roleList) {
roleNameArr[j]=ecRole.getName();
j++;
}
String roleNameStr=StringUtils.join(roleNameArr, ",");
return roleNameStr;
}
return null;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
}
| junyangren/echoice-ums | src/main/java/org/echoice/ums/plugins/OAUserAassingRoleFilterCmd.java | Java | apache-2.0 | 5,676 |
package com.dianping.cat.report.page.alteration;
public enum JspFile {
INSERT("/jsp/report/alteration/alter_insertResult.jsp"),
VIEW("/jsp/report/alteration/alter_view.jsp"),
;
private String m_path;
private JspFile(String path) {
m_path = path;
}
public String getPath() {
return m_path;
}
}
| ServerStarted/cat | cat-home/src/main/java/com/dianping/cat/report/page/alteration/JspFile.java | Java | apache-2.0 | 310 |
package com.cloudata.keyvalue;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.io.File;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import org.robotninjas.barge.RaftException;
import org.robotninjas.barge.RaftMembership;
import org.robotninjas.barge.RaftService;
import org.robotninjas.barge.Replica;
import org.robotninjas.barge.log.journalio.JournalRaftLog;
import org.robotninjas.barge.proto.RaftEntry.Membership;
import org.robotninjas.barge.proto.RaftEntry.SnapshotInfo;
import org.robotninjas.barge.rpc.netty.NettyRaftService;
import org.robotninjas.barge.state.ConfigurationState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudata.ProtobufServer;
import com.cloudata.cluster.ClusterService;
import com.cloudata.cluster.RepairService;
import com.cloudata.keyvalue.protobuf.KeyValueProtobufEndpoint;
import com.cloudata.keyvalue.redis.RedisEndpoint;
import com.cloudata.keyvalue.redis.RedisServer;
import com.cloudata.keyvalue.web.WebModule;
import com.cloudata.services.CloseableService;
import com.cloudata.services.CompoundService;
import com.cloudata.services.JettyService;
import com.cloudata.snapshots.LocalSnapshotStorage;
import com.cloudata.snapshots.SnapshotStorage;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.net.HostAndPort;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.protobuf.Service;
public class KeyValueServer extends CompoundService {
private static final Logger log = LoggerFactory.getLogger(KeyValueServer.class);
final File baseDir;
private final Replica local;
private final RaftService raft;
private final KeyValueStateMachine stateMachine;
private final KeyValueConfig config;
public KeyValueServer(File baseDir, Replica local, KeyValueConfig config, SnapshotStorage snapshotStorage) {
Preconditions.checkNotNull(config);
Preconditions.checkNotNull(config.seedConfig);
this.baseDir = baseDir;
this.local = local;
this.config = config.deepCopy();
File logDir = new File(baseDir, "logs");
File stateDir = new File(baseDir, "state");
logDir.mkdirs();
stateDir.mkdirs();
ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors
.newCachedThreadPool(new DefaultThreadFactory("pool-worker-keyvalue")));
this.stateMachine = new KeyValueStateMachine(executor, stateDir, snapshotStorage);
{
JournalRaftLog.Builder logBuilder = new JournalRaftLog.Builder();
logBuilder.logDirectory = logDir;
logBuilder.stateMachine = stateMachine;
logBuilder.config = ConfigurationState.buildSeed(config.seedConfig);
NettyRaftService.Builder b = NettyRaftService.newBuilder();
// b.seedConfig = config.seedConfig;
b.log = logBuilder;
// b.listener = groupOfCounters;
this.raft = b.build();
}
}
public void bootstrap() {
Membership membership = Membership.newBuilder().addMembers(local.getKey()).build();
this.raft.bootstrap(membership);
}
public String getHttpUrl() {
return "http://localhost:" + config.httpPort + "/";
}
public static void main(String... args) throws Exception {
final int port = Integer.parseInt(args[0]);
Replica local = Replica.fromString("localhost:" + (10000 + port));
KeyValueConfig config = new KeyValueConfig();
File baseDir = new File(args[0]);
config.httpPort = (9990 + port);
int redisPort = 6379 + port;
int protobufPort = 2000 + port;
config.redisEndpoint = HostAndPort.fromParts("", redisPort);
config.protobufEndpoint = HostAndPort.fromParts("", protobufPort);
SnapshotStorage snapshotStore = new LocalSnapshotStorage(new File(baseDir, "snapshots"));
final KeyValueServer server = new KeyValueServer(baseDir, local, config, snapshotStore);
server.start();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
server.stop();
} catch (Exception e) {
log.error("Error stopping server", e);
}
}
});
}
public HostAndPort getRedisSocketAddress() {
return config.redisEndpoint;
}
public HostAndPort getProtobufSocketAddress() {
return config.protobufEndpoint;
}
public String getRaftServerKey() {
return raft.getServerKey();
}
public void reconfigure(List<String> servers) throws RaftException {
RaftMembership oldMembership = raft.getClusterMembership();
RaftMembership newMembership = new RaftMembership(-1, servers);
ListenableFuture<Boolean> future = raft.setConfiguration(oldMembership, newMembership);
Boolean result = Futures.get(future, RaftException.class);
if (!Boolean.TRUE.equals(result)) {
throw new IllegalStateException();
}
}
public boolean isLeader() {
return raft.isLeader();
}
@Override
protected List<com.google.common.util.concurrent.Service> buildServices() {
Injector injector = Guice.createInjector(new KeyValueModule(stateMachine), new WebModule());
List<com.google.common.util.concurrent.Service> services = Lists.newArrayList();
services.add(raft);
JettyService jetty = injector.getInstance(JettyService.class);
jetty.init(config.httpPort);
services.add(jetty);
if (config.redisEndpoint != null) {
long storeId = 1;
RedisServer redisServer = new RedisServer(stateMachine, storeId);
RedisEndpoint redisEndpoint = new RedisEndpoint(config.redisEndpoint, redisServer);
services.add(redisEndpoint);
}
if (config.protobufEndpoint != null) {
ProtobufServer protobufServer = new ProtobufServer(config.protobufEndpoint);
KeyValueProtobufEndpoint endpoint = injector.getInstance(KeyValueProtobufEndpoint.class);
Service service = KeyValueProtocol.KeyValueService.newReflectiveService(endpoint);
protobufServer.addService(service);
services.add(protobufServer);
}
if (config.gossip != null) {
ScheduledExecutorService executor = Executors.newScheduledThreadPool(0, new DefaultThreadFactory(
"pool-gossip-workers"));
ClusterService cluster = new ClusterService(config.gossip, executor);
services.add(cluster);
services.add(new RepairService(raft, cluster, executor));
}
return services;
}
@Override
public String toString() {
return "KeyValueServer [raft=" + raft + "]";
}
public Replica getReplica() {
return this.raft.self();
}
public SnapshotInfo getLastSnapshotInfo() {
return this.raft.getLastSnapshotInfo();
}
}
| justinsb/cloudata | cloudata-keyvalue/src/main/java/com/cloudata/keyvalue/KeyValueServer.java | Java | apache-2.0 | 6,949 |
package apple.coretext;
import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.foundation.*;
@Library("CoreText/CoreText.h")
@Mapping("CTFramePathFillRule")
public final class CTFramePathFillRule extends ObjCEnum {
@GlobalConstant("kCTFramePathFillEvenOdd")
public static final long EvenOdd = 0L;
@GlobalConstant("kCTFramePathFillWindingNumber")
public static final long WindingNumber = 1L;
}
| Sellegit/j2objc | runtime/src/main/java/apple/coretext/CTFramePathFillRule.java | Java | apache-2.0 | 671 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.DNS;
import com.google.protobuf.ServiceException;
/**
* Retries scanner operations such as create, next, etc.
* Used by {@link ResultScanner}s made by {@link HTable}.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ScannerCallable extends ServerCallable<Result[]> {
public static final String LOG_SCANNER_LATENCY_CUTOFF
= "hbase.client.log.scanner.latency.cutoff";
public static final String LOG_SCANNER_ACTIVITY = "hbase.client.log.scanner.activity";
private static final Log LOG = LogFactory.getLog(ScannerCallable.class);
private long scannerId = -1L;
private boolean instantiated = false;
private boolean closed = false;
private Scan scan;
private int caching = 1;
private ScanMetrics scanMetrics;
private boolean logScannerActivity = false;
private int logCutOffLatency = 1000;
// indicate if it is a remote server call
private boolean isRegionServerRemote = true;
/**
* @param connection which connection
* @param tableName table callable is on
* @param scan the scan to execute
* @param scanMetrics the ScanMetrics to used, if it is null, ScannerCallable
* won't collect metrics
*/
public ScannerCallable (HConnection connection, byte [] tableName, Scan scan,
ScanMetrics scanMetrics) {
super(connection, tableName, scan.getStartRow());
this.scan = scan;
this.scanMetrics = scanMetrics;
Configuration conf = connection.getConfiguration();
logScannerActivity = conf.getBoolean(LOG_SCANNER_ACTIVITY, false);
logCutOffLatency = conf.getInt(LOG_SCANNER_LATENCY_CUTOFF, 1000);
}
/**
* @param reload force reload of server location
* @throws IOException
*/
@Override
public void connect(boolean reload) throws IOException {
if (!instantiated || reload) {
super.connect(reload);
checkIfRegionServerIsRemote();
instantiated = true;
}
// check how often we retry.
// HConnectionManager will call instantiateServer with reload==true
// if and only if for retries.
if (reload && this.scanMetrics != null) {
this.scanMetrics.countOfRPCRetries.inc();
if (isRegionServerRemote) {
this.scanMetrics.countOfRemoteRPCRetries.inc();
}
}
}
/**
* compare the local machine hostname with region server's hostname
* to decide if hbase client connects to a remote region server
   * @throws UnknownHostException if the local host name cannot be resolved
*/
private void checkIfRegionServerIsRemote() throws UnknownHostException {
String myAddress = DNS.getDefaultHost("default", "default");
if (this.location.getHostname().equalsIgnoreCase(myAddress)) {
isRegionServerRemote = false;
} else {
isRegionServerRemote = true;
}
}
/**
* @see java.util.concurrent.Callable#call()
*/
public Result [] call() throws IOException {
if (closed) {
if (scannerId != -1) {
close();
}
} else {
if (scannerId == -1L) {
this.scannerId = openScanner();
} else {
Result [] rrs = null;
try {
incRPCcallsMetrics();
ScanRequest request =
RequestConverter.buildScanRequest(scannerId, caching, false);
try {
ScanResponse response = server.scan(null, request);
long timestamp = System.currentTimeMillis();
rrs = ResponseConverter.getResults(response);
if (logScannerActivity) {
long now = System.currentTimeMillis();
if (now - timestamp > logCutOffLatency) {
int rows = rrs == null ? 0 : rrs.length;
LOG.info("Took " + (now-timestamp) + "ms to fetch "
+ rows + " rows from scanner=" + scannerId);
}
}
if (response.hasMoreResults()
&& !response.getMoreResults()) {
scannerId = -1L;
closed = true;
return null;
}
} catch (ServiceException se) {
throw ProtobufUtil.getRemoteException(se);
}
updateResultsMetrics(rrs);
} catch (IOException e) {
if (logScannerActivity) {
LOG.info("Got exception in fetching from scanner="
+ scannerId, e);
}
IOException ioe = e;
if (e instanceof RemoteException) {
ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e);
}
if (logScannerActivity && (ioe instanceof UnknownScannerException)) {
try {
HRegionLocation location =
connection.relocateRegion(tableName, scan.getStartRow());
LOG.info("Scanner=" + scannerId
+ " expired, current region location is " + location.toString()
+ " ip:" + location.getServerAddress().getBindAddress());
} catch (Throwable t) {
LOG.info("Failed to relocate region", t);
}
}
if (ioe instanceof NotServingRegionException) {
// Throw a DNRE so that we break out of cycle of calling NSRE
// when what we need is to open scanner against new location.
// Attach NSRE to signal client that it needs to resetup scanner.
if (this.scanMetrics != null) {
this.scanMetrics.countOfNSRE.inc();
}
throw new DoNotRetryIOException("Reset scanner", ioe);
} else if (ioe instanceof RegionServerStoppedException) {
// Throw a DNRE so that we break out of cycle of calling RSSE
// when what we need is to open scanner against new location.
// Attach RSSE to signal client that it needs to resetup scanner.
throw new DoNotRetryIOException("Reset scanner", ioe);
} else {
// The outer layers will retry
throw ioe;
}
}
return rrs;
}
}
return null;
}
private void incRPCcallsMetrics() {
if (this.scanMetrics == null) {
return;
}
this.scanMetrics.countOfRPCcalls.inc();
if (isRegionServerRemote) {
this.scanMetrics.countOfRemoteRPCcalls.inc();
}
}
private void updateResultsMetrics(Result[] rrs) {
if (this.scanMetrics == null || rrs == null) {
return;
}
for (Result rr : rrs) {
if (rr.getBytes() != null) {
this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength());
if (isRegionServerRemote) {
this.scanMetrics.countOfBytesInRemoteResults.inc(
rr.getBytes().getLength());
}
}
}
}
private void close() {
if (this.scannerId == -1L) {
return;
}
try {
incRPCcallsMetrics();
ScanRequest request =
RequestConverter.buildScanRequest(this.scannerId, 0, true);
try {
server.scan(null, request);
} catch (ServiceException se) {
throw ProtobufUtil.getRemoteException(se);
}
} catch (IOException e) {
LOG.warn("Ignore, probably already closed", e);
}
this.scannerId = -1L;
}
protected long openScanner() throws IOException {
incRPCcallsMetrics();
ScanRequest request =
RequestConverter.buildScanRequest(
this.location.getRegionInfo().getRegionName(),
this.scan, 0, false);
try {
ScanResponse response = server.scan(null, request);
long id = response.getScannerId();
if (logScannerActivity) {
LOG.info("Open scanner=" + id + " for scan=" + scan.toString()
+ " on region " + this.location.toString() + " ip:"
+ this.location.getServerAddress().getBindAddress());
}
return id;
} catch (ServiceException se) {
throw ProtobufUtil.getRemoteException(se);
}
}
protected Scan getScan() {
return scan;
}
/**
* Call this when the next invocation of call should close the scanner
*/
public void setClose() {
this.closed = true;
}
/**
* @return the HRegionInfo for the current region
*/
public HRegionInfo getHRegionInfo() {
if (!instantiated) {
return null;
}
return location.getRegionInfo();
}
/**
* Get the number of rows that will be fetched on next
* @return the number of rows for caching
*/
public int getCaching() {
return caching;
}
/**
* Set the number of rows that will be fetched on next
* @param caching the number of rows for caching
*/
public void setCaching(int caching) {
this.caching = caching;
}
}
| matteobertozzi/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java | Java | apache-2.0 | 10,524 |
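A rough driver-loop sketch for the ScannerCallable above, assuming an HConnection, table name and Scan prepared elsewhere; in HBase itself this is roughly what ClientScanner does behind the retry machinery, which is omitted here, and the row handling is a stand-in.
import java.io.IOException;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.ScannerCallable;

public class ScannerCallableSketch {
    static void drainScanner(HConnection connection, byte[] tableName, Scan scan) throws IOException {
        ScannerCallable callable = new ScannerCallable(connection, tableName, scan, null);
        callable.setCaching(100);   // rows fetched per next-RPC
        callable.connect(false);    // locate the region and set up the client proxy
        try {
            callable.call();        // the first call only opens the remote scanner and returns null
            Result[] batch;
            while ((batch = callable.call()) != null && batch.length > 0) {
                for (Result result : batch) {
                    System.out.println(result);   // stand-in for real row handling
                }
            }
        } finally {
            callable.setClose();    // mark the callable so that ...
            callable.call();        // ... this last call releases the server-side scanner id
        }
    }
}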
package com.fubaisum.okhttpsample;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.TextView;
import com.fubaisum.okhttphelper.OkHttpRequest;
import com.fubaisum.okhttphelper.ThreadMode;
import com.fubaisum.okhttphelper.callback.DownloadCallback;
import com.fubaisum.okhttphelper.params.FormParams;
import com.fubaisum.okhttphelper.params.MultipartParams;
import com.fubaisum.okhttphelper.progress.UiProgressListener;
import java.io.File;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class MainActivity extends AppCompatActivity {
private static final String testUrl = "https://raw.githubusercontent.com/fubaisum/AndroidCollections/master/testUser.json";
private TextView tvTest;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tvTest = (TextView) findViewById(R.id.tv_test);
testSyncString();
testParseModel();
testMultipartParams();
// testDownload();
}
private void testSyncString() {
ExecutorService executorService = Executors.newSingleThreadExecutor();
executorService.execute(new Runnable() {
@Override
public void run() {
try {
String result = new OkHttpRequest.Builder()
.setUrl(testUrl)
.get()
.build()
.string();
Log.e("MainActivity", "sync string result = " + result);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
private void testParseModel() {
User user = new User();
user.name = "abc";
user.address = "unknown";
FormParams params = new FormParams();
params.put("user", User.class, user);
new OkHttpRequest.Builder()
.setUrl(testUrl)
.post(params)
.build()
.threadMode(ThreadMode.MAIN)//default
.callback(new ApiCallback<User>() {
@Override
protected void onApiSuccess(String msg, User user) {
Log.e("MainActivity", user.toString());
}
@Override
protected void onApiFailure(String message) {
Log.e("MainActivity", "onResponseFailure : " + message);
}
});
}
private void testMultipartParams() {
String json = "{\"name\":\"5555555555555555555555\"}";
User user = new User();
user.name = "abc";
user.address = "unknown";
MultipartParams params = new MultipartParams();
params.put("token", "fsfaiufy8jn2ir");
params.putJson("json", json);
params.put("user", User.class, user);
params.putJson("userJson", User.class, user);
new OkHttpRequest.Builder()
.setUrl(testUrl)
// .post(params)
.build()
.threadMode(ThreadMode.MAIN)//default
.callback(new ApiCallback<User>() {
@Override
protected void onApiSuccess(String msg, User user) {
Log.e("MainActivity", user.toString());
}
@Override
protected void onApiFailure(String message) {
Log.e("MainActivity", "onResponseFailure : " + message);
}
});
}
private void testDownload() {
File file = new File(getCacheDir().getAbsolutePath(), "tmp.db");
new OkHttpRequest.Builder()
// .setUrl("http://pic41.nipic.com/20140430/18021738_213628575106_2.jpg")
.setUrl("http://api.youqingjia.com/db/20160614_v1.db")
.setResponseProgressListener(new UiProgressListener() {
@Override
public void onUiProgress(long currentBytesCount, long totalBytesCount) {
if (totalBytesCount == -1) {
tvTest.setText("totalBytesCount is unknown.");
} else {
float progress = currentBytesCount * 1.0f / totalBytesCount * 100;
tvTest.setText("progress = " + progress);
}
}
})
.build()
.threadMode(ThreadMode.BACKGROUND)
.callback(new DownloadCallback(file) {
@Override
public void onResponseSuccess(String fileAbsolutePath) {
Log.e("MainActivity", "fileAbsolutePath = " + fileAbsolutePath);
}
@Override
public void onResponseFailure(Exception e) {
Log.e("MainActivity", "download onResponseFailure() : " + e);
}
});
}
}
| fubaisum/OkHttpHelper | app/src/main/java/com/fubaisum/okhttpsample/MainActivity.java | Java | apache-2.0 | 5,252 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.fonts;
import java.nio.CharBuffer;
import java.nio.IntBuffer;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fop.complexscripts.fonts.GlyphDefinitionTable;
import org.apache.fop.complexscripts.fonts.GlyphPositioningTable;
import org.apache.fop.complexscripts.fonts.GlyphSubstitutionTable;
import org.apache.fop.complexscripts.fonts.Positionable;
import org.apache.fop.complexscripts.fonts.Substitutable;
import org.apache.fop.complexscripts.util.GlyphSequence;
import org.apache.fop.util.CharUtilities;
/**
* Generic MultiByte (CID) font
*/
public class MultiByteFont extends CIDFont implements Substitutable, Positionable {
/** logging instance */
private static final Log log // CSOK: ConstantNameCheck
= LogFactory.getLog(MultiByteFont.class);
private String ttcName = null;
private String encoding = "Identity-H";
private int defaultWidth = 0;
private CIDFontType cidType = CIDFontType.CIDTYPE2;
private CIDSubset subset = new CIDSubset();
/* advanced typographic support */
private GlyphDefinitionTable gdef;
private GlyphSubstitutionTable gsub;
private GlyphPositioningTable gpos;
/* dynamic private use (character) mappings */
private int numMapped;
private int numUnmapped;
private int nextPrivateUse = 0xE000;
private int firstPrivate;
private int lastPrivate;
private int firstUnmapped;
private int lastUnmapped;
/**
* Default constructor
*/
public MultiByteFont() {
subset.setupFirstGlyph();
setFontType(FontType.TYPE0);
}
/** {@inheritDoc} */
@Override
public int getDefaultWidth() {
return defaultWidth;
}
/** {@inheritDoc} */
@Override
public String getRegistry() {
return "Adobe";
}
/** {@inheritDoc} */
@Override
public String getOrdering() {
return "UCS";
}
/** {@inheritDoc} */
@Override
public int getSupplement() {
return 0;
}
/** {@inheritDoc} */
@Override
public CIDFontType getCIDType() {
return cidType;
}
/**
* Sets the CIDType.
* @param cidType The cidType to set
*/
public void setCIDType(CIDFontType cidType) {
this.cidType = cidType;
}
/** {@inheritDoc} */
@Override
public String getEmbedFontName() {
if (isEmbeddable()) {
return FontUtil.stripWhiteSpace(super.getFontName());
} else {
return super.getFontName();
}
}
/** {@inheritDoc} */
public boolean isEmbeddable() {
return !(getEmbedFileName() == null && getEmbedResourceName() == null);
}
public boolean isSubsetEmbedded() {
return true;
}
/** {@inheritDoc} */
@Override
public CIDSubset getCIDSubset() {
return this.subset;
}
/** {@inheritDoc} */
@Override
public String getEncodingName() {
return encoding;
}
/** {@inheritDoc} */
public int getWidth(int i, int size) {
if (isEmbeddable()) {
int glyphIndex = subset.getGlyphIndexForSubsetIndex(i);
return size * width[glyphIndex];
} else {
return size * width[i];
}
}
/** {@inheritDoc} */
public int[] getWidths() {
int[] arr = new int[width.length];
System.arraycopy(width, 0, arr, 0, width.length);
return arr;
}
/**
* Returns the glyph index for a Unicode character. The method returns 0 if there's no
* such glyph in the character map.
* @param c the Unicode character index
* @return the glyph index (or 0 if the glyph is not available)
*/
// [TBD] - needs optimization, i.e., change from linear search to binary search
private int findGlyphIndex(int c) {
int idx = c;
int retIdx = SingleByteEncoding.NOT_FOUND_CODE_POINT;
for (int i = 0; (i < cmap.length) && retIdx == 0; i++) {
if (cmap[i].getUnicodeStart() <= idx
&& cmap[i].getUnicodeEnd() >= idx) {
retIdx = cmap[i].getGlyphStartIndex()
+ idx
- cmap[i].getUnicodeStart();
}
}
return retIdx;
}
/**
* Add a private use mapping {PU,GI} to the existing character map.
* N.B. Does not insert in order, merely appends to end of existing map.
*/
private synchronized void addPrivateUseMapping ( int pu, int gi ) {
assert findGlyphIndex ( pu ) == SingleByteEncoding.NOT_FOUND_CODE_POINT;
CMapSegment[] oldCmap = cmap;
int cmapLength = oldCmap.length;
CMapSegment[] newCmap = new CMapSegment [ cmapLength + 1 ];
System.arraycopy ( oldCmap, 0, newCmap, 0, cmapLength );
newCmap [ cmapLength ] = new CMapSegment ( pu, pu, gi );
cmap = newCmap;
}
/**
* Given a glyph index, create a new private use mapping, augmenting the bfentries
* table. This is needed to accommodate the presence of an (output) glyph index in a
* complex script glyph substitution that does not correspond to a character in the
* font's CMAP. The creation of such private use mappings is deferred until an
* attempt is actually made to perform the reverse lookup from the glyph index. This
* is necessary in order to avoid exhausting the private use space on fonts containing
* many such non-mapped glyph indices, if these mappings had been created statically
* at font load time.
* @param gi glyph index
     * @return unicode scalar value
*/
private int createPrivateUseMapping ( int gi ) {
while ( ( nextPrivateUse < 0xF900 )
&& ( findGlyphIndex(nextPrivateUse) != SingleByteEncoding.NOT_FOUND_CODE_POINT ) ) {
nextPrivateUse++;
}
if ( nextPrivateUse < 0xF900 ) {
int pu = nextPrivateUse;
addPrivateUseMapping ( pu, gi );
if ( firstPrivate == 0 ) {
firstPrivate = pu;
}
lastPrivate = pu;
numMapped++;
if (log.isDebugEnabled()) {
log.debug ( "Create private use mapping from "
+ CharUtilities.format ( pu )
+ " to glyph index " + gi
+ " in font '" + getFullName() + "'" );
}
return pu;
} else {
if ( firstUnmapped == 0 ) {
firstUnmapped = gi;
}
lastUnmapped = gi;
numUnmapped++;
log.warn ( "Exhausted private use area: unable to map "
+ numUnmapped + " glyphs in glyph index range ["
+ firstUnmapped + "," + lastUnmapped
+ "] (inclusive) of font '" + getFullName() + "'" );
return 0;
}
}
/**
* Returns the Unicode scalar value that corresponds to the glyph index. If more than
* one correspondence exists, then the first one is returned (ordered by bfentries[]).
* @param gi glyph index
     * @return unicode scalar value
*/
// [TBD] - needs optimization, i.e., change from linear search to binary search
private int findCharacterFromGlyphIndex ( int gi, boolean augment ) {
int cc = 0;
for ( int i = 0, n = cmap.length; i < n; i++ ) {
CMapSegment segment = cmap [ i ];
int s = segment.getGlyphStartIndex();
int e = s + ( segment.getUnicodeEnd() - segment.getUnicodeStart() );
if ( ( gi >= s ) && ( gi <= e ) ) {
cc = segment.getUnicodeStart() + ( gi - s );
break;
}
}
if ( ( cc == 0 ) && augment ) {
cc = createPrivateUseMapping ( gi );
}
return cc;
}
private int findCharacterFromGlyphIndex ( int gi ) {
return findCharacterFromGlyphIndex ( gi, true );
}
/** {@inheritDoc} */
@Override
public char mapChar(char c) {
notifyMapOperation();
int glyphIndex = findGlyphIndex(c);
if (glyphIndex == SingleByteEncoding.NOT_FOUND_CODE_POINT) {
warnMissingGlyph(c);
glyphIndex = findGlyphIndex(Typeface.NOT_FOUND);
}
if (isEmbeddable()) {
glyphIndex = subset.mapSubsetChar(glyphIndex, c);
}
return (char)glyphIndex;
}
/** {@inheritDoc} */
@Override
public boolean hasChar(char c) {
return (findGlyphIndex(c) != SingleByteEncoding.NOT_FOUND_CODE_POINT);
}
/**
* Sets the defaultWidth.
* @param defaultWidth The defaultWidth to set
*/
public void setDefaultWidth(int defaultWidth) {
this.defaultWidth = defaultWidth;
}
/**
* Returns the TrueType Collection Name.
* @return the TrueType Collection Name
*/
public String getTTCName() {
return ttcName;
}
/**
* Sets the the TrueType Collection Name.
* @param ttcName the TrueType Collection Name
*/
public void setTTCName(String ttcName) {
this.ttcName = ttcName;
}
/**
* Sets the width array.
* @param wds array of widths.
*/
public void setWidthArray(int[] wds) {
this.width = wds;
}
/**
* Returns a Map of used Glyphs.
* @return Map Map of used Glyphs
*/
public Map<Integer, Integer> getUsedGlyphs() {
return subset.getSubsetGlyphs();
}
/** @return an array of the chars used */
public char[] getCharsUsed() {
if (!isEmbeddable()) {
return null;
}
return subset.getSubsetChars();
}
/**
* Establishes the glyph definition table.
* @param gdef the glyph definition table to be used by this font
*/
public void setGDEF ( GlyphDefinitionTable gdef ) {
if ( ( this.gdef == null ) || ( gdef == null ) ) {
this.gdef = gdef;
} else {
throw new IllegalStateException ( "font already associated with GDEF table" );
}
}
/**
* Obtain glyph definition table.
* @return glyph definition table or null if none is associated with font
*/
public GlyphDefinitionTable getGDEF() {
return gdef;
}
/**
* Establishes the glyph substitution table.
* @param gsub the glyph substitution table to be used by this font
*/
public void setGSUB ( GlyphSubstitutionTable gsub ) {
if ( ( this.gsub == null ) || ( gsub == null ) ) {
this.gsub = gsub;
} else {
throw new IllegalStateException ( "font already associated with GSUB table" );
}
}
/**
* Obtain glyph substitution table.
* @return glyph substitution table or null if none is associated with font
*/
public GlyphSubstitutionTable getGSUB() {
return gsub;
}
/**
* Establishes the glyph positioning table.
* @param gpos the glyph positioning table to be used by this font
*/
public void setGPOS ( GlyphPositioningTable gpos ) {
if ( ( this.gpos == null ) || ( gpos == null ) ) {
this.gpos = gpos;
} else {
throw new IllegalStateException ( "font already associated with GPOS table" );
}
}
/**
* Obtain glyph positioning table.
* @return glyph positioning table or null if none is associated with font
*/
public GlyphPositioningTable getGPOS() {
return gpos;
}
/** {@inheritDoc} */
public boolean performsSubstitution() {
return gsub != null;
}
/** {@inheritDoc} */
public CharSequence performSubstitution ( CharSequence cs, String script, String language ) {
if ( gsub != null ) {
GlyphSequence igs = mapCharsToGlyphs ( cs );
GlyphSequence ogs = gsub.substitute ( igs, script, language );
CharSequence ocs = mapGlyphsToChars ( ogs );
return ocs;
} else {
return cs;
}
}
/** {@inheritDoc} */
public CharSequence reorderCombiningMarks
( CharSequence cs, int[][] gpa, String script, String language ) {
if ( gdef != null ) {
GlyphSequence igs = mapCharsToGlyphs ( cs );
GlyphSequence ogs = gdef.reorderCombiningMarks ( igs, gpa, script, language );
CharSequence ocs = mapGlyphsToChars ( ogs );
return ocs;
} else {
return cs;
}
}
/** {@inheritDoc} */
public boolean performsPositioning() {
return gpos != null;
}
/** {@inheritDoc} */
public int[][]
performPositioning ( CharSequence cs, String script, String language, int fontSize ) {
if ( gpos != null ) {
GlyphSequence gs = mapCharsToGlyphs ( cs );
int[][] adjustments = new int [ gs.getGlyphCount() ] [ 4 ];
if ( gpos.position ( gs, script, language, fontSize, this.width, adjustments ) ) {
return scaleAdjustments ( adjustments, fontSize );
} else {
return null;
}
} else {
return null;
}
}
/** {@inheritDoc} */
public int[][] performPositioning ( CharSequence cs, String script, String language ) {
throw new UnsupportedOperationException();
}
private int[][] scaleAdjustments ( int[][] adjustments, int fontSize ) {
if ( adjustments != null ) {
for ( int i = 0, n = adjustments.length; i < n; i++ ) {
int[] gpa = adjustments [ i ];
for ( int k = 0; k < 4; k++ ) {
gpa [ k ] = ( gpa [ k ] * fontSize ) / 1000;
}
}
return adjustments;
} else {
return null;
}
}
/**
     * Map sequence CS, comprising a sequence of UTF-16 encoded Unicode Code Points, to
     * an output glyph sequence GS, comprising a sequence of Glyph Indices. N.B. Unlike
* mapChar(), this method does not make use of embedded subset encodings.
* @param cs a CharSequence containing UTF-16 encoded Unicode characters
     * @return a GlyphSequence containing glyph indices
*/
private GlyphSequence mapCharsToGlyphs ( CharSequence cs ) {
IntBuffer cb = IntBuffer.allocate ( cs.length() );
IntBuffer gb = IntBuffer.allocate ( cs.length() );
int gi;
int giMissing = findGlyphIndex ( Typeface.NOT_FOUND );
for ( int i = 0, n = cs.length(); i < n; i++ ) {
int cc = cs.charAt ( i );
if ( ( cc >= 0xD800 ) && ( cc < 0xDC00 ) ) {
if ( ( i + 1 ) < n ) {
int sh = cc;
int sl = cs.charAt ( ++i );
if ( ( sl >= 0xDC00 ) && ( sl < 0xE000 ) ) {
cc = 0x10000 + ( ( sh - 0xD800 ) << 10 ) + ( ( sl - 0xDC00 ) << 0 );
} else {
throw new IllegalArgumentException
( "ill-formed UTF-16 sequence, "
+ "contains isolated high surrogate at index " + i );
}
} else {
throw new IllegalArgumentException
( "ill-formed UTF-16 sequence, "
+ "contains isolated high surrogate at end of sequence" );
}
} else if ( ( cc >= 0xDC00 ) && ( cc < 0xE000 ) ) {
throw new IllegalArgumentException
( "ill-formed UTF-16 sequence, "
+ "contains isolated low surrogate at index " + i );
}
notifyMapOperation();
gi = findGlyphIndex ( cc );
if ( gi == SingleByteEncoding.NOT_FOUND_CODE_POINT ) {
warnMissingGlyph ( (char) cc );
gi = giMissing;
}
cb.put ( cc );
gb.put ( gi );
}
cb.flip();
gb.flip();
return new GlyphSequence ( cb, gb, null );
}
/**
* Map sequence GS, comprising a sequence of Glyph Indices, to output sequence CS,
* comprising a sequence of UTF-16 encoded Unicode Code Points.
* @param gs a GlyphSequence containing glyph indices
     * @return a CharSequence containing UTF-16 encoded Unicode characters
*/
private CharSequence mapGlyphsToChars ( GlyphSequence gs ) {
int ng = gs.getGlyphCount();
CharBuffer cb = CharBuffer.allocate ( ng );
int ccMissing = Typeface.NOT_FOUND;
for ( int i = 0, n = ng; i < n; i++ ) {
int gi = gs.getGlyph ( i );
int cc = findCharacterFromGlyphIndex ( gi );
if ( ( cc == 0 ) || ( cc > 0x10FFFF ) ) {
cc = ccMissing;
log.warn("Unable to map glyph index " + gi
+ " to Unicode scalar in font '"
+ getFullName() + "', substituting missing character '"
+ (char) cc + "'");
}
if ( cc > 0x00FFFF ) {
int sh;
int sl;
cc -= 0x10000;
sh = ( ( cc >> 10 ) & 0x3FF ) + 0xD800;
sl = ( ( cc >> 0 ) & 0x3FF ) + 0xDC00;
cb.put ( (char) sh );
cb.put ( (char) sl );
} else {
cb.put ( (char) cc );
}
}
cb.flip();
return (CharSequence) cb;
}
}
| Distrotech/fop | src/java/org/apache/fop/fonts/MultiByteFont.java | Java | apache-2.0 | 18,455 |
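A small sketch of the public mapping surface of the MultiByteFont above, assuming a font instance that FOP's font loading has already populated with a CMAP (and optionally a GSUB table); the helper class, its method name and the script/language parameters are invented for illustration, and only methods shown in the class are used.
import org.apache.fop.fonts.MultiByteFont;

public class MultiByteFontSketch {
    // Substitute (if supported) and then map characters to subset glyph indices.
    static String mapForEmbedding(MultiByteFont font, String text, String script, String language) {
        CharSequence cs = text;
        if (font.performsSubstitution()) {
            // chars -> glyphs -> GSUB lookups -> chars (possibly private-use code points)
            cs = font.performSubstitution(cs, script, language);
        }
        StringBuilder mapped = new StringBuilder(cs.length());
        for (int i = 0; i < cs.length(); i++) {
            char c = cs.charAt(i);
            if (font.hasChar(c)) {
                mapped.append(font.mapChar(c));   // subset index when the font is embeddable
            }
        }
        return mapped.toString();
    }
}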
/*
* Copyright (c) 2016-2020 Flux Capacitor.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fluxcapacitor.common;
@FunctionalInterface
public interface Registration {
static Registration noOp() {
return () -> {};
}
void cancel();
default Registration merge(Registration otherRegistration) {
return () -> {
cancel();
otherRegistration.cancel();
};
}
}
| flux-capacitor-io/flux-capacitor-client | common/src/main/java/io/fluxcapacitor/common/Registration.java | Java | apache-2.0 | 945 |
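A short self-contained sketch of how the Registration contract above composes; the handler registry is made up for illustration, and only noOp(), merge() and cancel() come from the interface.
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import io.fluxcapacitor.common.Registration;

public class RegistrationSketch {
    private final List<Runnable> handlers = new CopyOnWriteArrayList<>();

    Registration register(Runnable handler) {
        handlers.add(handler);
        // Cancelling a registration simply removes the handler again
        return () -> handlers.remove(handler);
    }

    public static void main(String[] args) {
        RegistrationSketch registry = new RegistrationSketch();
        // merge() chains cancellations, so a single cancel() tears both registrations down
        Registration both = Registration.noOp()
                .merge(registry.register(() -> System.out.println("first")))
                .merge(registry.register(() -> System.out.println("second")));
        both.cancel();
        System.out.println("handlers left: " + registry.handlers.size());   // prints 0
    }
}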
package org.visallo.core.model.search;
import org.json.JSONArray;
import org.json.JSONObject;
import org.visallo.core.exception.VisalloException;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
public class SearchOptions {
private final Map<String, Object> parameters;
private final String workspaceId;
public SearchOptions(Map<String, Object> parameters, String workspaceId) {
this.parameters = parameters;
this.workspaceId = workspaceId;
}
public String getWorkspaceId() {
return workspaceId;
}
public <T> T getOptionalParameter(String parameterName, Class<T> resultType) {
Object obj = parameters.get(parameterName);
if (obj == null) {
return null;
}
try {
if (resultType.isArray() && obj instanceof Collection) {
Collection collection = (Collection) obj;
Class type = resultType.getComponentType();
return (T) collection.toArray((Object[]) Array.newInstance(type, collection.size()));
} else if (resultType.isArray() && !obj.getClass().isArray()) {
Object[] array = (Object[]) Array.newInstance(resultType.getComponentType(), 1);
array[0] = objectToType(obj, resultType.getComponentType());
return objectToType(array, resultType);
}
return objectToType(obj, resultType);
} catch (Exception ex) {
throw new VisalloException("Could not cast object \"" + obj + "\" to type \"" + resultType.getName() + "\"", ex);
}
}
private <T> T objectToType(Object obj, Class<T> resultType) {
if (obj != null && resultType == obj.getClass()) {
//noinspection unchecked
return (T) obj;
}
if (resultType == Integer.class && obj instanceof String) {
return resultType.cast(Integer.parseInt((String) obj));
}
if (resultType == Long.class && obj instanceof String) {
return resultType.cast(Long.parseLong((String) obj));
}
if (resultType == Long.class && obj instanceof Integer) {
return resultType.cast(((Integer) obj).longValue());
}
if (resultType == Double.class && obj instanceof String) {
return resultType.cast(Double.parseDouble((String) obj));
}
if (resultType == Float.class && obj instanceof String) {
return resultType.cast(Float.parseFloat((String) obj));
}
if (resultType == JSONArray.class && obj instanceof String) {
return resultType.cast(new JSONArray((String) obj));
}
if (resultType == JSONArray.class && obj instanceof String[]) {
return resultType.cast(new JSONArray(obj));
}
if (resultType == Boolean.class && obj instanceof String) {
return resultType.cast(Boolean.parseBoolean((String) obj));
}
if (resultType == String.class && obj instanceof JSONObject) {
return resultType.cast(obj.toString());
}
return resultType.cast(obj);
}
public <T> T getOptionalParameter(String parameterName, T defaultValue) {
checkNotNull(defaultValue, "defaultValue cannot be null");
T obj = (T) getOptionalParameter(parameterName, defaultValue.getClass());
if (obj == null) {
// null is a possible value, for example limit=null signifies don't limit the results. If limit is
// not specified use the defaultValue
if (parameters.containsKey(parameterName)) {
return null;
}
return defaultValue;
}
return obj;
}
public <T> T getRequiredParameter(String parameterName, Class<T> resultType) {
T obj = getOptionalParameter(parameterName, resultType);
if (obj == null) {
throw new VisalloException("Missing parameter: " + parameterName);
}
return obj;
}
}
| visallo/visallo | core/core/src/main/java/org/visallo/core/model/search/SearchOptions.java | Java | apache-2.0 | 4,104 |
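A brief sketch of the parameter coercion done by SearchOptions above; the parameter names (q, size, filter) and the workspace id are invented for illustration, and the coercions shown mirror the objectToType branches in the class.
import java.util.HashMap;
import java.util.Map;
import org.json.JSONArray;
import org.visallo.core.model.search.SearchOptions;

public class SearchOptionsSketch {
    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("q", "*");
        params.put("size", "25");                                 // String is parsed into a Long below
        params.put("filter", "[{\"propertyName\":\"title\"}]");   // String is parsed into a JSONArray below

        SearchOptions options = new SearchOptions(params, "WORKSPACE_123");

        String query = options.getRequiredParameter("q", String.class);
        Long size = options.getOptionalParameter("size", 100L);   // default 100 is used only when "size" is absent
        JSONArray filter = options.getOptionalParameter("filter", JSONArray.class);

        System.out.println(query + " / " + size + " / " + filter.length() + " filter(s)");
    }
}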
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.tri.obj;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ThrowableDoubleConsumer;
import at.gridtec.lambda4j.consumer.tri.obj.ThrowableBiObjIntConsumer;
import at.gridtec.lambda4j.core.exception.ThrownByFunctionalInterfaceException;
import at.gridtec.lambda4j.core.util.ThrowableUtils;
import at.gridtec.lambda4j.function.ThrowableBooleanFunction;
import at.gridtec.lambda4j.function.ThrowableByteFunction;
import at.gridtec.lambda4j.function.ThrowableCharFunction;
import at.gridtec.lambda4j.function.ThrowableDoubleFunction;
import at.gridtec.lambda4j.function.ThrowableFloatFunction;
import at.gridtec.lambda4j.function.ThrowableFunction;
import at.gridtec.lambda4j.function.ThrowableIntFunction;
import at.gridtec.lambda4j.function.ThrowableLongFunction;
import at.gridtec.lambda4j.function.ThrowableShortFunction;
import at.gridtec.lambda4j.function.bi.obj.ThrowableObjIntToDoubleFunction;
import at.gridtec.lambda4j.function.bi.to.ThrowableToDoubleBiFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableBooleanToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableByteToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableCharToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToByteFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToCharFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToLongFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToShortFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableFloatToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableIntToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableShortToIntFunction;
import at.gridtec.lambda4j.function.to.ThrowableToDoubleFunction;
import at.gridtec.lambda4j.function.to.ThrowableToIntFunction;
import at.gridtec.lambda4j.function.tri.ThrowableTriFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriBooleanToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriByteToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriCharToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriFloatToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriIntToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriLongToDoubleFunction;
import at.gridtec.lambda4j.function.tri.conversion.ThrowableTriShortToDoubleFunction;
import at.gridtec.lambda4j.function.tri.to.ThrowableToDoubleTriFunction;
import at.gridtec.lambda4j.operator.ternary.ThrowableDoubleTernaryOperator;
import at.gridtec.lambda4j.operator.unary.ThrowableDoubleUnaryOperator;
import at.gridtec.lambda4j.operator.unary.ThrowableIntUnaryOperator;
import at.gridtec.lambda4j.predicate.ThrowableDoublePredicate;
import at.gridtec.lambda4j.predicate.tri.obj.ThrowableBiObjIntPredicate;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
/**
* Represents an operation that accepts two object-valued and one {@code int}-valued input argument and produces a
* {@code double}-valued result which is able to throw any {@link Throwable}.
* This is a (reference, reference, int) specialization of {@link ThrowableTriFunction}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsDoubleThrows(Object, Object, int)}.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @see ThrowableTriFunction
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ThrowableBiObjIntToDoubleFunction<T, U, X extends Throwable> extends Lambda {
/**
* Constructs a {@link ThrowableBiObjIntToDoubleFunction} based on a lambda expression or a method reference.
* Thereby the given lambda expression or method reference is returned on an as-is basis to implicitly transform it
* to the desired type. With this method, it is possible to ensure that correct type is used from lambda expression
* or method reference.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
* @return A {@code ThrowableBiObjIntToDoubleFunction} from given lambda expression or method reference.
* @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
* {@code null} will be returned.
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
* Expression</a>
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
*/
static <T, U, X extends Throwable> ThrowableBiObjIntToDoubleFunction<T, U, X> of(
@Nullable final ThrowableBiObjIntToDoubleFunction<T, U, X> expression) {
return expression;
}
/**
* Calls the given {@link ThrowableBiObjIntToDoubleFunction} with the given arguments and returns its result.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param function The function to be called
* @param t The first argument to the function
* @param u The second argument to the function
* @param value The third argument to the function
* @return The result from the given {@code ThrowableBiObjIntToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
* @throws X Any throwable from this functions action
*/
static <T, U, X extends Throwable> double call(
@Nonnull final ThrowableBiObjIntToDoubleFunction<? super T, ? super U, ? extends X> function, T t, U u,
int value) throws X {
Objects.requireNonNull(function);
return function.applyAsDoubleThrows(t, u, value);
}
/**
* Creates a {@link ThrowableBiObjIntToDoubleFunction} which uses the {@code first} parameter of this one as
* argument for the given {@link ThrowableToDoubleFunction}.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param function The function which accepts the {@code first} parameter of this one
     * @return A {@code ThrowableBiObjIntToDoubleFunction} which uses the {@code first} parameter of this one as
* argument for the given {@code ThrowableToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T, U, X extends Throwable> ThrowableBiObjIntToDoubleFunction<T, U, X> onlyFirst(
@Nonnull final ThrowableToDoubleFunction<? super T, ? extends X> function) {
Objects.requireNonNull(function);
return (t, u, value) -> function.applyAsDoubleThrows(t);
}
/**
* Creates a {@link ThrowableBiObjIntToDoubleFunction} which uses the {@code second} parameter of this one as
* argument for the given {@link ThrowableToDoubleFunction}.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param function The function which accepts the {@code second} parameter of this one
     * @return A {@code ThrowableBiObjIntToDoubleFunction} which uses the {@code second} parameter of this one
* as argument for the given {@code ThrowableToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T, U, X extends Throwable> ThrowableBiObjIntToDoubleFunction<T, U, X> onlySecond(
@Nonnull final ThrowableToDoubleFunction<? super U, ? extends X> function) {
Objects.requireNonNull(function);
return (t, u, value) -> function.applyAsDoubleThrows(u);
}
/**
* Creates a {@link ThrowableBiObjIntToDoubleFunction} which uses the {@code third} parameter of this one as
* argument for the given {@link ThrowableIntToDoubleFunction}.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param function The function which accepts the {@code third} parameter of this one
     * @return A {@code ThrowableBiObjIntToDoubleFunction} which uses the {@code third} parameter of this one as
* argument for the given {@code ThrowableIntToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T, U, X extends Throwable> ThrowableBiObjIntToDoubleFunction<T, U, X> onlyThird(
@Nonnull final ThrowableIntToDoubleFunction<? extends X> function) {
Objects.requireNonNull(function);
return (t, u, value) -> function.applyAsDoubleThrows(value);
}
/**
* Creates a {@link ThrowableBiObjIntToDoubleFunction} which always returns a given value.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @param ret The return value for the constant
* @return A {@code ThrowableBiObjIntToDoubleFunction} which always returns a given value.
*/
@Nonnull
static <T, U, X extends Throwable> ThrowableBiObjIntToDoubleFunction<T, U, X> constant(double ret) {
return (t, u, value) -> ret;
}
/**
* Applies this function to the given arguments.
*
* @param t The first argument to the function
* @param u The second argument to the function
* @param value The third argument to the function
* @return The return value from the function, which is its result.
     * @throws X Any throwable from this function's action
*/
double applyAsDoubleThrows(T t, U u, int value) throws X;
/**
* Applies this function to the given tuple.
*
* @param tuple The tuple to be applied to the function
* @param value The primitive value to be applied to the function
* @return The return value from the function, which is its result.
* @throws NullPointerException If given argument is {@code null}
     * @throws X Any throwable from this function's action
* @see org.apache.commons.lang3.tuple.Pair
*/
default double applyAsDoubleThrows(@Nonnull Pair<T, U> tuple, int value) throws X {
Objects.requireNonNull(tuple);
return applyAsDoubleThrows(tuple.getLeft(), tuple.getRight(), value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link
* ThrowableObjIntToDoubleFunction} as result.
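     * <p>
     * For example, assuming {@code f} is an existing instance of type
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>} (a sketch only):
     * <pre>{@code ThrowableObjIntToDoubleFunction<String, Exception> partial = f.papplyAsDoubleThrows("fixed");}</pre>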
*
* @param t The first argument to this function used to partially apply this function
     * @return A {@code ThrowableObjIntToDoubleFunction} that represents this function partially applied to some
* arguments.
*/
@Nonnull
default ThrowableObjIntToDoubleFunction<U, X> papplyAsDoubleThrows(T t) {
return (u, value) -> this.applyAsDoubleThrows(t, u, value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link ThrowableIntToDoubleFunction}
* as result.
*
* @param t The first argument to this function used to partially apply this function
* @param u The second argument to this function used to partially apply this function
     * @return A {@code ThrowableIntToDoubleFunction} that represents this function partially applied to some
* arguments.
*/
@Nonnull
default ThrowableIntToDoubleFunction<X> papplyAsDoubleThrows(T t, U u) {
return (value) -> this.applyAsDoubleThrows(t, u, value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link ThrowableToDoubleBiFunction} as
* result.
*
* @param value The third argument to this function used to partially apply this function
     * @return A {@code ThrowableToDoubleBiFunction} that represents this function partially applied to some arguments.
*/
@Nonnull
default ThrowableToDoubleBiFunction<T, U, X> papplyAsDoubleThrows(int value) {
return (t, u) -> this.applyAsDoubleThrows(t, u, value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link ThrowableToDoubleFunction} as
* result.
*
* @param t The first argument to this function used to partially apply this function
* @param value The third argument to this function used to partially apply this function
     * @return A {@code ThrowableToDoubleFunction} that represents this function partially applied to some arguments.
*/
@Nonnull
default ThrowableToDoubleFunction<U, X> papplyAsDoubleThrows(T t, int value) {
return (u) -> this.applyAsDoubleThrows(t, u, value);
}
/**
* Returns the number of arguments for this function.
*
* @return The number of arguments for this function.
* @implSpec The default implementation always returns {@code 3}.
*/
@Nonnegative
default int arity() {
return 3;
}
/**
* Returns a composed {@link ThrowableToDoubleTriFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
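     * <p>
     * A hedged sketch, assuming {@code f} is an existing
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>} (the mapping functions are illustrative):
     * <pre>{@code
     * ThrowableToDoubleTriFunction<String, String, String, Exception> composed =
     *     f.compose(String::trim, String::toUpperCase, String::length);
     * }</pre>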
*
* @param <A> The type of the argument to the first given function, and of composed function
* @param <B> The type of the argument to the second given function, and of composed function
* @param <C> The type of the argument to the third given function, and of composed function
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableToDoubleTriFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A, B, C> ThrowableToDoubleTriFunction<A, B, C, X> compose(
@Nonnull final ThrowableFunction<? super A, ? extends T, ? extends X> before1,
@Nonnull final ThrowableFunction<? super B, ? extends U, ? extends X> before2,
@Nonnull final ThrowableToIntFunction<? super C, ? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (a, b, c) -> applyAsDoubleThrows(before1.applyThrows(a), before2.applyThrows(b),
before3.applyAsIntThrows(c));
}
/**
* Returns a composed {@link ThrowableTriBooleanToDoubleFunction} that first applies the {@code before} functions to
     * its input, and then applies this function to the result. This method is just a convenience, to provide the ability
* to execute an operation which accepts {@code boolean} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriBooleanToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* boolean}.
*/
@Nonnull
default ThrowableTriBooleanToDoubleFunction<X> composeFromBoolean(
@Nonnull final ThrowableBooleanFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableBooleanFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableBooleanToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriByteToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
     * This method is just a convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriByteToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* byte}.
*/
@Nonnull
default ThrowableTriByteToDoubleFunction<X> composeFromByte(
@Nonnull final ThrowableByteFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableByteFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableByteToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriCharToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
     * This method is just a convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriCharToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* char}.
*/
@Nonnull
default ThrowableTriCharToDoubleFunction<X> composeFromChar(
@Nonnull final ThrowableCharFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableCharFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableCharToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableDoubleTernaryOperator} that first applies the {@code before} functions to its
     * input, and then applies this function to the result. This method is just a convenience, to provide the ability to
* execute an operation which accepts {@code double} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableDoubleTernaryOperator} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* double}.
*/
@Nonnull
default ThrowableDoubleTernaryOperator<X> composeFromDouble(
@Nonnull final ThrowableDoubleFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableDoubleFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableDoubleToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriFloatToDoubleFunction} that first applies the {@code before} functions to
     * its input, and then applies this function to the result. This method is just a convenience, to provide the ability
* to execute an operation which accepts {@code float} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriFloatToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* float}.
*/
@Nonnull
default ThrowableTriFloatToDoubleFunction<X> composeFromFloat(
@Nonnull final ThrowableFloatFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableFloatFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableFloatToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriIntToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
     * This method is just a convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third operator to apply before this function is applied
* @return A composed {@code ThrowableTriIntToDoubleFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* int}.
*/
@Nonnull
default ThrowableTriIntToDoubleFunction<X> composeFromInt(
@Nonnull final ThrowableIntFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableIntFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableIntUnaryOperator<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriLongToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
     * This method is just a convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriLongToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* long}.
*/
@Nonnull
default ThrowableTriLongToDoubleFunction<X> composeFromLong(
@Nonnull final ThrowableLongFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableLongFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableLongToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableTriShortToDoubleFunction} that first applies the {@code before} functions to
     * its input, and then applies this function to the result. This method is just a convenience, to provide the ability
* to execute an operation which accepts {@code short} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @param before3 The third function to apply before this function is applied
* @return A composed {@code ThrowableTriShortToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case, this is {@code
* short}.
*/
@Nonnull
default ThrowableTriShortToDoubleFunction<X> composeFromShort(
@Nonnull final ThrowableShortFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableShortFunction<? extends U, ? extends X> before2,
@Nonnull final ThrowableShortToIntFunction<? extends X> before3) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
Objects.requireNonNull(before3);
return (value1, value2, value3) -> applyAsDoubleThrows(before1.applyThrows(value1), before2.applyThrows(value2),
before3.applyAsIntThrows(value3));
}
/**
* Returns a composed {@link ThrowableBiObjIntFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
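     * <p>
     * For instance (a sketch only; {@code f} is an assumed
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>}):
     * <pre>{@code
     * ThrowableBiObjIntFunction<String, String, String, Exception> stringified =
     *     f.andThen(d -> Double.toString(d));
     * }</pre>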
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> ThrowableBiObjIntFunction<T, U, S, X> andThen(
@Nonnull final ThrowableDoubleFunction<? extends S, ? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntPredicate} that first applies this function to its input, and then
     * applies the {@code after} predicate to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code boolean}.
*
* @param after The predicate to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntPredicate} that first applies this function to its input, and then
* applies the {@code after} predicate to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* boolean}.
*/
@Nonnull
default ThrowableBiObjIntPredicate<T, U, X> andThenToBoolean(
@Nonnull final ThrowableDoublePredicate<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.testThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToByteFunction} that first applies this function to its input, and
     * then applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code byte}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToByteFunction} that first applies this function to its input, and
* then applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* byte}.
*/
@Nonnull
default ThrowableBiObjIntToByteFunction<T, U, X> andThenToByte(
@Nonnull final ThrowableDoubleToByteFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsByteThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToCharFunction} that first applies this function to its input, and
     * then applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code char}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToCharFunction} that first applies this function to its input, and
* then applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* char}.
*/
@Nonnull
default ThrowableBiObjIntToCharFunction<T, U, X> andThenToChar(
@Nonnull final ThrowableDoubleToCharFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsCharThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToDoubleFunction} that first applies this function to its input, and
     * then applies the {@code after} operator to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code double}.
*
* @param after The operator to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToDoubleFunction} that first applies this function to its input, and
* then applies the {@code after} operator to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* double}.
*/
@Nonnull
default ThrowableBiObjIntToDoubleFunction<T, U, X> andThenToDouble(
@Nonnull final ThrowableDoubleUnaryOperator<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsDoubleThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToFloatFunction} that first applies this function to its input, and
     * then applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code float}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToFloatFunction} that first applies this function to its input, and
* then applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* float}.
*/
@Nonnull
default ThrowableBiObjIntToFloatFunction<T, U, X> andThenToFloat(
@Nonnull final ThrowableDoubleToFloatFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsFloatThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToIntFunction} that first applies this function to its input, and then
     * applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code int}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToIntFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* int}.
*/
@Nonnull
default ThrowableBiObjIntToIntFunction<T, U, X> andThenToInt(
@Nonnull final ThrowableDoubleToIntFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsIntThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToLongFunction} that first applies this function to its input, and
     * then applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code long}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToLongFunction} that first applies this function to its input, and
* then applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* long}.
*/
@Nonnull
default ThrowableBiObjIntToLongFunction<T, U, X> andThenToLong(
@Nonnull final ThrowableDoubleToLongFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsLongThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a composed {@link ThrowableBiObjIntToShortFunction} that first applies this function to its input, and
     * then applies the {@code after} function to the result. This method is just a convenience, to provide the ability to
* transform this primitive function to an operation returning {@code short}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiObjIntToShortFunction} that first applies this function to its input, and
* then applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to return primitive values. In this case, this is {@code
* short}.
*/
@Nonnull
default ThrowableBiObjIntToShortFunction<T, U, X> andThenToShort(
@Nonnull final ThrowableDoubleToShortFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u, value) -> after.applyAsShortThrows(applyAsDoubleThrows(t, u, value));
}
/**
     * Returns a composed {@link ThrowableBiObjIntConsumer} that first applies this function to its input, and then
* consumes the result using the given {@link ThrowableDoubleConsumer}.
*
* @param consumer The operation which consumes the result from this operation
* @return A composed {@code ThrowableBiObjIntConsumer} that first applies this function to its input, and then
* consumes the result using the given {@code ThrowableDoubleConsumer}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
default ThrowableBiObjIntConsumer<T, U, X> consume(@Nonnull final ThrowableDoubleConsumer<? extends X> consumer) {
Objects.requireNonNull(consumer);
return (t, u, value) -> consumer.acceptThrows(applyAsDoubleThrows(t, u, value));
}
/**
* Returns a tupled version of this function.
*
* @return A tupled version of this function.
*/
@Nonnull
default ThrowableObjIntToDoubleFunction<Pair<T, U>, X> tupled() {
return this::applyAsDoubleThrows;
}
/**
* Returns a memoized (caching) version of this {@link ThrowableBiObjIntToDoubleFunction}. Whenever it is called,
* the mapping between the input parameters and the return value is preserved in a cache, making subsequent calls
     * return the memoized value instead of computing the return value again.
* <p>
     * Unless the function and therefore the used cache is garbage-collected, it will keep all memoized values
* forever.
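     * <p>
     * Illustrative sketch of the caching behavior (the lambda and arguments are assumptions, not part of this
     * library):
     * <pre>{@code
     * ThrowableBiObjIntToDoubleFunction<String, String, Exception> cached =
     *     ThrowableBiObjIntToDoubleFunction.<String, String, Exception>of(
     *         (t, u, value) -> t.length() + u.length() + value).memoized();
     * double first = cached.applyAsDoubleThrows("a", "b", 1);  // computed and cached
     * double second = cached.applyAsDoubleThrows("a", "b", 1); // served from the cache
     * }</pre>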
*
* @return A memoized (caching) version of this {@code ThrowableBiObjIntToDoubleFunction}.
* @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
* resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
* @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
* thread-safe.
*/
@Nonnull
default ThrowableBiObjIntToDoubleFunction<T, U, X> memoized() {
if (isMemoized()) {
return this;
} else {
final Map<Triple<T, U, Integer>, Double> cache = new ConcurrentHashMap<>();
final Object lock = new Object();
return (ThrowableBiObjIntToDoubleFunction<T, U, X> & Memoized) (t, u, value) -> {
final double returnValue;
synchronized (lock) {
returnValue = cache.computeIfAbsent(Triple.of(t, u, value), ThrowableFunction.of(
key -> applyAsDoubleThrows(key.getLeft(), key.getMiddle(), key.getRight())));
}
return returnValue;
};
}
}
/**
* Returns a composed {@link ThrowableTriFunction} which represents this {@link ThrowableBiObjIntToDoubleFunction}.
* Thereby the primitive input argument for this function is autoboxed. This method provides the possibility to use
* this {@code ThrowableBiObjIntToDoubleFunction} with methods provided by the {@code JDK}.
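     * <p>
     * Sketch (assuming {@code f} is an existing
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>}):
     * <pre>{@code ThrowableTriFunction<String, String, Integer, Double, Exception> boxed = f.boxed();}</pre>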
*
* @return A composed {@code ThrowableTriFunction} which represents this {@code ThrowableBiObjIntToDoubleFunction}.
*/
@Nonnull
default ThrowableTriFunction<T, U, Integer, Double, X> boxed() {
return this::applyAsDoubleThrows;
}
/**
* Returns a composed {@link BiObjIntToDoubleFunction} that applies this function to its input and nests the thrown
* {@link Throwable} from it. The {@code Throwable} is nested (wrapped) in a {@link
     * ThrownByFunctionalInterfaceException}, which is constructed from the thrown {@code Throwable}'s message and the
* thrown {@code Throwable} itself.
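     * <p>
     * A hedged sketch of typical use (assuming {@code f} is an existing
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>}):
     * <pre>{@code
     * BiObjIntToDoubleFunction<String, String> unchecked = f.nest();
     * double result = unchecked.applyAsDouble("a", "b", 1); // a checked throwable would arrive wrapped
     * }</pre>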
*
* @return A composed {@link BiObjIntToDoubleFunction} that applies this function to its input and nests the thrown
* {@code Throwable} from it.
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest(Function)
* @see ThrownByFunctionalInterfaceException
*/
@Nonnull
default BiObjIntToDoubleFunction<T, U> nest() {
return nest(throwable -> new ThrownByFunctionalInterfaceException(throwable.getMessage(), throwable));
}
/**
* Returns a composed {@link BiObjIntToDoubleFunction} that applies this function to its input and nests the thrown
     * {@link Throwable} from it using the {@code mapper} operation. Thereby {@code mapper} may modify the thrown {@code
     * Throwable}, depending on its implementation, and returns it nested (wrapped) in a {@link RuntimeException}.
*
* @param mapper The operation to map the thrown {@code Throwable} to {@code RuntimeException}
* @return A composed {@link BiObjIntToDoubleFunction} that applies this function to its input and nests the thrown
     * {@code Throwable} from it using the {@code mapper} operation.
* @throws NullPointerException If given argument is {@code null}
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest()
*/
@Nonnull
default BiObjIntToDoubleFunction<T, U> nest(
@Nonnull final Function<? super Throwable, ? extends RuntimeException> mapper) {
return recover(throwable -> {
throw mapper.apply(throwable);
});
}
/**
* Returns a composed {@link BiObjIntToDoubleFunction} that first applies this function to its input, and then
* applies the {@code recover} operation if a {@link Throwable} is thrown from this one. The {@code recover}
     * operation is represented by a curried operation which is called with throwable information and the same
     * arguments as this function.
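     * <p>
     * A minimal sketch (the fallback value {@code -1.0d} is illustrative only; {@code f} is an assumed
     * {@code ThrowableBiObjIntToDoubleFunction<String, String, Exception>}):
     * <pre>{@code
     * BiObjIntToDoubleFunction<String, String> safe = f.recover(throwable -> (t, u, value) -> -1.0d);
     * }</pre>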
*
* @param recover The operation to apply if this function throws a {@code Throwable}
* @return A composed {@link BiObjIntToDoubleFunction} that first applies this function to its input, and then
* applies the {@code recover} operation if a {@code Throwable} is thrown from this one.
* @throws NullPointerException If given argument or the returned enclosing function is {@code null}
* @implSpec The implementation checks that the returned enclosing function from {@code recover} operation is not
* {@code null}. If it is, then a {@link NullPointerException} with appropriate message is thrown.
* @implNote If thrown {@code Throwable} is of type {@link Error}, it is thrown as-is and thus not passed to {@code
* recover} operation.
*/
@Nonnull
default BiObjIntToDoubleFunction<T, U> recover(
@Nonnull final Function<? super Throwable, ? extends BiObjIntToDoubleFunction<? super T, ? super U>> recover) {
Objects.requireNonNull(recover);
return (t, u, value) -> {
try {
return this.applyAsDoubleThrows(t, u, value);
} catch (Error e) {
throw e;
} catch (Throwable throwable) {
final BiObjIntToDoubleFunction<? super T, ? super U> function = recover.apply(throwable);
Objects.requireNonNull(function, () -> "recover returned null for " + throwable.getClass() + ": "
+ throwable.getMessage());
return function.applyAsDouble(t, u, value);
}
};
}
/**
* Returns a composed {@link BiObjIntToDoubleFunction} that applies this function to its input and sneakily throws
* the thrown {@link Throwable} from it, if it is not of type {@link RuntimeException} or {@link Error}. This means
* that each throwable thrown from the returned composed function behaves exactly the same as an <em>unchecked</em>
* throwable does. As a result, there is no need to handle the throwable of this function in the returned composed
     * function by either wrapping it in an <em>unchecked</em> throwable or declaring it in the {@code throws} clause,
     * as would be done in a non-sneaky-throwing function.
* <p>
     * What sneaky throwing simply does is fake out the compiler, and thus it bypasses the principle of
* <em>checked</em> throwables. On the JVM (class file) level, all throwables, checked or not, can be thrown
* regardless of the {@code throws} clause of methods, which is why this works at all.
* <p>
* However, when using this method to get a sneaky throwing function variant of this throwable function, the
* following advantages, disadvantages and limitations will apply:
* <p>
* If the calling-code is to handle the sneakily thrown throwable, it is required to add it to the {@code throws}
* clause of the method that applies the returned composed function. The compiler will not force the declaration in
* the {@code throws} clause anymore.
* <p>
* If the calling-code already handles the sneakily thrown throwable, the compiler requires it to be added to the
* {@code throws} clause of the method that applies the returned composed function. If not added, the compiler will
* error that the caught throwable is never thrown in the corresponding {@code try} block.
* <p>
* If the returned composed function is directly surrounded by a {@code try}-{@code catch} block to catch the
* sneakily thrown throwable from it, the compiler will error that the caught throwable is never thrown in the
* corresponding {@code try} block.
* <p>
     * In any case, if the throwable is not added to the {@code throws} clause of the method that applies the
* returned composed function, the calling-code won't be able to catch the throwable by name. It will bubble and
* probably be caught in some {@code catch} statement, catching a base type such as {@code try { ... }
* catch(RuntimeException e) { ... }} or {@code try { ... } catch(Exception e) { ... }}, but perhaps this is
* intended.
* <p>
* When the called code never throws the specific throwable that it declares, it should obviously be omitted. For
* example: {@code new String(byteArr, "UTF-8") throws UnsupportedEncodingException}, but {@code UTF-8} is
* guaranteed by the Java specification to be always present. Here, the {@code throws} declaration is a nuisance and
* any solution to silence it with minimal boilerplate is welcome. The throwable should therefore be omitted in the
* {@code throws} clause of the method that applies the returned composed function.
* <p>
     * With all that mentioned, the following example will demonstrate this method's correct use:
* <pre>{@code
* // when called with illegal value ClassNotFoundException is thrown
* public Class<?> sneakyThrowingFunctionalInterface(final String className) throws ClassNotFoundException {
* return ThrowableFunction.of(Class::forName) // create the correct throwable functional interface
* .sneakyThrow() // create a non-throwable variant which is able to sneaky throw (this method)
* .apply(className); // apply non-throwable variant -> may sneaky throw a throwable
* }
*
     * // call the method which surrounds the sneaky throwing functional interface
* public void callingMethod() {
* try {
* final Class<?> clazz = sneakyThrowingFunctionalInterface("some illegal class name");
* // ... do something with clazz ...
* } catch(ClassNotFoundException e) {
* // ... do something with e ...
* }
* }
* }</pre>
* In conclusion, this somewhat contentious ability should be used carefully, of course, with the advantages,
* disadvantages and limitations described above kept in mind.
*
* @return A composed {@link BiObjIntToDoubleFunction} that applies this function to its input and sneakily throws
* the thrown {@link Throwable} from it, unless it is of type {@link RuntimeException} or {@link Error}.
* @implNote If thrown {@link Throwable} is of type {@link RuntimeException} or {@link Error}, it is thrown as-is
* and thus not sneakily thrown.
*/
@Nonnull
default BiObjIntToDoubleFunction<T, U> sneakyThrow() {
return (t, u, value) -> {
try {
return this.applyAsDoubleThrows(t, u, value);
} catch (RuntimeException | Error e) {
throw e;
} catch (Throwable throwable) {
throw ThrowableUtils.sneakyThrow(throwable);
}
};
}
}
| Gridtec/lambda4j | lambda4j/src-gen/main/java/at/gridtec/lambda4j/function/tri/obj/ThrowableBiObjIntToDoubleFunction.java | Java | apache-2.0 | 52,515
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.directory.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Contains the results of the <a>GetSnapshotLimits</a> operation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/GetSnapshotLimits" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetSnapshotLimitsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
* </p>
*/
private SnapshotLimits snapshotLimits;
/**
* <p>
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
* </p>
*
* @param snapshotLimits
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
*/
public void setSnapshotLimits(SnapshotLimits snapshotLimits) {
this.snapshotLimits = snapshotLimits;
}
/**
* <p>
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
* </p>
*
* @return A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
*/
public SnapshotLimits getSnapshotLimits() {
return this.snapshotLimits;
}
/**
* <p>
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
* </p>
*
* @param snapshotLimits
* A <a>SnapshotLimits</a> object that contains the manual snapshot limits for the specified directory.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetSnapshotLimitsResult withSnapshotLimits(SnapshotLimits snapshotLimits) {
setSnapshotLimits(snapshotLimits);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSnapshotLimits() != null)
sb.append("SnapshotLimits: ").append(getSnapshotLimits());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetSnapshotLimitsResult == false)
return false;
GetSnapshotLimitsResult other = (GetSnapshotLimitsResult) obj;
if (other.getSnapshotLimits() == null ^ this.getSnapshotLimits() == null)
return false;
if (other.getSnapshotLimits() != null && other.getSnapshotLimits().equals(this.getSnapshotLimits()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSnapshotLimits() == null) ? 0 : getSnapshotLimits().hashCode());
return hashCode;
}
@Override
public GetSnapshotLimitsResult clone() {
try {
return (GetSnapshotLimitsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| dagnir/aws-sdk-java | aws-java-sdk-directory/src/main/java/com/amazonaws/services/directory/model/GetSnapshotLimitsResult.java | Java | apache-2.0 | 4,338 |
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.commons.datetime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
import java.util.Locale;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.ThreadSafe;
import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotation.DevelopersNote;
import com.helger.commons.annotation.Nonempty;
import com.helger.commons.hashcode.HashCodeGenerator;
import com.helger.commons.hashcode.IHashCodeGenerator;
import com.helger.commons.string.ToStringGenerator;
/**
* This class encapsulates a String pattern and a set of options to be used in
* parsing.
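 * <p>
 * A minimal usage sketch (the pattern and date shown are arbitrary examples):
 *
 * <pre>{@code
 * DateTimeFormatterPattern aPattern = new DateTimeFormatterPattern ("uuuu-MM-dd", ResolverStyle.STRICT);
 * LocalDate aDate = LocalDate.parse ("2021-01-31", aPattern.getAsFormatter ());
 * }</pre>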
*
* @author Philip Helger
*/
@ThreadSafe
@DevelopersNote ("The mutable m_nHashCode does not contradict thread safety")
public final class DateTimeFormatterPattern
{
private final String m_sPattern;
private final ResolverStyle m_eResolverStyle;
private final DateTimeFormatter m_aFormatter;
// Status vars
private int m_nHashCode = IHashCodeGenerator.ILLEGAL_HASHCODE;
public DateTimeFormatterPattern (@Nonnull @Nonempty final String sPattern, @Nonnull final ResolverStyle eResolverStyle)
{
    ValueEnforcer.notEmpty (sPattern, "Pattern");
ValueEnforcer.notNull (eResolverStyle, "ResolverStyle");
m_sPattern = sPattern;
m_eResolverStyle = eResolverStyle;
m_aFormatter = new DateTimeFormatterBuilder ().appendPattern (sPattern)
.toFormatter (Locale.getDefault (Locale.Category.FORMAT))
.withResolverStyle (m_eResolverStyle);
}
/**
* @return The source pattern string. Neither <code>null</code> nor empty.
*/
@Nonnull
@Nonempty
public String getPattern ()
{
return m_sPattern;
}
/**
* @return The resolver style as passed in the constructor. Never
* <code>null</code>.
*/
@Nonnull
public ResolverStyle getResolverStyle ()
{
return m_eResolverStyle;
}
/**
* @return The precompiled formatter. Never <code>null</code>.
*/
@Nonnull
public DateTimeFormatter getAsFormatter ()
{
return m_aFormatter;
}
@Override
public boolean equals (final Object o)
{
if (o == this)
return true;
if (o == null || !getClass ().equals (o.getClass ()))
return false;
final DateTimeFormatterPattern rhs = (DateTimeFormatterPattern) o;
// m_aFormatter is a state variable
return m_sPattern.equals (rhs.m_sPattern) && m_eResolverStyle.equals (rhs.m_eResolverStyle);
}
@Override
public int hashCode ()
{
int ret = m_nHashCode;
if (ret == IHashCodeGenerator.ILLEGAL_HASHCODE)
ret = m_nHashCode = new HashCodeGenerator (this).append (m_sPattern).append (m_eResolverStyle).getHashCode ();
return ret;
}
@Override
public String toString ()
{
return new ToStringGenerator (this).append ("Pattern", m_sPattern).append ("ResolverStyle", m_eResolverStyle).getToString ();
}
}
| phax/ph-commons | ph-commons/src/main/java/com/helger/commons/datetime/DateTimeFormatterPattern.java | Java | apache-2.0 | 3,661 |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.file.remote;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.component.file.remote.FtpEndpointConfigurer;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class FtpsEndpointConfigurer extends FtpEndpointConfigurer implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("host", java.lang.String.class);
map.put("port", int.class);
map.put("directoryName", java.lang.String.class);
map.put("binary", boolean.class);
map.put("charset", java.lang.String.class);
map.put("disconnect", boolean.class);
map.put("doneFileName", java.lang.String.class);
map.put("fileName", java.lang.String.class);
map.put("passiveMode", boolean.class);
map.put("separator", org.apache.camel.component.file.remote.RemoteFileConfiguration.PathSeparator.class);
map.put("transferLoggingIntervalSeconds", int.class);
map.put("transferLoggingLevel", org.apache.camel.LoggingLevel.class);
map.put("transferLoggingVerbose", boolean.class);
map.put("fastExistsCheck", boolean.class);
map.put("bridgeErrorHandler", boolean.class);
map.put("delete", boolean.class);
map.put("moveFailed", java.lang.String.class);
map.put("noop", boolean.class);
map.put("preMove", java.lang.String.class);
map.put("preSort", boolean.class);
map.put("recursive", boolean.class);
map.put("resumeDownload", boolean.class);
map.put("sendEmptyMessageWhenIdle", boolean.class);
map.put("streamDownload", boolean.class);
map.put("download", boolean.class);
map.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
map.put("exchangePattern", org.apache.camel.ExchangePattern.class);
map.put("handleDirectoryParserAbsoluteResult", boolean.class);
map.put("ignoreFileNotFoundOrPermissionError", boolean.class);
map.put("inProgressRepository", org.apache.camel.spi.IdempotentRepository.class);
map.put("localWorkDirectory", java.lang.String.class);
map.put("onCompletionExceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
map.put("pollStrategy", org.apache.camel.spi.PollingConsumerPollStrategy.class);
map.put("processStrategy", org.apache.camel.component.file.GenericFileProcessStrategy.class);
map.put("useList", boolean.class);
map.put("fileExist", org.apache.camel.component.file.GenericFileExist.class);
map.put("flatten", boolean.class);
map.put("jailStartingDirectory", boolean.class);
map.put("lazyStartProducer", boolean.class);
map.put("moveExisting", java.lang.String.class);
map.put("tempFileName", java.lang.String.class);
map.put("tempPrefix", java.lang.String.class);
map.put("allowNullBody", boolean.class);
map.put("chmod", java.lang.String.class);
map.put("disconnectOnBatchComplete", boolean.class);
map.put("eagerDeleteTargetFile", boolean.class);
map.put("keepLastModified", boolean.class);
map.put("moveExistingFileStrategy", org.apache.camel.component.file.strategy.FileMoveExistingStrategy.class);
map.put("sendNoop", boolean.class);
map.put("activePortRange", java.lang.String.class);
map.put("autoCreate", boolean.class);
map.put("basicPropertyBinding", boolean.class);
map.put("bufferSize", int.class);
map.put("connectTimeout", int.class);
map.put("ftpClient", org.apache.commons.net.ftp.FTPClient.class);
map.put("ftpClientConfig", org.apache.commons.net.ftp.FTPClientConfig.class);
map.put("ftpClientConfigParameters", java.util.Map.class);
map.put("ftpClientParameters", java.util.Map.class);
map.put("maximumReconnectAttempts", int.class);
map.put("reconnectDelay", long.class);
map.put("siteCommand", java.lang.String.class);
map.put("soTimeout", int.class);
map.put("stepwise", boolean.class);
map.put("synchronous", boolean.class);
map.put("throwExceptionOnConnectFailed", boolean.class);
map.put("timeout", int.class);
map.put("antExclude", java.lang.String.class);
map.put("antFilterCaseSensitive", boolean.class);
map.put("antInclude", java.lang.String.class);
map.put("eagerMaxMessagesPerPoll", boolean.class);
map.put("exclude", java.lang.String.class);
map.put("filter", org.apache.camel.component.file.GenericFileFilter.class);
map.put("filterDirectory", java.lang.String.class);
map.put("filterFile", java.lang.String.class);
map.put("idempotent", java.lang.Boolean.class);
map.put("idempotentKey", java.lang.String.class);
map.put("idempotentRepository", org.apache.camel.spi.IdempotentRepository.class);
map.put("include", java.lang.String.class);
map.put("maxDepth", int.class);
map.put("maxMessagesPerPoll", int.class);
map.put("minDepth", int.class);
map.put("move", java.lang.String.class);
map.put("exclusiveReadLockStrategy", org.apache.camel.component.file.GenericFileExclusiveReadLockStrategy.class);
map.put("readLock", java.lang.String.class);
map.put("readLockCheckInterval", long.class);
map.put("readLockDeleteOrphanLockFiles", boolean.class);
map.put("readLockLoggingLevel", org.apache.camel.LoggingLevel.class);
map.put("readLockMarkerFile", boolean.class);
map.put("readLockMinAge", long.class);
map.put("readLockMinLength", long.class);
map.put("readLockRemoveOnCommit", boolean.class);
map.put("readLockRemoveOnRollback", boolean.class);
map.put("readLockTimeout", long.class);
map.put("backoffErrorThreshold", int.class);
map.put("backoffIdleThreshold", int.class);
map.put("backoffMultiplier", int.class);
map.put("delay", long.class);
map.put("greedy", boolean.class);
map.put("initialDelay", long.class);
map.put("repeatCount", long.class);
map.put("runLoggingLevel", org.apache.camel.LoggingLevel.class);
map.put("scheduledExecutorService", java.util.concurrent.ScheduledExecutorService.class);
map.put("scheduler", java.lang.Object.class);
map.put("schedulerProperties", java.util.Map.class);
map.put("startScheduler", boolean.class);
map.put("timeUnit", java.util.concurrent.TimeUnit.class);
map.put("useFixedDelay", boolean.class);
map.put("account", java.lang.String.class);
map.put("disableSecureDataChannelDefaults", boolean.class);
map.put("execPbsz", java.lang.Long.class);
map.put("execProt", java.lang.String.class);
map.put("ftpClientKeyStoreParameters", java.util.Map.class);
map.put("ftpClientTrustStoreParameters", java.util.Map.class);
map.put("implicit", boolean.class);
map.put("password", java.lang.String.class);
map.put("securityProtocol", java.lang.String.class);
map.put("sslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class);
map.put("username", java.lang.String.class);
map.put("shuffle", boolean.class);
map.put("sortBy", java.lang.String.class);
map.put("sorter", java.util.Comparator.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
FtpsEndpoint target = (FtpsEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "disablesecuredatachanneldefaults":
case "disableSecureDataChannelDefaults": target.getConfiguration().setDisableSecureDataChannelDefaults(property(camelContext, boolean.class, value)); return true;
case "execpbsz":
case "execPbsz": target.getConfiguration().setExecPbsz(property(camelContext, java.lang.Long.class, value)); return true;
case "execprot":
case "execProt": target.getConfiguration().setExecProt(property(camelContext, java.lang.String.class, value)); return true;
case "ftpclientkeystoreparameters":
case "ftpClientKeyStoreParameters": target.setFtpClientKeyStoreParameters(property(camelContext, java.util.Map.class, value)); return true;
case "ftpclienttruststoreparameters":
case "ftpClientTrustStoreParameters": target.setFtpClientTrustStoreParameters(property(camelContext, java.util.Map.class, value)); return true;
case "implicit": target.getConfiguration().setImplicit(property(camelContext, boolean.class, value)); return true;
case "securityprotocol":
case "securityProtocol": target.getConfiguration().setSecurityProtocol(property(camelContext, java.lang.String.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
default: return super.configure(camelContext, obj, name, value, ignoreCase);
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
FtpsEndpoint target = (FtpsEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "disablesecuredatachanneldefaults":
case "disableSecureDataChannelDefaults": return target.getConfiguration().isDisableSecureDataChannelDefaults();
case "execpbsz":
case "execPbsz": return target.getConfiguration().getExecPbsz();
case "execprot":
case "execProt": return target.getConfiguration().getExecProt();
case "ftpclientkeystoreparameters":
case "ftpClientKeyStoreParameters": return target.getFtpClientKeyStoreParameters();
case "ftpclienttruststoreparameters":
case "ftpClientTrustStoreParameters": return target.getFtpClientTrustStoreParameters();
case "implicit": return target.getConfiguration().isImplicit();
case "securityprotocol":
case "securityProtocol": return target.getConfiguration().getSecurityProtocol();
case "sslcontextparameters":
case "sslContextParameters": return target.getSslContextParameters();
default: return super.getOptionValue(obj, name, ignoreCase);
}
}
}
| alvinkwekel/camel | components/camel-ftp/src/generated/java/org/apache/camel/component/file/remote/FtpsEndpointConfigurer.java | Java | apache-2.0 | 11,017 |
package net.avantica.xinef.dapp;
import android.app.Fragment;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import net.avantica.xinef.dapp.view.activity.ProjectDetailActivity;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    // Launches ProjectDetailActivity for each test; JUnit requires @Rule fields to be public.
    @Rule
    public ActivityTestRule<ProjectDetailActivity> projectDetailActivityRule =
            new ActivityTestRule<>(ProjectDetailActivity.class);
@Test
public void useAppContext() throws Exception {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getTargetContext();
assertEquals("net.avantica.xinef.dapp", appContext.getPackageName());
}
@Test
public void testContainsUserDetailsFragment() {
        Fragment projectDetailFragment = projectDetailActivityRule.getActivity()
                .getFragmentManager().findFragmentById(R.id.container);
assertThat(projectDetailFragment, is(notNullValue()));
}
}
| avantica-peru/dapp-mobile | dapp/presentation/src/androidTest/java/net/avantica/xinef/dapp/ExampleInstrumentedTest.java | Java | apache-2.0 | 1,353 |
package com.github.quinn.iconlibrary.icons;
import com.github.quinn.iconlibrary.utils.TypefaceManager.IconicTypeface;
public enum OctIcon implements Icon {
STAR(0xf02a),
FORK(0xf002),
FILE(0xf011),
FOLDER(0xf016),
SEARCH(0xf02e),
BRANCH(0xf020),
EMAIL(0xf03b),
LOCATE(0xf060),
COMPANY(0xf037),
BLOG(0xf05C),
JOIN(0xf046),
REPO(0xf001),
PUSH(0xf01f),
COMMIT(0xf07e),
CODE(0xf05f),
ISSUE(0xf026),
TAG(0xf015);
private final int mIconUtfValue;
private OctIcon(int iconUtfValue) {
mIconUtfValue = iconUtfValue;
}
@Override
public IconicTypeface getIconicTypeface() {
return IconicTypeface.OCTICON;
}
@Override
public int getIconUtfValue() {
return mIconUtfValue;
}
}
| knightingal/WeGit | iconlibrary/src/main/java/com/github/quinn/iconlibrary/icons/OctIcon.java | Java | apache-2.0 | 774 |
/**
* Copyright (c) 2012-2021 WebGate Consulting AG and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.webgate.dominoext.poi.component.data.ss.cell;
public interface ICellValue {
public Object getValue();
}
| OpenNTF/POI4Xpages | poi4xpages/bundles/biz.webgate.dominoext.poi/src/biz/webgate/dominoext/poi/component/data/ss/cell/ICellValue.java | Java | apache-2.0 | 745 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.waiters;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.waiters.WaiterAcceptor;
import com.amazonaws.waiters.WaiterState;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.jmespath.*;
import javax.annotation.Generated;
@SdkInternalApi
@Generated("com.amazonaws:aws-java-sdk-code-generator")
class PolicyExists {
static class IsNoSuchEntityMatcher extends WaiterAcceptor<GetPolicyResult> {
/**
* Takes the response exception and determines whether this exception matches the expected exception, by
* comparing the respective error codes.
*
* @param e
* Response Exception
* @return True if it matches, False otherwise
*/
@Override
public boolean matches(AmazonServiceException e) {
return "NoSuchEntity".equals(e.getErrorCode());
}
/**
* Represents the current waiter state in the case where resource state matches the expected state
*
* @return Corresponding state of the waiter
*/
@Override
public WaiterState getState() {
return WaiterState.RETRY;
}
}
}
| aws/aws-sdk-java | aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/waiters/PolicyExists.java | Java | apache-2.0 | 1,895 |
/*******************************************************************************
* Copyright (c) 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Zend Technologies
*******************************************************************************/
package org.eclipse.php.internal.debug.core.pathmapper;
import org.eclipse.osgi.util.NLS;
public class Messages extends NLS {
private static final String BUNDLE_NAME = "org.eclipse.php.internal.debug.core.pathmapper.messages"; //$NON-NLS-1$
public static String DebugSearchEngine_0;
public static String LocalFileSearchEngine_Searching_for_local_file;
public static String PathMapper_MappingSource_Environment_Name;
public static String PathMapper_MappingSource_Unknown_Name;
public static String PathMapper_MappingSource_User_Name;
static {
// initialize resource bundle
NLS.initializeMessages(BUNDLE_NAME, Messages.class);
}
private Messages() {
}
}
| vovagrechka/fucking-everything | phizdets/phizdets-idea/eclipse-src/org.eclipse.php.debug.core/src/org/eclipse/php/internal/debug/core/pathmapper/Messages.java | Java | apache-2.0 | 1,262 |
/**
* Copyright (C) 2012 Ness Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nesscomputing.jackson;
import com.fasterxml.jackson.databind.Module;
import com.google.inject.Binder;
import com.google.inject.TypeLiteral;
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.multibindings.MapBinder;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
public final class NessObjectMapperBinder
{
public static final String JACKSON_NAME = "_jackson";
public static final Named JACKSON_NAMED = Names.named(JACKSON_NAME);
private NessObjectMapperBinder()
{
}
/**
* Bind a Jackson module to the object mapper.
*/
public static LinkedBindingBuilder<Module> bindJacksonModule(final Binder binder)
{
final Multibinder<Module> moduleBinder = Multibinder.newSetBinder(binder, Module.class, JACKSON_NAMED);
return moduleBinder.addBinding();
}
    /**
     * Set a Jackson feature on the object mapper.
     *
     * <p>See {@link JsonGenerator.Feature}, {@link JsonParser.Feature}, {@link SerializationConfig.Feature}
     * and {@link DeserializationConfig.Feature} for available features.
     */
public static LinkedBindingBuilder<Boolean> bindJacksonOption(final Binder binder, final Enum<?> option)
{
final MapBinder<Enum<?>, Boolean> optionBinder = MapBinder.newMapBinder(binder, new TypeLiteral<Enum<?>>() {}, new TypeLiteral<Boolean>() {}, JACKSON_NAMED);
return optionBinder.addBinding(option);
}
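    // Illustrative sketch only (not part of this API): how these binders would typically be used
    // from a Guice module. The module class name and the chosen Jackson module/feature below are
    // assumptions for the example, not something prescribed by this class.
    //
    //     public class MyJsonModule extends AbstractModule {
    //         @Override
    //         protected void configure() {
    //             NessObjectMapperBinder.bindJacksonModule(binder()).to(JodaModule.class);
    //             NessObjectMapperBinder.bindJacksonOption(binder(), SerializationFeature.INDENT_OUTPUT)
    //                 .toInstance(Boolean.TRUE);
    //         }
    //     }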
}
| NessComputing/components-ness-jackson | src/main/java/com/nesscomputing/jackson/NessObjectMapperBinder.java | Java | apache-2.0 | 2,120 |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.ads;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.CompositeMediaSource;
import com.google.android.exoplayer2.source.LoadEventInfo;
import com.google.android.exoplayer2.source.MaskingMediaPeriod;
import com.google.android.exoplayer2.source.MediaLoadData;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSourceEventListener;
import com.google.android.exoplayer2.source.MediaSourceFactory;
import com.google.android.exoplayer2.ui.AdViewProvider;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.checkerframework.checker.nullness.compatqual.NullableType;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A {@link MediaSource} that inserts ads linearly into a provided content media source.
*
* <p>The wrapped content media source must contain a single {@link Timeline.Period}.
*/
public final class AdsMediaSource extends CompositeMediaSource<MediaPeriodId> {
/**
* Wrapper for exceptions that occur while loading ads, which are notified via {@link
* MediaSourceEventListener#onLoadError(int, MediaPeriodId, LoadEventInfo, MediaLoadData,
* IOException, boolean)}.
*/
public static final class AdLoadException extends IOException {
/**
* Types of ad load exceptions. One of {@link #TYPE_AD}, {@link #TYPE_AD_GROUP}, {@link
* #TYPE_ALL_ADS} or {@link #TYPE_UNEXPECTED}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({TYPE_AD, TYPE_AD_GROUP, TYPE_ALL_ADS, TYPE_UNEXPECTED})
public @interface Type {}
/** Type for when an ad failed to load. The ad will be skipped. */
public static final int TYPE_AD = 0;
/** Type for when an ad group failed to load. The ad group will be skipped. */
public static final int TYPE_AD_GROUP = 1;
/** Type for when all ad groups failed to load. All ads will be skipped. */
public static final int TYPE_ALL_ADS = 2;
/** Type for when an unexpected error occurred while loading ads. All ads will be skipped. */
public static final int TYPE_UNEXPECTED = 3;
/** Returns a new ad load exception of {@link #TYPE_AD}. */
public static AdLoadException createForAd(Exception error) {
return new AdLoadException(TYPE_AD, error);
}
/** Returns a new ad load exception of {@link #TYPE_AD_GROUP}. */
public static AdLoadException createForAdGroup(Exception error, int adGroupIndex) {
return new AdLoadException(
TYPE_AD_GROUP, new IOException("Failed to load ad group " + adGroupIndex, error));
}
/** Returns a new ad load exception of {@link #TYPE_ALL_ADS}. */
public static AdLoadException createForAllAds(Exception error) {
return new AdLoadException(TYPE_ALL_ADS, error);
}
/** Returns a new ad load exception of {@link #TYPE_UNEXPECTED}. */
public static AdLoadException createForUnexpected(RuntimeException error) {
return new AdLoadException(TYPE_UNEXPECTED, error);
}
/** The {@link Type} of the ad load exception. */
public final @Type int type;
private AdLoadException(@Type int type, Exception cause) {
super(cause);
this.type = type;
}
/**
* Returns the {@link RuntimeException} that caused the exception if its type is {@link
* #TYPE_UNEXPECTED}.
*/
public RuntimeException getRuntimeExceptionForUnexpected() {
Assertions.checkState(type == TYPE_UNEXPECTED);
return (RuntimeException) checkNotNull(getCause());
}
}
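  // Illustrative sketch only: how application code might typically inspect these exceptions in a
  // MediaSourceEventListener.onLoadError callback. The listener shown here is an example pattern,
  // not something defined in this file.
  //
  //     @Override
  //     public void onLoadError(int windowIndex, @Nullable MediaPeriodId mediaPeriodId,
  //         LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData, IOException error,
  //         boolean wasCanceled) {
  //       if (error instanceof AdsMediaSource.AdLoadException) {
  //         AdsMediaSource.AdLoadException adError = (AdsMediaSource.AdLoadException) error;
  //         if (adError.type == AdsMediaSource.AdLoadException.TYPE_UNEXPECTED) {
  //           throw adError.getRuntimeExceptionForUnexpected();
  //         }
  //         // Other types (TYPE_AD, TYPE_AD_GROUP, TYPE_ALL_ADS) are recoverable: the affected
  //         // ads are skipped and playback continues.
  //       }
  //     }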
// Used to identify the content "child" source for CompositeMediaSource.
private static final MediaPeriodId CHILD_SOURCE_MEDIA_PERIOD_ID =
new MediaPeriodId(/* periodUid= */ new Object());
private final MediaSource contentMediaSource;
private final MediaSourceFactory adMediaSourceFactory;
private final AdsLoader adsLoader;
private final AdViewProvider adViewProvider;
private final DataSpec adTagDataSpec;
private final Object adsId;
private final Handler mainHandler;
private final Timeline.Period period;
// Accessed on the player thread.
@Nullable private ComponentListener componentListener;
@Nullable private Timeline contentTimeline;
@Nullable private AdPlaybackState adPlaybackState;
private @NullableType AdMediaSourceHolder[][] adMediaSourceHolders;
/**
* Constructs a new source that inserts ads linearly with the content specified by {@code
* contentMediaSource}.
*
* @param contentMediaSource The {@link MediaSource} providing the content to play.
* @param adTagDataSpec The data specification of the ad tag to load.
* @param adsId An opaque identifier for ad playback state associated with this instance. Ad
* loading and playback state is shared among all playlist items that have the same ads id (by
* {@link Object#equals(Object) equality}), so it is important to pass the same identifiers
* when constructing playlist items each time the player returns to the foreground.
* @param adMediaSourceFactory Factory for media sources used to load ad media.
* @param adsLoader The loader for ads.
* @param adViewProvider Provider of views for the ad UI.
*/
public AdsMediaSource(
MediaSource contentMediaSource,
DataSpec adTagDataSpec,
Object adsId,
MediaSourceFactory adMediaSourceFactory,
AdsLoader adsLoader,
AdViewProvider adViewProvider) {
this.contentMediaSource = contentMediaSource;
this.adMediaSourceFactory = adMediaSourceFactory;
this.adsLoader = adsLoader;
this.adViewProvider = adViewProvider;
this.adTagDataSpec = adTagDataSpec;
this.adsId = adsId;
mainHandler = new Handler(Looper.getMainLooper());
period = new Timeline.Period();
adMediaSourceHolders = new AdMediaSourceHolder[0][];
adsLoader.setSupportedContentTypes(adMediaSourceFactory.getSupportedTypes());
}
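  // Illustrative sketch only: one way calling code typically assembles this source. The data
  // source factory, ad tag URI, ads loader and player view names below are assumptions for the
  // example and are not defined in this file.
  //
  //     MediaSource contentSource =
  //         new ProgressiveMediaSource.Factory(dataSourceFactory)
  //             .createMediaSource(MediaItem.fromUri(contentUri));
  //     AdsMediaSource adsMediaSource =
  //         new AdsMediaSource(
  //             contentSource,
  //             new DataSpec(adTagUri),
  //             /* adsId= */ adTagUri,
  //             new DefaultMediaSourceFactory(dataSourceFactory),
  //             adsLoader,
  //             /* adViewProvider= */ playerView);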
@Override
public MediaItem getMediaItem() {
return contentMediaSource.getMediaItem();
}
@Override
protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) {
super.prepareSourceInternal(mediaTransferListener);
ComponentListener componentListener = new ComponentListener();
this.componentListener = componentListener;
prepareChildSource(CHILD_SOURCE_MEDIA_PERIOD_ID, contentMediaSource);
mainHandler.post(
() ->
adsLoader.start(
/* adsMediaSource= */ this,
adTagDataSpec,
adsId,
adViewProvider,
componentListener));
}
@Override
public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) {
AdPlaybackState adPlaybackState = checkNotNull(this.adPlaybackState);
if (adPlaybackState.adGroupCount > 0 && id.isAd()) {
int adGroupIndex = id.adGroupIndex;
int adIndexInAdGroup = id.adIndexInAdGroup;
if (adMediaSourceHolders[adGroupIndex].length <= adIndexInAdGroup) {
int adCount = adIndexInAdGroup + 1;
adMediaSourceHolders[adGroupIndex] =
Arrays.copyOf(adMediaSourceHolders[adGroupIndex], adCount);
}
@Nullable
AdMediaSourceHolder adMediaSourceHolder =
adMediaSourceHolders[adGroupIndex][adIndexInAdGroup];
if (adMediaSourceHolder == null) {
adMediaSourceHolder = new AdMediaSourceHolder(id);
adMediaSourceHolders[adGroupIndex][adIndexInAdGroup] = adMediaSourceHolder;
maybeUpdateAdMediaSources();
}
return adMediaSourceHolder.createMediaPeriod(id, allocator, startPositionUs);
} else {
MaskingMediaPeriod mediaPeriod = new MaskingMediaPeriod(id, allocator, startPositionUs);
mediaPeriod.setMediaSource(contentMediaSource);
mediaPeriod.createPeriod(id);
return mediaPeriod;
}
}
@Override
public void releasePeriod(MediaPeriod mediaPeriod) {
MaskingMediaPeriod maskingMediaPeriod = (MaskingMediaPeriod) mediaPeriod;
MediaPeriodId id = maskingMediaPeriod.id;
if (id.isAd()) {
AdMediaSourceHolder adMediaSourceHolder =
checkNotNull(adMediaSourceHolders[id.adGroupIndex][id.adIndexInAdGroup]);
adMediaSourceHolder.releaseMediaPeriod(maskingMediaPeriod);
if (adMediaSourceHolder.isInactive()) {
adMediaSourceHolder.release();
adMediaSourceHolders[id.adGroupIndex][id.adIndexInAdGroup] = null;
}
} else {
maskingMediaPeriod.releasePeriod();
}
}
@Override
protected void releaseSourceInternal() {
super.releaseSourceInternal();
ComponentListener componentListener = checkNotNull(this.componentListener);
this.componentListener = null;
componentListener.stop();
contentTimeline = null;
adPlaybackState = null;
adMediaSourceHolders = new AdMediaSourceHolder[0][];
mainHandler.post(() -> adsLoader.stop(/* adsMediaSource= */ this, componentListener));
}
@Override
protected void onChildSourceInfoRefreshed(
MediaPeriodId mediaPeriodId, MediaSource mediaSource, Timeline timeline) {
if (mediaPeriodId.isAd()) {
int adGroupIndex = mediaPeriodId.adGroupIndex;
int adIndexInAdGroup = mediaPeriodId.adIndexInAdGroup;
checkNotNull(adMediaSourceHolders[adGroupIndex][adIndexInAdGroup])
.handleSourceInfoRefresh(timeline);
} else {
Assertions.checkArgument(timeline.getPeriodCount() == 1);
contentTimeline = timeline;
}
maybeUpdateSourceInfo();
}
@Override
protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId(
MediaPeriodId childId, MediaPeriodId mediaPeriodId) {
// The child id for the content period is just CHILD_SOURCE_MEDIA_PERIOD_ID. That's why
// we need to forward the reported mediaPeriodId in this case.
return childId.isAd() ? childId : mediaPeriodId;
}
// Internal methods.
private void onAdPlaybackState(AdPlaybackState adPlaybackState) {
if (this.adPlaybackState == null) {
adMediaSourceHolders = new AdMediaSourceHolder[adPlaybackState.adGroupCount][];
Arrays.fill(adMediaSourceHolders, new AdMediaSourceHolder[0]);
} else {
checkState(adPlaybackState.adGroupCount == this.adPlaybackState.adGroupCount);
}
this.adPlaybackState = adPlaybackState;
maybeUpdateAdMediaSources();
maybeUpdateSourceInfo();
}
/**
* Initializes any {@link AdMediaSourceHolder AdMediaSourceHolders} where the ad media URI is
* newly known.
*/
private void maybeUpdateAdMediaSources() {
@Nullable AdPlaybackState adPlaybackState = this.adPlaybackState;
if (adPlaybackState == null) {
return;
}
for (int adGroupIndex = 0; adGroupIndex < adMediaSourceHolders.length; adGroupIndex++) {
for (int adIndexInAdGroup = 0;
adIndexInAdGroup < this.adMediaSourceHolders[adGroupIndex].length;
adIndexInAdGroup++) {
@Nullable
AdMediaSourceHolder adMediaSourceHolder =
this.adMediaSourceHolders[adGroupIndex][adIndexInAdGroup];
AdPlaybackState.AdGroup adGroup = adPlaybackState.getAdGroup(adGroupIndex);
if (adMediaSourceHolder != null
&& !adMediaSourceHolder.hasMediaSource()
&& adIndexInAdGroup < adGroup.uris.length) {
@Nullable Uri adUri = adGroup.uris[adIndexInAdGroup];
if (adUri != null) {
MediaItem.Builder adMediaItem = new MediaItem.Builder().setUri(adUri);
// Propagate the content's DRM config into the ad media source.
@Nullable
MediaItem.LocalConfiguration contentLocalConfiguration =
contentMediaSource.getMediaItem().localConfiguration;
if (contentLocalConfiguration != null) {
adMediaItem.setDrmConfiguration(contentLocalConfiguration.drmConfiguration);
}
MediaSource adMediaSource = adMediaSourceFactory.createMediaSource(adMediaItem.build());
adMediaSourceHolder.initializeWithMediaSource(adMediaSource, adUri);
}
}
}
}
}
private void maybeUpdateSourceInfo() {
@Nullable Timeline contentTimeline = this.contentTimeline;
if (adPlaybackState != null && contentTimeline != null) {
if (adPlaybackState.adGroupCount == 0) {
refreshSourceInfo(contentTimeline);
} else {
adPlaybackState = adPlaybackState.withAdDurationsUs(getAdDurationsUs());
refreshSourceInfo(new SinglePeriodAdTimeline(contentTimeline, adPlaybackState));
}
}
}
private long[][] getAdDurationsUs() {
long[][] adDurationsUs = new long[adMediaSourceHolders.length][];
for (int i = 0; i < adMediaSourceHolders.length; i++) {
adDurationsUs[i] = new long[adMediaSourceHolders[i].length];
for (int j = 0; j < adMediaSourceHolders[i].length; j++) {
@Nullable AdMediaSourceHolder holder = adMediaSourceHolders[i][j];
adDurationsUs[i][j] = holder == null ? C.TIME_UNSET : holder.getDurationUs();
}
}
return adDurationsUs;
}
/** Listener for component events. All methods are called on the main thread. */
private final class ComponentListener implements AdsLoader.EventListener {
private final Handler playerHandler;
private volatile boolean stopped;
/**
* Creates new listener which forwards ad playback states on the creating thread and all other
* events on the external event listener thread.
*/
public ComponentListener() {
playerHandler = Util.createHandlerForCurrentLooper();
}
/** Stops event delivery from this instance. */
public void stop() {
stopped = true;
playerHandler.removeCallbacksAndMessages(null);
}
@Override
public void onAdPlaybackState(final AdPlaybackState adPlaybackState) {
if (stopped) {
return;
}
playerHandler.post(
() -> {
if (stopped) {
return;
}
AdsMediaSource.this.onAdPlaybackState(adPlaybackState);
});
}
@Override
public void onAdLoadError(final AdLoadException error, DataSpec dataSpec) {
if (stopped) {
return;
}
createEventDispatcher(/* mediaPeriodId= */ null)
.loadError(
new LoadEventInfo(
LoadEventInfo.getNewId(),
dataSpec,
/* elapsedRealtimeMs= */ SystemClock.elapsedRealtime()),
C.DATA_TYPE_AD,
error,
/* wasCanceled= */ true);
}
}
private final class AdPrepareListener implements MaskingMediaPeriod.PrepareListener {
private final Uri adUri;
public AdPrepareListener(Uri adUri) {
this.adUri = adUri;
}
@Override
public void onPrepareComplete(MediaPeriodId mediaPeriodId) {
mainHandler.post(
() ->
adsLoader.handlePrepareComplete(
/* adsMediaSource= */ AdsMediaSource.this,
mediaPeriodId.adGroupIndex,
mediaPeriodId.adIndexInAdGroup));
}
@Override
public void onPrepareError(MediaPeriodId mediaPeriodId, IOException exception) {
createEventDispatcher(mediaPeriodId)
.loadError(
new LoadEventInfo(
LoadEventInfo.getNewId(),
new DataSpec(adUri),
/* elapsedRealtimeMs= */ SystemClock.elapsedRealtime()),
C.DATA_TYPE_AD,
AdLoadException.createForAd(exception),
/* wasCanceled= */ true);
mainHandler.post(
() ->
adsLoader.handlePrepareError(
/* adsMediaSource= */ AdsMediaSource.this,
mediaPeriodId.adGroupIndex,
mediaPeriodId.adIndexInAdGroup,
exception));
}
}
private final class AdMediaSourceHolder {
private final MediaPeriodId id;
private final List<MaskingMediaPeriod> activeMediaPeriods;
private @MonotonicNonNull Uri adUri;
private @MonotonicNonNull MediaSource adMediaSource;
private @MonotonicNonNull Timeline timeline;
public AdMediaSourceHolder(MediaPeriodId id) {
this.id = id;
activeMediaPeriods = new ArrayList<>();
}
public void initializeWithMediaSource(MediaSource adMediaSource, Uri adUri) {
this.adMediaSource = adMediaSource;
this.adUri = adUri;
for (int i = 0; i < activeMediaPeriods.size(); i++) {
MaskingMediaPeriod maskingMediaPeriod = activeMediaPeriods.get(i);
maskingMediaPeriod.setMediaSource(adMediaSource);
maskingMediaPeriod.setPrepareListener(new AdPrepareListener(adUri));
}
prepareChildSource(id, adMediaSource);
}
public MediaPeriod createMediaPeriod(
MediaPeriodId id, Allocator allocator, long startPositionUs) {
MaskingMediaPeriod maskingMediaPeriod =
new MaskingMediaPeriod(id, allocator, startPositionUs);
activeMediaPeriods.add(maskingMediaPeriod);
if (adMediaSource != null) {
maskingMediaPeriod.setMediaSource(adMediaSource);
maskingMediaPeriod.setPrepareListener(new AdPrepareListener(checkNotNull(adUri)));
}
if (timeline != null) {
Object periodUid = timeline.getUidOfPeriod(/* periodIndex= */ 0);
MediaPeriodId adSourceMediaPeriodId = new MediaPeriodId(periodUid, id.windowSequenceNumber);
maskingMediaPeriod.createPeriod(adSourceMediaPeriodId);
}
return maskingMediaPeriod;
}
public void handleSourceInfoRefresh(Timeline timeline) {
Assertions.checkArgument(timeline.getPeriodCount() == 1);
if (this.timeline == null) {
Object periodUid = timeline.getUidOfPeriod(/* periodIndex= */ 0);
for (int i = 0; i < activeMediaPeriods.size(); i++) {
MaskingMediaPeriod mediaPeriod = activeMediaPeriods.get(i);
MediaPeriodId adSourceMediaPeriodId =
new MediaPeriodId(periodUid, mediaPeriod.id.windowSequenceNumber);
mediaPeriod.createPeriod(adSourceMediaPeriodId);
}
}
this.timeline = timeline;
}
public long getDurationUs() {
return timeline == null
? C.TIME_UNSET
: timeline.getPeriod(/* periodIndex= */ 0, period).getDurationUs();
}
public void releaseMediaPeriod(MaskingMediaPeriod maskingMediaPeriod) {
activeMediaPeriods.remove(maskingMediaPeriod);
maskingMediaPeriod.releasePeriod();
}
public void release() {
if (hasMediaSource()) {
releaseChildSource(id);
}
}
public boolean hasMediaSource() {
return adMediaSource != null;
}
public boolean isInactive() {
return activeMediaPeriods.isEmpty();
}
}
}
| ened/ExoPlayer | library/core/src/main/java/com/google/android/exoplayer2/source/ads/AdsMediaSource.java | Java | apache-2.0 | 20,381 |
/*
* Copyright (C) 2014 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger2.internal.codegen;
import com.google.common.collect.Iterables;
import com.google.testing.compile.CompilationRule;
import dagger2.Lazy;
import dagger2.MembersInjector;
import dagger2.Module;
import dagger2.Provides;
import dagger2.producers.Produced;
import dagger2.producers.Producer;
import dagger2.producers.ProducerModule;
import dagger2.producers.Produces;
import java.util.List;
import javax.inject.Provider;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import static com.google.common.truth.Truth.assertThat;
/**
* Test case for {@link DependencyRequestMapper}.
*/
@RunWith(JUnit4.class)
public class DependencyRequestMapperTest {
@Rule public CompilationRule compilationRule = new CompilationRule();
private Elements elements;
private Types types;
private Key.Factory keyFactory;
private DependencyRequest.Factory dependencyRequestFactory;
@Before public void setUp() {
this.types = compilationRule.getTypes();
this.elements = compilationRule.getElements();
this.keyFactory = new Key.Factory(types, elements);
this.dependencyRequestFactory = new DependencyRequest.Factory(keyFactory);
}
private List<? extends VariableElement> sampleProviderParameters() {
TypeElement moduleElement =
elements.getTypeElement(ProvidesMethodModule.class.getCanonicalName());
ExecutableElement providesMethod =
Iterables.getOnlyElement(ElementFilter.methodsIn(moduleElement.getEnclosedElements()));
return providesMethod.getParameters();
}
private List<? extends VariableElement> sampleProducerParameters() {
TypeElement moduleElement =
elements.getTypeElement(ProducesMethodModule.class.getCanonicalName());
ExecutableElement producesMethod =
Iterables.getOnlyElement(ElementFilter.methodsIn(moduleElement.getEnclosedElements()));
return producesMethod.getParameters();
}
private DependencyRequest dependencyRequestForInstance() {
return dependencyRequestFactory.forRequiredVariable(sampleProviderParameters().get(0));
}
private DependencyRequest dependencyRequestForLazy() {
return dependencyRequestFactory.forRequiredVariable(sampleProviderParameters().get(1));
}
private DependencyRequest dependencyRequestForProvider() {
return dependencyRequestFactory.forRequiredVariable(sampleProviderParameters().get(2));
}
private DependencyRequest dependencyRequestForMembersInjector() {
return dependencyRequestFactory.forRequiredVariable(sampleProviderParameters().get(3));
}
private DependencyRequest dependencyRequestForProducer() {
return dependencyRequestFactory.forRequiredVariable(sampleProducerParameters().get(0));
}
private DependencyRequest dependencyRequestForProduced() {
return dependencyRequestFactory.forRequiredVariable(sampleProducerParameters().get(1));
}
@Test public void forProvider() {
DependencyRequestMapper mapper = DependencyRequestMapper.FOR_PROVIDER;
assertThat(mapper.getFrameworkClass(dependencyRequestForInstance()))
.isEqualTo(Provider.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForLazy()))
.isEqualTo(Provider.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForProvider()))
.isEqualTo(Provider.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForMembersInjector()))
.isEqualTo(MembersInjector.class);
}
@Test public void forProducer() {
DependencyRequestMapper mapper = DependencyRequestMapper.FOR_PRODUCER;
assertThat(mapper.getFrameworkClass(dependencyRequestForInstance()))
.isEqualTo(Producer.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForLazy()))
.isEqualTo(Provider.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForProvider()))
.isEqualTo(Provider.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForMembersInjector()))
.isEqualTo(MembersInjector.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForProducer()))
.isEqualTo(Producer.class);
assertThat(mapper.getFrameworkClass(dependencyRequestForProduced()))
.isEqualTo(Producer.class);
}
@Module
static final class ProvidesMethodModule {
@Provides String provideString(
Integer a, Lazy<Integer> b, Provider<Integer> c, MembersInjector<Integer> d) {
return null;
}
}
@ProducerModule
static final class ProducesMethodModule {
@Produces String produceString(Producer<Integer> a, Produced<Integer> b) {
return null;
}
}
}
| goinstant/dagger | compiler/src/test/java/dagger2/internal/codegen/DependencyRequestMapperTest.java | Java | apache-2.0 | 5,503 |
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package herddb.sql.functions;
import herddb.model.StatementEvaluationContext;
import herddb.model.StatementExecutionException;
import herddb.sql.AggregatedColumnCalculator;
import herddb.sql.expressions.CompiledSQLExpression;
import herddb.utils.DataAccessor;
/**
* @author enrico.olivelli
*/
public abstract class AbstractSingleExpressionArgumentColumnCalculator implements AggregatedColumnCalculator {
protected final String fieldName;
protected final CompiledSQLExpression expression;
protected final ValueComputer valueExtractor;
@FunctionalInterface
public interface ValueComputer {
Comparable apply(DataAccessor tuple) throws StatementExecutionException;
}
protected AbstractSingleExpressionArgumentColumnCalculator(
String fieldName, CompiledSQLExpression expression,
StatementEvaluationContext context
) throws StatementExecutionException {
this.fieldName = fieldName;
this.expression = expression;
valueExtractor = (DataAccessor t) -> (Comparable) this.expression.evaluate(t, context);
}
}
| diennea/herddb | herddb-core/src/main/java/herddb/sql/functions/AbstractSingleExpressionArgumentColumnCalculator.java | Java | apache-2.0 | 1,858 |
package com.adioss.security.model;
import java.util.ArrayList;
import java.util.List;
public class CipherDescription {
private final String name;
private final List<String> exclusions;
private final boolean isEnableByDefault;
public CipherDescription(String name, boolean isEnableByDefault) {
this.name = name;
this.exclusions = new ArrayList<>();
this.isEnableByDefault = isEnableByDefault;
}
public String getName() {
return name;
}
public List<String> getExclusions() {
return exclusions;
}
public boolean isEnableByDefault() {
return isEnableByDefault;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CipherDescription cipherDescription = (CipherDescription) o;
return !(name != null ? !name.equals(cipherDescription.name) : cipherDescription.name != null);
}
@Override
public int hashCode() {
return name != null ? name.hashCode() : 0;
}
@Override
public String toString() {
return "CipherDescription{" +
"name='" + name + '\'' +
", exclusions=" + exclusions +
", isEnableByDefault=" + isEnableByDefault +
'}';
}
}
| adioss/CipherExplorer | src/main/java/com/adioss/security/model/CipherDescription.java | Java | apache-2.0 | 1,357 |
package com.beamofsoul.bip.management.util;
import java.net.InetAddress;
import java.net.UnknownHostException;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
/**
 * @ClassName ClientInformationUtils
 * @Description Utility class for retrieving client-related information (IP, MAC address, OS, browser)
 * @author MingshuJian
 * @Date 2017-04-12 09:06:09
 * @version 1.0.0
 */
@Slf4j
public class ClientInformationUtils {
public static final String CLIENT_IP_ADDRESS = "clientIp";
public static final String CLIENT_MAC_ADDRESS = "clientMac";
public static String getIpAddress(HttpServletRequest request) throws Exception {
String ipAddress = request.getHeader("x-forwarded-for");
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getHeader("Proxy-Client-IP");
}
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getHeader("WL-Proxy-Client-IP");
}
if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) {
ipAddress = request.getRemoteAddr();
if (ipAddress.equals("127.0.0.1") || ipAddress.equals("0:0:0:0:0:0:0:1")) {
                // Fall back to the IP configured on the local network interface
                InetAddress inet = null;
                try {
                    inet = InetAddress.getLocalHost();
                } catch (UnknownHostException e) {
                    log.debug("Failed to obtain the local host IP address", e);
}
                if (inet != null) {
                    ipAddress = inet.getHostAddress();
                }
}
}
        // When the request passes through multiple proxies, the first IP is the real client IP;
        // multiple IPs are separated by ','
        if (ipAddress != null && ipAddress.length() > 15) { // "***.***.***.***".length() = 15
if (ipAddress.indexOf(",") > 0) {
ipAddress = ipAddress.substring(0, ipAddress.indexOf(","));
}
}
return ipAddress;
}
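    // Illustrative sketch only: how these helpers would typically be called from a servlet
    // filter or controller. The doFilter method below is an assumption for the example and is
    // not part of this codebase.
    //
    //     public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
    //             throws IOException, ServletException {
    //         HttpServletRequest request = (HttpServletRequest) req;
    //         try {
    //             String ip = ClientInformationUtils.getIpAddress(request);
    //             String browser = ClientInformationUtils.getBrowser(request);
    //             request.setAttribute(ClientInformationUtils.CLIENT_IP_ADDRESS, ip);
    //         } catch (Exception e) {
    //             // client info resolution failures should not break request processing
    //         }
    //         chain.doFilter(req, res);
    //     }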
public static String getMacAddress(String ipAddress) throws Exception {
ClientMacAddressHandler handler = new ClientMacAddressHandler(ipAddress);
return handler.getRemoteMacAddress();
}
public static String getOperatingSystem(HttpServletRequest request) {
String userAgent = request.getHeader("User-Agent");
String lowerUserAgent = userAgent.toLowerCase();
String os = "";
if (lowerUserAgent.contains("windows")) {
os = "Windows";
} else if (lowerUserAgent.contains("mac")) {
os = "Mac";
} else if (lowerUserAgent.contains("x11")) {
os = "Unix";
} else if (lowerUserAgent.contains("android")) {
os = "Android";
} else if (lowerUserAgent.contains("iphone")) {
os = "IOS";
} else {
os = "Others, More-Info: " + userAgent;
}
return os;
}
public static String getBrowser(HttpServletRequest request) {
String userAgent = request.getHeader("User-Agent");
String lowerUserAgent = userAgent.toLowerCase();
String browser = "";
if (lowerUserAgent.contains("edge")) {
browser = (userAgent.substring(userAgent.indexOf("Edge")).split(" ")[0]).replace("/", "-");
} else if (lowerUserAgent.contains("msie")) {
String substring = userAgent.substring(userAgent.indexOf("MSIE")).split(";")[0];
browser = substring.split(" ")[0].replace("MSIE", "IE")+"-"+substring.split(" ")[1];
} else if (lowerUserAgent.contains("safari") && lowerUserAgent.contains("version")) {
browser = (userAgent.substring(userAgent.indexOf("Safari")).split(" ")[0]).split("/")[0]+ "-" +(userAgent.substring(userAgent.indexOf("Version")).split(" ")[0]).split("/")[1];
} else if (lowerUserAgent.contains("opr") || lowerUserAgent.contains("opera")) {
if(lowerUserAgent.contains("opera")){
browser = (userAgent.substring(userAgent.indexOf("Opera")).split(" ")[0]).split("/")[0]+"-"+(userAgent.substring(userAgent.indexOf("Version")).split(" ")[0]).split("/")[1];
}else if(lowerUserAgent.contains("opr")){
browser = ((userAgent.substring(userAgent.indexOf("OPR")).split(" ")[0]).replace("/", "-")).replace("OPR", "Opera");
}
} else if (lowerUserAgent.contains("chrome")) {
browser = (userAgent.substring(userAgent.indexOf("Chrome")).split(" ")[0]).replace("/", "-");
} else if ((lowerUserAgent.indexOf("mozilla/7.0") > -1) || (lowerUserAgent.indexOf("netscape6") != -1) ||
(lowerUserAgent.indexOf("mozilla/4.7") != -1) || (lowerUserAgent.indexOf("mozilla/4.78") != -1) ||
(lowerUserAgent.indexOf("mozilla/4.08") != -1) || (lowerUserAgent.indexOf("mozilla/3") != -1)) {
browser = "Netscape-?";
} else if (lowerUserAgent.contains("firefox")) {
browser = (userAgent.substring(userAgent.indexOf("Firefox")).split(" ")[0]).replace("/", "-");
} else if(lowerUserAgent.contains("rv")) {
String IEVersion = (userAgent.substring(userAgent.indexOf("rv")).split(" ")[0]).replace("rv:", "-");
browser = "IE" + IEVersion.substring(0,IEVersion.length() - 1);
} else {
browser = "Others, More-Info: " + userAgent;
}
return browser;
}
public static String getBrand(HttpServletRequest request) {
return request.getHeader("clientBrand");
}
public static String getModel(HttpServletRequest request) {
return request.getHeader("clientModel");
}
public static String getScreenSize(HttpServletRequest request) {
return request.getHeader("clientScreenSize");
}
}
| beamofsoul/BusinessInfrastructurePlatformGroupVersion | src/main/java/com/beamofsoul/bip/management/util/ClientInformationUtils.java | Java | apache-2.0 | 5,309 |
/*
* Copyright 2017 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.reactive4javaflow.processors;
import hu.akarnokd.reactive4javaflow.*;
import hu.akarnokd.reactive4javaflow.fused.FusedSubscription;
import hu.akarnokd.reactive4javaflow.impl.FailingFusedSubscription;
import org.junit.Test;
import java.io.IOException;
import java.util.concurrent.CancellationException;
import static org.junit.Assert.*;
public class MulticastProcessorTest {
@Test
public void normal() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(4);
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertFalse(mp.hasThrowable());
assertNull(mp.getThrowable());
mp.start();
mp.onNext(1);
mp.onNext(2);
assertTrue(mp.tryOnNext(3));
assertTrue(mp.tryOnNext(4));
assertFalse(mp.tryOnNext(5));
mp.onComplete();
TestConsumer<Integer> tc1 = mp.test(0);
TestConsumer<Integer> tc2 = mp.test(0);
assertTrue(mp.hasSubscribers());
tc1.requestMore(1);
tc1.assertEmpty();
tc2.assertEmpty();
tc2.requestMore(2);
tc1.assertValues(1);
tc2.assertValues(1);
tc1.requestMore(3);
tc2.requestMore(2);
tc1.assertResult(1, 2, 3, 4);
tc2.assertResult(1, 2, 3, 4);
assertFalse(mp.hasSubscribers());
assertTrue(mp.hasComplete());
assertFalse(mp.hasThrowable());
assertNull(mp.getThrowable());
mp.test().assertResult();
}
@Test
public void error() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(4);
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertFalse(mp.hasThrowable());
assertNull(mp.getThrowable());
mp.start();
mp.onError(new IOException());
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertTrue(mp.hasThrowable());
assertTrue("" + mp.getThrowable(), mp.getThrowable() instanceof IOException);
mp.test().assertFailure(IOException.class);
}
@Test
public void syncFusedSource() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(4);
Folyam.range(1, 5).subscribe(mp);
TestConsumer<Integer> tc1 = mp.test(0);
TestConsumer<Integer> tc2 = mp.test(0);
assertTrue(mp.hasSubscribers());
tc1.requestMore(1);
tc1.assertEmpty();
tc2.assertEmpty();
tc2.requestMore(2);
tc1.assertValues(1);
tc2.assertValues(1);
tc1.requestMore(4);
tc2.requestMore(3);
tc1.assertResult(1, 2, 3, 4, 5);
tc2.assertResult(1, 2, 3, 4, 5);
assertFalse(mp.hasSubscribers());
assertTrue(mp.hasComplete());
assertFalse(mp.hasThrowable());
assertNull(mp.getThrowable());
mp.test().assertResult();
}
@Test
public void asyncFusedSource() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(4);
SolocastProcessor<Integer> sp = new SolocastProcessor<>();
Folyam.range(1, 5).subscribe(sp);
sp.subscribe(mp);
TestConsumer<Integer> tc1 = mp.test(0);
TestConsumer<Integer> tc2 = mp.test(0);
assertTrue(mp.hasSubscribers());
tc1.requestMore(1);
tc1.assertEmpty();
tc2.assertEmpty();
tc2.requestMore(2);
tc1.assertValues(1);
tc2.assertValues(1);
tc1.requestMore(4);
tc2.requestMore(3);
tc1.assertResult(1, 2, 3, 4, 5);
tc2.assertResult(1, 2, 3, 4, 5);
assertFalse(mp.hasSubscribers());
assertTrue(mp.hasComplete());
assertFalse(mp.hasThrowable());
assertNull(mp.getThrowable());
mp.test().assertResult();
}
@Test
public void cancelUnblocksTheOthers() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(4);
mp.start();
mp.onNext(1);
mp.onNext(2);
mp.onNext(3);
mp.onNext(4);
TestConsumer<Integer> tc1 = mp.test(0);
TestConsumer<Integer> tc2 = mp.test(0);
TestConsumer<Integer> tc3 = mp.test(0);
tc1.requestMore(1);
tc3.requestMore(1);
tc2.cancel();
tc1.assertValues(1);
tc2.assertEmpty();
tc3.assertValues(1);
}
@Test
public void syncFusedCrash() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>();
new Folyam<Integer>() {
@Override
protected void subscribeActual(FolyamSubscriber<? super Integer> s) {
s.onSubscribe(new FailingFusedSubscription(FusedSubscription.SYNC));
}
}
.subscribe(mp);
mp.test().assertFailure(IOException.class);
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertTrue(mp.hasThrowable());
assertTrue("" + mp.getThrowable(), mp.getThrowable() instanceof IOException);
}
@Test
public void asyncFusedCrash() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>();
new Folyam<Integer>() {
@Override
protected void subscribeActual(FolyamSubscriber<? super Integer> s) {
s.onSubscribe(new FailingFusedSubscription(FusedSubscription.ASYNC));
}
}
.subscribe(mp);
mp.test().assertFailure(IOException.class);
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertTrue(mp.hasThrowable());
assertTrue("" + mp.getThrowable(), mp.getThrowable() instanceof IOException);
}
@Test
public void cancelUpfront() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
mp.start();
mp.test(0, true, 0)
.assertEmpty();
}
@Test
public void overflow() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
mp.start();
mp.onNext(1);
mp.onNext(2);
assertFalse(mp.hasSubscribers());
assertFalse(mp.hasComplete());
assertTrue(mp.hasThrowable());
assertTrue("" + mp.getThrowable(), mp.getThrowable() instanceof IllegalStateException);
mp.test()
.assertFailure(IllegalStateException.class, 1);
}
@Test(expected = IllegalStateException.class)
public void tryOnNextFail() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
new SolocastProcessor<Integer>().subscribe(mp);
mp.tryOnNext(1);
}
@Test
public void alreadyDone() {
TestHelper.withErrorTracking(errors -> {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
mp.start();
mp.onComplete();
mp.onError(new IOException());
mp.onNext(1);
mp.tryOnNext(2);
mp.test().assertResult();
});
}
@Test
public void close() {
DirectProcessor<Integer> dp = new DirectProcessor<>();
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
dp.subscribe(mp);
assertTrue(dp.hasSubscribers());
mp.close();
assertFalse(dp.hasSubscribers());
mp.test().assertFailure(CancellationException.class);
}
@Test
public void beforeOnSubscribe() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
TestConsumer<Integer> tc = mp.test(0);
tc.requestMore(1);
mp.start();
mp.onNext(1);
mp.onComplete();
tc.assertResult(1);
}
@Test
public void emptyFused() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
TestConsumer<Integer> tc = mp.test();
Folyam.<Integer>empty().subscribe(mp);
tc.assertResult();
mp.test().assertResult();
}
@Test
public void justFused() {
MulticastProcessor<Integer> mp = new MulticastProcessor<>(1);
TestConsumer<Integer> tc = mp.test(2);
Folyam.just(1).subscribe(mp);
tc.assertResult(1);
mp.test().assertResult();
}
}
| akarnokd/Reactive4JavaFlow | src/test/java/hu/akarnokd/reactive4javaflow/processors/MulticastProcessorTest.java | Java | apache-2.0 | 8,824
/*
* Copyright 2015 Technische Universitat Wien (TUW), Distributed Systems Group E184
*
* This work was partially supported by the European Commission in terms of the
* CELAR FP7 project (FP7-ICT-2011-8 \#317790)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.ac.tuwien.dsg.mela.costeval.engines;
import at.ac.tuwien.dsg.mela.common.applicationdeploymentconfiguration.UsedCloudOfferedService;
import at.ac.tuwien.dsg.mela.common.configuration.metricComposition.CompositionRulesBlock;
import at.ac.tuwien.dsg.mela.common.configuration.metricComposition.CompositionRulesConfiguration;
import at.ac.tuwien.dsg.mela.common.elasticityAnalysis.concepts.elasticityPathway.ServiceElasticityPathway;
import at.ac.tuwien.dsg.mela.common.elasticityAnalysis.concepts.elasticitySpace.ElasticitySpace;
import at.ac.tuwien.dsg.mela.common.jaxbEntities.configuration.ConfigurationXMLRepresentation;
import at.ac.tuwien.dsg.mela.common.monitoringConcepts.Metric;
import at.ac.tuwien.dsg.mela.common.monitoringConcepts.MetricValue;
import at.ac.tuwien.dsg.mela.common.monitoringConcepts.MonitoredElement;
import at.ac.tuwien.dsg.mela.common.monitoringConcepts.MonitoredElementMonitoringSnapshot;
import at.ac.tuwien.dsg.mela.common.monitoringConcepts.ServiceMonitoringSnapshot;
import at.ac.tuwien.dsg.mela.common.persistence.PersistenceSQLAccess;
import at.ac.tuwien.dsg.mela.common.requirements.Requirements;
import at.ac.tuwien.dsg.mela.costeval.control.CostEvalManager;
import static at.ac.tuwien.dsg.mela.costeval.engines.CostEvalEngine.log;
import at.ac.tuwien.dsg.mela.costeval.model.CloudServicesSpecification;
import at.ac.tuwien.dsg.mela.costeval.model.CostEnrichedSnapshot;
import at.ac.tuwien.dsg.mela.costeval.model.LifetimeEnrichedSnapshot;
import at.ac.tuwien.dsg.mela.costeval.persistence.CostPersistenceDelegate;
import at.ac.tuwien.dsg.mela.costeval.utils.conversion.CostJSONConverter;
import at.ac.tuwien.dsg.quelle.cloudServicesModel.concepts.CloudProvider;
import at.ac.tuwien.dsg.mela.dataservice.aggregation.DataAggregationEngine;
import at.ac.tuwien.dsg.quelle.cloudServicesModel.concepts.CostElement;
import at.ac.tuwien.dsg.quelle.cloudServicesModel.concepts.CostFunction;
import at.ac.tuwien.dsg.quelle.cloudServicesModel.concepts.CloudOfferedService;
import at.ac.tuwien.dsg.quelle.extensions.neo4jPersistenceAdapter.daos.CloudProviderDAO;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import org.apache.cxf.common.i18n.UncheckedException;
import org.hsqldb.server.ServerConstants;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.util.Assert;
/**
*
* @author daniel-tuwien
*/
//@RunWith(SpringJUnit4ClassRunner.class)
//@ContextConfiguration(locations = {"file:src/test/java/spring/test-context.xml"})
public class CostTotalEvalTest {
// @Value("#{persistenceDelegate}")
private CostPersistenceDelegate persistenceDelegate;
private at.ac.tuwien.dsg.mela.dataservice.persistence.PersistenceDelegate dataAccessPersistenceDelegate;
private PersistenceSQLAccess generalAccess;
public CostTotalEvalTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
try {
//run hsql in memory only for testing purposes
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setUrl("jdbc:hsqldb:mem:mela-test-total-db");
dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
dataSource.setUsername("sa");
dataSource.setPassword("");
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
//read content of sql schema
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader("src/test/resources/create-initial-db-schema.sql"));
} catch (FileNotFoundException ex) {
Logger.getLogger(CostTotalEvalTest.class.getName()).log(Level.SEVERE, null, ex);
fail(ex.getMessage());
}
String line = "";
while ((line = reader.readLine()) != null) {
if (!line.isEmpty()) {
jdbcTemplate.execute(line);
}
}
persistenceDelegate = new CostPersistenceDelegate();
persistenceDelegate.setDataSource(dataSource);
persistenceDelegate.setJdbcTemplate(jdbcTemplate);
generalAccess = new PersistenceSQLAccess().withDataSource(dataSource).withJdbcTemplate(jdbcTemplate);
persistenceDelegate.setPersistenceSQLAccess(generalAccess);
} catch (IOException ex) {
Logger.getLogger(CostTotalEvalTest.class.getName()).log(Level.SEVERE, null, ex);
fail(ex.getMessage());
}
}
@After
public void tearDown() {
}
/**
* Test of applyCompositionRules method, of class CostEvalEngine.
*/
@Test
public void testCostEvalTotal() throws Exception {
        // TODO: check why the enriched monitoring snapshot still reports the COST ORIGIN metrics
        // as COST metrics, and why they are therefore used (with a cost value) in the radial chart.
CostEvalEngine costEvalEngine = new CostEvalEngine();
DataAggregationEngine aggregationEngine = new DataAggregationEngine();
costEvalEngine.setInstantMonitoringDataEnrichmentEngine(aggregationEngine);
CloudProvider provider = new CloudProvider("Amazon");
provider.setUuid(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"));
CloudServicesSpecification cloudServicesSpecification = new CloudServicesSpecification();
cloudServicesSpecification.addCloudProvider(provider);
CloudOfferedService vm1SmallService = new CloudOfferedService("IaaS", "VM", "m1.small");
vm1SmallService.withUuid(UUID.fromString("38400000-8cf0-11bd-b23e-000000000000"));
//VM COST
{
CostFunction vmCost = new CostFunction(vm1SmallService.getName() + "_cost");
CostElement periodicCostElement = new CostElement("vmCost", new Metric("instance", "#/h", Metric.MetricType.COST), CostElement.Type.PERIODIC);
periodicCostElement.addBillingInterval(new MetricValue(Double.POSITIVE_INFINITY), 1d);
vmCost.addCostElement(periodicCostElement);
vm1SmallService.addCostFunction(vmCost);
}
provider.addCloudOfferedService(vm1SmallService);
Map<UUID, Map<UUID, CloudOfferedService>> cloudProvidersMap = new HashMap<UUID, Map<UUID, CloudOfferedService>>();
Map<UUID, CloudOfferedService> cloudUnits = new HashMap<UUID, CloudOfferedService>();
cloudProvidersMap.put(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"), cloudUnits);
for (CloudOfferedService unit : provider.getCloudOfferedServices()) {
cloudUnits.put(unit.getUuid(), unit);
}
int instanceIndex = 0;
MonitoredElement vm = new MonitoredElement("10.0.0.1").withLevel(MonitoredElement.MonitoredElementLevel.VM)
.withCloudOfferedService(new UsedCloudOfferedService()
.withCloudProviderID(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"))
.withCloudProviderName("Amazon")
.withInstanceUUID(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + instanceIndex++))
.withId(UUID.fromString("38400000-8cf0-11bd-b23e-000000000000"))
.withName("m1.small")
);
MonitoredElement unit = new MonitoredElement("Unit").withLevel(MonitoredElement.MonitoredElementLevel.SERVICE_UNIT)
.withContainedElement(vm);
MonitoredElement topology = new MonitoredElement("Topology").withLevel(MonitoredElement.MonitoredElementLevel.SERVICE_TOPOLOGY)
.withContainedElement(unit);
MonitoredElement service = new MonitoredElement("Service").withLevel(MonitoredElement.MonitoredElementLevel.SERVICE)
.withContainedElement(topology);
//make sure all is clean
persistenceDelegate.removeService(service.getId());
persistenceDelegate.writeMonitoringSequenceId(service.getId());
persistenceDelegate.writeConfiguration(service.getId(), new ConfigurationXMLRepresentation().withServiceConfiguration(service).withCompositionRulesConfiguration(new CompositionRulesConfiguration()).withRequirements(new Requirements()));
Metric ELEMENT_COST_METRIC = new Metric("element_cost", "costUnits", Metric.MetricType.COST);
ServiceMonitoringSnapshot monitoringSnapshot1 = new ServiceMonitoringSnapshot().withTimestamp("1000");
{
MonitoredElementMonitoringSnapshot elementMonitoringSnapshot = new MonitoredElementMonitoringSnapshot(vm);
MonitoredElementMonitoringSnapshot unitMonSnapshpot = new MonitoredElementMonitoringSnapshot(unit);
unitMonSnapshpot.addChild(elementMonitoringSnapshot);
MonitoredElementMonitoringSnapshot topologyMonSnapshpot = new MonitoredElementMonitoringSnapshot(topology);
topologyMonSnapshpot.addChild(unitMonSnapshpot);
MonitoredElementMonitoringSnapshot serviceMonSnapshpot = new MonitoredElementMonitoringSnapshot(service);
serviceMonSnapshpot.addChild(topologyMonSnapshpot);
monitoringSnapshot1.addMonitoredData(elementMonitoringSnapshot);
monitoringSnapshot1.addMonitoredData(unitMonSnapshpot);
monitoringSnapshot1.addMonitoredData(topologyMonSnapshpot);
monitoringSnapshot1.addMonitoredData(serviceMonSnapshpot);
monitoringSnapshot1 = monitoringSnapshot1.clone();
generalAccess.writeInTimestamp(monitoringSnapshot1.getTimestamp(), service, service.getId());
generalAccess.writeStructuredMonitoringData(monitoringSnapshot1.getTimestamp(), monitoringSnapshot1, service.getId());
}
//test1
LifetimeEnrichedSnapshot totalUsageSnapshot = costEvalEngine.updateTotalUsageSoFarWithCompleteStructureIncludingServicesAsCloudOfferedService(cloudProvidersMap, null, monitoringSnapshot1);
{
CompositionRulesBlock totalCostRules = costEvalEngine.createCompositionRulesForTotalCostIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot, totalUsageSnapshot.getSnapshot().getTimestamp());
ServiceMonitoringSnapshot totalCostEnrichedSnapshot = costEvalEngine.applyCompositionRules(totalCostRules, totalUsageSnapshot.getSnapshot());
{
assertEquals(new MetricValue(1.0), totalCostEnrichedSnapshot.getMonitoredData(service).getMetricValue(ELEMENT_COST_METRIC));
}
}
ServiceMonitoringSnapshot monitoringSnapshot2 = new ServiceMonitoringSnapshot().withTimestamp("2000");
{
MonitoredElement vm2 = new MonitoredElement("10.0.0.2").withLevel(MonitoredElement.MonitoredElementLevel.VM)
.withCloudOfferedService(new UsedCloudOfferedService()
.withCloudProviderID(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"))
.withCloudProviderName("Amazon")
.withInstanceUUID(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + instanceIndex++))
.withId(UUID.fromString("38400000-8cf0-11bd-b23e-000000000000"))
.withName("m1.small")
);
unit.getContainedElements().clear();
unit.withContainedElement(vm);
unit.withContainedElement(vm2);
MonitoredElementMonitoringSnapshot vm1MonSnapshot = new MonitoredElementMonitoringSnapshot(vm);
MonitoredElementMonitoringSnapshot vm2MonSnapshot = new MonitoredElementMonitoringSnapshot(vm2);
            MonitoredElementMonitoringSnapshot unitMonSnapshot = new MonitoredElementMonitoringSnapshot(unit);
            unitMonSnapshot.addChild(vm1MonSnapshot);
            unitMonSnapshot.addChild(vm2MonSnapshot);
            MonitoredElementMonitoringSnapshot topologyMonSnapshot = new MonitoredElementMonitoringSnapshot(topology);
            topologyMonSnapshot.addChild(unitMonSnapshot);
            MonitoredElementMonitoringSnapshot serviceMonSnapshot = new MonitoredElementMonitoringSnapshot(service);
            serviceMonSnapshot.addChild(topologyMonSnapshot);
            monitoringSnapshot2.addMonitoredData(vm2MonSnapshot);
            monitoringSnapshot2.addMonitoredData(vm1MonSnapshot);
            monitoringSnapshot2.addMonitoredData(unitMonSnapshot);
            monitoringSnapshot2.addMonitoredData(topologyMonSnapshot);
            monitoringSnapshot2.addMonitoredData(serviceMonSnapshot);
monitoringSnapshot2 = monitoringSnapshot2.clone();
generalAccess.writeInTimestamp(monitoringSnapshot2.getTimestamp(), service, service.getId());
generalAccess.writeStructuredMonitoringData(monitoringSnapshot2.getTimestamp(), monitoringSnapshot2, service.getId());
}
//test2
LifetimeEnrichedSnapshot totalUsageSnapshot1 = costEvalEngine.updateTotalUsageSoFarWithCompleteStructureIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot, monitoringSnapshot2);
{
CompositionRulesBlock totalCostRules = costEvalEngine.createCompositionRulesForTotalCostIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot1, totalUsageSnapshot1.getSnapshot().getTimestamp());
ServiceMonitoringSnapshot totalCostEnrichedSnapshot = costEvalEngine.applyCompositionRules(totalCostRules, totalUsageSnapshot1.getSnapshot());
{
assertEquals(new MetricValue(2.0), totalCostEnrichedSnapshot.getMonitoredData(service).getMetricValue(ELEMENT_COST_METRIC));
}
}
//scaling in instance 2
ServiceMonitoringSnapshot monitoringSnapshot3 = new ServiceMonitoringSnapshot().withTimestamp("3000");
{
unit.getContainedElements().clear();
unit.withContainedElement(vm);
MonitoredElementMonitoringSnapshot vm1MonSnapshot = new MonitoredElementMonitoringSnapshot(vm);
            MonitoredElementMonitoringSnapshot unitMonSnapshot = new MonitoredElementMonitoringSnapshot(unit);
            unitMonSnapshot.addChild(vm1MonSnapshot);
            MonitoredElementMonitoringSnapshot topologyMonSnapshot = new MonitoredElementMonitoringSnapshot(topology);
            topologyMonSnapshot.addChild(unitMonSnapshot);
            MonitoredElementMonitoringSnapshot serviceMonSnapshot = new MonitoredElementMonitoringSnapshot(service);
            serviceMonSnapshot.addChild(topologyMonSnapshot);
            monitoringSnapshot3.addMonitoredData(vm1MonSnapshot);
            monitoringSnapshot3.addMonitoredData(unitMonSnapshot);
            monitoringSnapshot3.addMonitoredData(topologyMonSnapshot);
            monitoringSnapshot3.addMonitoredData(serviceMonSnapshot);
monitoringSnapshot3 = monitoringSnapshot3.clone();
generalAccess.writeInTimestamp(monitoringSnapshot3.getTimestamp(), service, service.getId());
generalAccess.writeStructuredMonitoringData(monitoringSnapshot3.getTimestamp(), monitoringSnapshot3, service.getId());
}
//test3
LifetimeEnrichedSnapshot totalUsageSnapshot_2 = costEvalEngine.updateTotalUsageSoFarWithCompleteStructureIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot1, monitoringSnapshot3);
{
CompositionRulesBlock totalCostRules = costEvalEngine.createCompositionRulesForTotalCostIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot_2, totalUsageSnapshot_2.getSnapshot().getTimestamp());
ServiceMonitoringSnapshot totalCostEnrichedSnapshot = costEvalEngine.applyCompositionRules(totalCostRules, totalUsageSnapshot_2.getSnapshot());
{
assertEquals(new MetricValue(2.0), totalCostEnrichedSnapshot.getMonitoredData(service).getMetricValue(ELEMENT_COST_METRIC));
}
}
        //scaling out with a new instance that reuses the same IP
ServiceMonitoringSnapshot monitoringSnapshot4 = new ServiceMonitoringSnapshot().withTimestamp("4000");
{
MonitoredElement vm2 = new MonitoredElement("10.0.0.2(1)").withName("10.0.0.2").withLevel(MonitoredElement.MonitoredElementLevel.VM)
.withCloudOfferedService(new UsedCloudOfferedService()
.withCloudProviderID(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"))
.withCloudProviderName("Amazon")
.withInstanceUUID(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + instanceIndex++))
.withId(UUID.fromString("38400000-8cf0-11bd-b23e-000000000000"))
.withName("m1.small")
);
unit.getContainedElements().clear();
unit.withContainedElement(vm);
unit.withContainedElement(vm2);
MonitoredElementMonitoringSnapshot vm1MonSnapshot = new MonitoredElementMonitoringSnapshot(vm);
MonitoredElementMonitoringSnapshot vm2MonSnapshot = new MonitoredElementMonitoringSnapshot(vm2);
            MonitoredElementMonitoringSnapshot unitMonSnapshot = new MonitoredElementMonitoringSnapshot(unit);
            unitMonSnapshot.addChild(vm1MonSnapshot);
            unitMonSnapshot.addChild(vm2MonSnapshot);
            MonitoredElementMonitoringSnapshot topologyMonSnapshot = new MonitoredElementMonitoringSnapshot(topology);
            topologyMonSnapshot.addChild(unitMonSnapshot);
            MonitoredElementMonitoringSnapshot serviceMonSnapshot = new MonitoredElementMonitoringSnapshot(service);
            serviceMonSnapshot.addChild(topologyMonSnapshot);
            monitoringSnapshot4.addMonitoredData(vm1MonSnapshot);
            monitoringSnapshot4.addMonitoredData(vm2MonSnapshot);
            monitoringSnapshot4.addMonitoredData(unitMonSnapshot);
            monitoringSnapshot4.addMonitoredData(topologyMonSnapshot);
            monitoringSnapshot4.addMonitoredData(serviceMonSnapshot);
monitoringSnapshot4 = monitoringSnapshot4.clone();
generalAccess.writeInTimestamp(monitoringSnapshot4.getTimestamp(), service, service.getId());
generalAccess.writeStructuredMonitoringData(monitoringSnapshot4.getTimestamp(), monitoringSnapshot4, service.getId());
}
//test4
LifetimeEnrichedSnapshot totalUsageSnapshot3 = costEvalEngine.updateTotalUsageSoFarWithCompleteStructureIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot_2, monitoringSnapshot4);
{
CompositionRulesBlock totalCostRules = costEvalEngine.createCompositionRulesForTotalCostIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot3, totalUsageSnapshot3.getSnapshot().getTimestamp());
ServiceMonitoringSnapshot totalCostEnrichedSnapshot = costEvalEngine.applyCompositionRules(totalCostRules, totalUsageSnapshot3.getSnapshot());
{
assertEquals(new MetricValue(3.0), totalCostEnrichedSnapshot.getMonitoredData(service).getMetricValue(ELEMENT_COST_METRIC));
}
}
//scaling in instance 2
ServiceMonitoringSnapshot monitoringSnapshot5 = new ServiceMonitoringSnapshot().withTimestamp("3700000");
{
MonitoredElement vm2 = new MonitoredElement("10.0.0.2(1)").withName("10.0.0.2").withLevel(MonitoredElement.MonitoredElementLevel.VM)
.withCloudOfferedService(new UsedCloudOfferedService()
.withCloudProviderID(UUID.fromString("251ed7c7-aa4d-49d4-b42b-7efefd970d6b"))
.withCloudProviderName("Amazon")
.withInstanceUUID(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + instanceIndex))
.withId(UUID.fromString("38400000-8cf0-11bd-b23e-000000000000"))
.withName("m1.small")
);
unit.getContainedElements().clear();
unit.withContainedElement(vm);
unit.withContainedElement(vm2);
MonitoredElementMonitoringSnapshot vm1MonSnapshot = new MonitoredElementMonitoringSnapshot(vm);
MonitoredElementMonitoringSnapshot vm2MonSnapshot = new MonitoredElementMonitoringSnapshot(vm2);
            MonitoredElementMonitoringSnapshot unitMonSnapshot = new MonitoredElementMonitoringSnapshot(unit);
            unitMonSnapshot.addChild(vm1MonSnapshot);
            unitMonSnapshot.addChild(vm2MonSnapshot);
            MonitoredElementMonitoringSnapshot topologyMonSnapshot = new MonitoredElementMonitoringSnapshot(topology);
            topologyMonSnapshot.addChild(unitMonSnapshot);
            MonitoredElementMonitoringSnapshot serviceMonSnapshot = new MonitoredElementMonitoringSnapshot(service);
            serviceMonSnapshot.addChild(topologyMonSnapshot);
            monitoringSnapshot5.addMonitoredData(vm1MonSnapshot);
            monitoringSnapshot5.addMonitoredData(vm2MonSnapshot);
            monitoringSnapshot5.addMonitoredData(unitMonSnapshot);
            monitoringSnapshot5.addMonitoredData(topologyMonSnapshot);
            monitoringSnapshot5.addMonitoredData(serviceMonSnapshot);
monitoringSnapshot5 = monitoringSnapshot5.clone();
generalAccess.writeInTimestamp(monitoringSnapshot5.getTimestamp(), service, service.getId());
generalAccess.writeStructuredMonitoringData(monitoringSnapshot5.getTimestamp(), monitoringSnapshot5, service.getId());
}
        //test5
LifetimeEnrichedSnapshot totalUsageSnapshot4 = costEvalEngine.updateTotalUsageSoFarWithCompleteStructureIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot3, monitoringSnapshot5);
{
CompositionRulesBlock totalCostRules = costEvalEngine.createCompositionRulesForTotalCostIncludingServicesAsCloudOfferedService(cloudProvidersMap, totalUsageSnapshot4, totalUsageSnapshot4.getSnapshot().getTimestamp());
ServiceMonitoringSnapshot totalCostEnrichedSnapshot = costEvalEngine.applyCompositionRules(totalCostRules, totalUsageSnapshot4.getSnapshot());
{
assertEquals(new MetricValue(5.0), totalCostEnrichedSnapshot.getMonitoredData(service).getMetricValue(ELEMENT_COST_METRIC));
log.debug(CostJSONConverter.convertMonitoringSnapshot(totalCostEnrichedSnapshot));
}
{//cost of last added VM
MonitoredElement usedCloudServiceMonitoredElement = new MonitoredElement()
.withId(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + --instanceIndex).toString())
.withName("m1.small")
.withLevel(MonitoredElement.MonitoredElementLevel.CLOUD_OFFERED_SERVICE);
assertEquals(new MetricValue(2.0), totalCostEnrichedSnapshot.getMonitoredData(usedCloudServiceMonitoredElement).getMetricValue(ELEMENT_COST_METRIC));
}
{
                //cost of the scaled-in VM; it must differ from that of the last added one
MonitoredElement usedCloudServiceMonitoredElement = new MonitoredElement()
.withId(UUID.fromString("98400000-8cf0-11bd-b23e-00000000000" + --instanceIndex).toString())
.withName("m1.small")
.withLevel(MonitoredElement.MonitoredElementLevel.CLOUD_OFFERED_SERVICE);
assertEquals(new MetricValue(1.0), totalCostEnrichedSnapshot.getMonitoredData(usedCloudServiceMonitoredElement).getMetricValue(ELEMENT_COST_METRIC));
}
}
}
}
| tuwiendsg/MELA | MELA-Extensions/MELA-ComplexCostEvaluationService/src/test/java/at/ac/tuwien/dsg/mela/costeval/engines/CostTotalEvalTest.java | Java | apache-2.0 | 25,347 |
package com.tixon.portlab.app;
import java.util.ArrayList;
public class FunctionsDefinitions {
public ArrayList<String> arguments;
public String expression;
}
| TikhonOsipov/portlab | app/src/main/java/com/tixon/portlab/app/FunctionsDefinitions.java | Java | apache-2.0 | 177 |
package com.sequenceiq.cloudbreak.service.stack;
import static com.sequenceiq.cloudbreak.common.network.NetworkConstants.SUBNET_IDS;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.sequenceiq.cloudbreak.cloud.model.CloudSubnet;
import com.sequenceiq.cloudbreak.cloud.service.ResourceRetriever;
import com.sequenceiq.cloudbreak.common.json.Json;
import com.sequenceiq.cloudbreak.common.service.TransactionService;
import com.sequenceiq.cloudbreak.core.flow2.dto.NetworkScaleDetails;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceGroup;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceMetaData;
import com.sequenceiq.cloudbreak.domain.stack.instance.network.InstanceGroupNetwork;
import com.sequenceiq.cloudbreak.repository.InstanceMetaDataRepository;
import com.sequenceiq.cloudbreak.service.environment.EnvironmentClientService;
import com.sequenceiq.environment.api.v1.environment.model.response.DetailedEnvironmentResponse;
import com.sequenceiq.environment.api.v1.environment.model.response.EnvironmentNetworkResponse;
@SpringBootTest(classes = InstanceMetadataServiceComponentTest.TestConfig.class, webEnvironment = SpringBootTest.WebEnvironment.NONE)
@ExtendWith(SpringExtension.class)
public class InstanceMetadataServiceComponentTest {
private static final String ENV_CRN = "envCrn";
@Inject
private InstanceMetaDataService instanceMetaDataService;
@Inject
private EnvironmentClientService environmentClientService;
@Test
public void saveInstanceAndGetUpdatedStack() {
DetailedEnvironmentResponse detailedEnvResponse = DetailedEnvironmentResponse.builder()
.withCrn(ENV_CRN)
.withNetwork(EnvironmentNetworkResponse.builder()
.withSubnetMetas(Map.of(
"sub1", cloudSubnet("az", "sub1"),
"sub2", cloudSubnet("az", "sub2"),
"sub3", cloudSubnet("az", "sub3"),
"sub4", cloudSubnet("az1", "sub4")
))
.build())
.build();
Stack stack = new Stack();
stack.setEnvironmentCrn(ENV_CRN);
InstanceGroup workerInstanceGroup = new InstanceGroup();
workerInstanceGroup.setGroupName("worker");
InstanceGroupNetwork instanceGroupNetwork = new InstanceGroupNetwork();
instanceGroupNetwork.setCloudPlatform("AWS");
instanceGroupNetwork.setAttributes(new Json(Map.of(SUBNET_IDS, List.of("sub1", "sub2", "sub3", "sub4"))));
workerInstanceGroup.setInstanceGroupNetwork(instanceGroupNetwork);
stack.setInstanceGroups(Set.of(workerInstanceGroup));
when(environmentClientService.getByCrn(ENV_CRN)).thenReturn(detailedEnvResponse);
instanceMetaDataService.saveInstanceAndGetUpdatedStack(stack, Map.of("worker", 42), Map.of(), false, false, null);
Map<String, List<InstanceMetaData>> groupBySub = workerInstanceGroup.getInstanceMetaDataSet().stream()
.collect(Collectors.groupingBy(
InstanceMetaData::getSubnetId,
Collectors.mapping(Function.identity(), Collectors.toList())));
Map<String, List<InstanceMetaData>> groupByAz = workerInstanceGroup.getInstanceMetaDataSet().stream()
.collect(Collectors.groupingBy(
InstanceMetaData::getAvailabilityZone,
Collectors.mapping(Function.identity(), Collectors.toList())));
Assertions.assertEquals(2, groupByAz.size());
Assertions.assertEquals(21, groupByAz.get("az").size());
Assertions.assertEquals(21, groupByAz.get("az1").size());
Assertions.assertEquals(4, groupBySub.size());
Assertions.assertEquals(7, groupBySub.get("sub1").size());
Assertions.assertEquals(7, groupBySub.get("sub2").size());
Assertions.assertEquals(7, groupBySub.get("sub3").size());
Assertions.assertEquals(21, groupBySub.get("sub4").size());
}
@Test
public void saveInstanceAndGetUpdatedStackWhenPreferredSubnetSet() {
DetailedEnvironmentResponse detailedEnvResponse = DetailedEnvironmentResponse.builder()
.withCrn(ENV_CRN)
.withNetwork(EnvironmentNetworkResponse.builder()
.withSubnetMetas(Map.of(
"sub1", cloudSubnet("az", "sub1"),
"sub2", cloudSubnet("az", "sub2"),
"sub3", cloudSubnet("az", "sub3"),
"sub4", cloudSubnet("az1", "sub4")
))
.build())
.build();
Stack stack = new Stack();
stack.setEnvironmentCrn(ENV_CRN);
InstanceGroup workerInstanceGroup = new InstanceGroup();
workerInstanceGroup.setGroupName("worker");
InstanceGroupNetwork instanceGroupNetwork = new InstanceGroupNetwork();
instanceGroupNetwork.setCloudPlatform("AWS");
instanceGroupNetwork.setAttributes(new Json(Map.of(SUBNET_IDS, List.of("sub1", "sub2", "sub3", "sub4"))));
workerInstanceGroup.setInstanceGroupNetwork(instanceGroupNetwork);
stack.setInstanceGroups(Set.of(workerInstanceGroup));
when(environmentClientService.getByCrn(ENV_CRN)).thenReturn(detailedEnvResponse);
NetworkScaleDetails networkScaleDetails = new NetworkScaleDetails(List.of("sub1", "sub2"));
instanceMetaDataService.saveInstanceAndGetUpdatedStack(stack, Map.of("worker", 42), Map.of(), false, false, networkScaleDetails);
Map<String, List<InstanceMetaData>> groupBySub = workerInstanceGroup.getInstanceMetaDataSet().stream()
.collect(Collectors.groupingBy(
InstanceMetaData::getSubnetId,
Collectors.mapping(Function.identity(), Collectors.toList())));
Map<String, List<InstanceMetaData>> groupByAz = workerInstanceGroup.getInstanceMetaDataSet().stream()
.collect(Collectors.groupingBy(
InstanceMetaData::getAvailabilityZone,
Collectors.mapping(Function.identity(), Collectors.toList())));
Assertions.assertEquals(1, groupByAz.size());
Assertions.assertEquals(42, groupByAz.get("az").size());
Assertions.assertNull(groupByAz.get("az1"));
Assertions.assertEquals(2, groupBySub.size());
Assertions.assertEquals(21, groupBySub.get("sub1").size());
Assertions.assertEquals(21, groupBySub.get("sub2").size());
Assertions.assertNull(groupBySub.get("sub3"));
Assertions.assertNull(groupBySub.get("sub4"));
}
private CloudSubnet cloudSubnet(String az, String sub) {
CloudSubnet cloudSubnet = new CloudSubnet();
cloudSubnet.setAvailabilityZone(az);
cloudSubnet.setId(sub);
return cloudSubnet;
}
@ComponentScan(
basePackages = {
"com.sequenceiq.cloudbreak.service.stack",
"com.sequenceiq.cloudbreak.service.multiaz",
"com.sequenceiq.cloudbreak.controller.validation.network"},
excludeFilters = {@ComponentScan.Filter(
type = FilterType.REGEX,
pattern = "(?!.*\\.InstanceMetaDataService)com\\.sequenceiq\\.cloudbreak\\.service\\.stack\\..*")}
)
public static class TestConfig {
@MockBean
private InstanceMetaDataRepository repository;
@MockBean
private TransactionService transactionService;
@MockBean
private EnvironmentClientService environmentClientService;
@MockBean
private ResourceRetriever resourceRetriever;
}
}
| hortonworks/cloudbreak | core/src/test/java/com/sequenceiq/cloudbreak/service/stack/InstanceMetadataServiceComponentTest.java | Java | apache-2.0 | 8,475 |
/*******************************************************************************
* DARPA XDATA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright 2013 Raytheon BBN Technologies Corp. All Rights Reserved.
******************************************************************************/
package com.bbn.c2s2.pint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import junit.framework.TestCase;
import com.bbn.c2s2.pint.testdata.ProcessModelFactory;
import com.bbn.c2s2.pint.vocab.RNRM;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
public class ProcessMultiplexerTest extends TestCase {
Resource processWithObservable1, processWithoutObservable1,
processWithObservables1;
ProcessMultiplexer m1, m2, m3, m4;
protected void setUp() throws Exception {
super.setUp();
processWithObservable1 = ProcessModelFactory
.createSingleActivityProcessWithObservable();
processWithoutObservable1 = ProcessModelFactory
.createSingleActivityProcess();
processWithObservables1 = ProcessModelFactory
.createSerialProcessWithObservables();
m1 = new ProcessMultiplexer(processWithObservable1.getModel());
m1.initialize();
m2 = new ProcessMultiplexer(processWithoutObservable1.getModel());
m2.initialize();
m3 = new ProcessMultiplexer(processWithoutObservable1.getModel());
// NOTE: m3 is not initialized.
m4 = new ProcessMultiplexer(processWithObservables1.getModel());
m4.initialize();
}
public void testInitialize() throws Exception {
assertTrue(m1.initialized);
assertTrue(m2.initialized);
assertFalse(m3.initialized);
// ignore single activity process test.
assertTrue(m1.getProcesses().size() == 0);
// test for process with no observables.
assertTrue(m2.getProcesses().size() == 0);
assertTrue(m4.getProcesses().size() == 1);
Model multiProcessModel = ModelFactory.createDefaultModel();
multiProcessModel.add(this.processWithObservable1.getModel());
multiProcessModel.add(this.processWithoutObservable1.getModel());
multiProcessModel.add(this.processWithObservables1.getModel());
ProcessMultiplexer m5 = new ProcessMultiplexer(multiProcessModel);
m5.initialize();
assertTrue(m5.getProcesses().size() == 1);
}
public void testGetProcessAssignments() {
Model m = m4.rnrm.getModel();
StmtIterator i = m.listStatements((Resource) null, RNRM.hasObservable,
(Resource) null);
Set<String> uniqueObservables = new HashSet<String>();
while (i.hasNext()) {
Statement s = i.nextStatement();
uniqueObservables.add(s.getObject().toString());
}
int count = 0;
Collection<Observation> os = new ArrayList<Observation>();
for (String obsUri : uniqueObservables) {
Observation o = new Observation("o" + count++, obsUri, 0, 0,
new Date(0));
os.add(o);
}
Collection<ProcessAssignment> pas = null;
pas = m4.getProcessAssignments(os);
ProcessAssignment pa = pas.iterator().next();
assertTrue(pa._candidates.bindingCount() == 2);
os = new ArrayList<Observation>();
pas = m4.getProcessAssignments(os);
assertTrue(pas.size() == 0);
}
}
| plamenbbn/XDATA | pint/process-alignment/test/com/bbn/c2s2/pint/ProcessMultiplexerTest.java | Java | apache-2.0 | 3,839 |
/**
* @author nestquick
*
*/
package dao; | nestquick/shical | src/main/java/dao/package-info.java | Java | apache-2.0 | 56 |
package com.reactbbcl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.http.client.HttpClient;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.StatusLine;
public class HTTPClient {
// constructor
public HTTPClient() {}
    public String getJSON(String address){
        // Accumulate the response body; an empty string is returned on any failure.
        StringBuilder builder = new StringBuilder();
        HttpClient client = new DefaultHttpClient();
        HttpGet httpGet = new HttpGet(address);
        try{
            HttpResponse response = client.execute(httpGet);
            StatusLine statusLine = response.getStatusLine();
            int statusCode = statusLine.getStatusCode();
            if(statusCode == 200){
                HttpEntity entity = response.getEntity();
                InputStream content = entity.getContent();
                BufferedReader reader = new BufferedReader(new InputStreamReader(content));
                try{
                    String line;
                    while((line = reader.readLine()) != null){
                        builder.append(line);
                    }
                }finally{
                    reader.close();
                }
            }
        }catch(ClientProtocolException e){
            e.printStackTrace();
        } catch (IOException e){
            e.printStackTrace();
        }
        return builder.toString();
    }
}
| EstebanFuentealba/React-Native-BBCL | android/app/src/main/java/com/reactbbcl/HTTPClient.java | Java | apache-2.0 | 1,782 |
package com.dx.jwfm.framework.web.tag;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.List;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.JspWriter;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.log4j.Logger;
import com.dx.jwfm.framework.core.RequestContext;
import com.dx.jwfm.framework.core.SystemContext;
import com.dx.jwfm.framework.core.model.FastModel;
import com.dx.jwfm.framework.core.model.MapObject;
import com.dx.jwfm.framework.core.model.search.SearchColumn;
import com.dx.jwfm.framework.util.FastUtil;
/**
 * Search-condition item tag: renders all of the model's search condition fields.
 * @author 宋帅杰
 */
public class SearchItemTag extends BaseViewTag {
/** */
private static final long serialVersionUID = 1L;
static Logger logger = Logger.getLogger(SearchItemTag.class);
	/**
	 * Menu URL; optional attribute. Used to look up, for the current page, the function-button
	 * permissions of a particular menu item.
	 * Example: for dictionary-table maintenance, the operations allowed on a dictionary sub-table
	 * must match those of the main dictionary table, so the main table's menu URL is passed in to
	 * obtain the same set of operations as the main table.
	 */
private String menuUrl;
@Override
public int doEndTag() throws JspException {
FastModel model = RequestContext.getFastModel();
JspWriter out = pageContext.getOut();
if(model==null){
return super.doEndTag();
}
try{
StringWriter buff = new StringWriter();
PrintWriter jsOut = new PrintWriter(buff);
List<SearchColumn> items = model.getModelStructure().getSearch().getSearchColumns();
for(SearchColumn col:items){
out.print("<span class=\"searchItem\"> ");
out.print(col.getVcTitle());
out.print(replaceVars(getEditorHtml(col, "search", getValue(col))));
out.println("</span>");
if(FastUtil.isNotBlank(col.getVcEditorJs())){
jsOut.println(col.getVcEditorJs());
}
}
jsOut.flush();
if(buff.getBuffer().length()>0){
out.println("<script>");
out.println(buff.toString());
out.println("</script>");
}
} catch (IOException ex) {
logger.error(ex);
}
return super.doEndTag();
}
public String getEditorHtml(SearchColumn col, String prefix,String value){
		// date-type columns need special handling
		if("dateRange".equals(col.getSqlSearchType())){// date range
String format = null;
if(col.getVcEditorType()!=null && col.getVcEditorType().startsWith("date:")){
format = col.getVcEditorType().substring(5);
}
if(FastUtil.isBlank(format)){
format = "yyyy-MM-dd";
}
int width = format.length()*6+25;
StringBuffer buff = new StringBuffer();
buff.append("<input type=hidden").append(HtmlUtil.createIdAndName(prefix, col.getVcCode()))
.append(" value=1 /><input type=text").append(HtmlUtil.createIdAndName(prefix, col.getVcCode()+"Begin"))
.append(" class=\"Wdate\" onfocus=\"WdatePicker({dateFmt:'").append(format).append("'})\" value=\"")
.append(FastUtil.nvl((String)RequestContext.getBeanValue(prefix+"."+col.getVcCode()+"Begin"),""))
.append("\" style=\"width:").append(width).append("px;\" /> 至 <input type=text").append(HtmlUtil.createIdAndName(prefix, col.getVcCode()+"End"))
.append(" class=\"Wdate\" onfocus=\"WdatePicker({dateFmt:'").append(format).append("'})\" value=\"")
.append(FastUtil.nvl((String)RequestContext.getBeanValue(prefix+"."+col.getVcCode()+"End"),"")).append("\" style=\"width:").append(width).append("px;\" />");
return buff.toString();
}
return HtmlUtil.createEditorHtml(prefix, col.getVcCode(), col.getVcEditorType(), value);
}
private String getValue(SearchColumn col){
Object action = RequestContext.getRequestAction();
String val = null;
		if(action!=null){// first try to read the value from the Action's search property
try {
Object srh = PropertyUtils.getProperty(action, "search");
				if(srh instanceof MapObject){// if the search property is a Map object, read the value from the map
MapObject map = (MapObject) srh;
if(map.containsKey(col.getVcCode())){
val = FastUtil.nvl(FastUtil.format(map.get(col.getVcCode()),null), "");
}
				}// otherwise read it as an ordinary bean property
else if(srh!=null && PropertyUtils.isReadable(srh, col.getVcCode())){
val = FastUtil.nvl(BeanUtils.getProperty(srh, col.getVcCode()), "");
}
} catch (Exception e) {
logger.error(e);
}
}
		if(val==null){// if no value was found, read it from the request parameters
val = RequestContext.getParameter("search."+col.getVcCode());
}
		if(val==null && col.getDefaults()!=null && col.getDefaults().indexOf("${")>=0){// if the request does not contain a value either, fall back to the default
val = SystemContext.replaceMacroString(col.getDefaults());
}
return val;
}
public String getMenuUrl() {
return menuUrl;
}
public void setMenuUrl(String menuUrl) {
this.menuUrl = menuUrl;
}
}
| shuaijie506/jwfm | jwfm/src/main/java/com/dx/jwfm/framework/web/tag/SearchItemTag.java | Java | apache-2.0 | 5,114 |
/*
* Copyright 2014 defrac inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package defrac.intellij.run;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterators;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.application.ApplicationConfiguration;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.JavaRunConfigurationModule;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.util.JavaParametersUtil;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.PathsList;
import defrac.intellij.DefracBundle;
import defrac.intellij.config.DefracConfigOracle;
import defrac.intellij.facet.DefracFacet;
import defrac.intellij.sdk.DefracVersion;
import org.jetbrains.annotations.NotNull;
import java.util.Iterator;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Command-line state that launches a defrac application on a local JVM,
 * assembling the classpath from the compiled JVM target and the defrac runtime,
 * and wiring in the platform-specific native libraries and resources.
 */
public final class DefracJvmRunningState extends ApplicationConfiguration.JavaApplicationCommandLineState<DefracRunConfiguration> {
@NotNull
private final DefracFacet facet;
public DefracJvmRunningState(@NotNull final ExecutionEnvironment environment,
@NotNull final DefracRunConfiguration configuration,
@NotNull final DefracFacet facet) {
super(configuration, environment);
this.facet = facet;
setConsoleBuilder(TextConsoleBuilderFactory.getInstance().
createBuilder(configuration.getProject(), configuration.getConfigurationModule().getSearchScope()));
}
@Override
protected JavaParameters createJavaParameters() throws ExecutionException {
final DefracVersion defrac = facet.getDefracVersion();
if(defrac == null) {
throw new ExecutionException(DefracBundle.message("facet.error.noVersion"));
}
final JavaParameters params = new JavaParameters();
final JavaRunConfigurationModule module = myConfiguration.getConfigurationModule();
final int classPathType = JavaParametersUtil.getClasspathType(module, myConfiguration.MAIN_CLASS_NAME, false);
final String jreHome = myConfiguration.ALTERNATIVE_JRE_PATH_ENABLED ? myConfiguration.ALTERNATIVE_JRE_PATH : null;
final String projectBasePath = FileUtil.toSystemDependentName(checkNotNull(module.getProject().getBaseDir().getCanonicalPath()));
final String defracHomePath = FileUtil.toSystemDependentName(checkNotNull(checkNotNull(facet.getDefracSdk()).getHomePath()));
final VirtualFile settingsFile = facet.getVirtualSettingsFile();
final VirtualFile nat1ve = defrac.getNative(), nativeJvmDir, nativeJvmPlatformDir;
final VirtualFile target, targetJvmDir;
final DefracConfigOracle config = facet.getConfigOracle();
if(config == null) {
throw new ExecutionException(DefracBundle.message("facet.error.noSettings"));
}
final String nativeLibs;
if(settingsFile == null) {
throw new ExecutionException(DefracBundle.message("facet.error.noSettings"));
}
target = settingsFile.getParent().findChild("target");
if(target == null) {
throw new ExecutionException("Couldn't find target directory");
}
targetJvmDir = target.findChild("jvm");
if(targetJvmDir == null) {
throw new ExecutionException("Couldn't find JVM directory");
}
if(nat1ve == null) {
throw new ExecutionException("Couldn't find native libraries");
}
nativeJvmDir = nat1ve.findChild("jvm");
if(nativeJvmDir == null) {
throw new ExecutionException("Couldn't find native JVM libraries");
}
if(SystemInfo.isLinux) {
nativeLibs = "linux";
} else if(SystemInfo.isWindows) {
nativeLibs = "win";
} else if(SystemInfo.isMac) {
nativeLibs = "mac";
} else {
throw new ExecutionException("Unsupported OS");
}
nativeJvmPlatformDir = nativeJvmDir.findChild(nativeLibs);
if(nativeJvmPlatformDir == null) {
throw new ExecutionException("Couldn't find native platform libraries");
}
// let intellij do its stuff
JavaParametersUtil.configureModule(module, params, classPathType, jreHome);
params.setMainClass(myConfiguration.MAIN_CLASS_NAME);
setupJavaParameters(params);
// now get rid of all the stuff we don't want from intellij
// - this includes any classpath inside the project
// - or any path in the defrac sdk
final PathsList pathList = params.getClassPath();
for(final String path : pathList.getPathList()) {
if(path.startsWith(projectBasePath) || path.startsWith(defracHomePath)) {
pathList.remove(path);
}
}
// add the classpath of everything we just compiled
params.getClassPath().add(targetJvmDir);
// now add the actual runtime dependencies
for(final VirtualFile runtimeLibrary: defrac.getRuntimeJars()) {
params.getClassPath().add(runtimeLibrary);
}
final ParametersList vmParametersList = params.getVMParametersList();
if(getConfiguration().isDebug()) {
// enable assertions if the user wants to debug the app
      // because it matches the behaviour of jvm:run
vmParametersList.add("-ea");
}
vmParametersList.add("-Ddefrac.hotswap=false");
vmParametersList.add("-Djava.library.path="+nativeJvmPlatformDir.getCanonicalPath());
vmParametersList.add("-Xms512M");
vmParametersList.add("-XX:+TieredCompilation");
final Iterator<String> resourcePaths = Iterators.transform(
config.getResources(facet).iterator(),
new Function<VirtualFile, String>() {
@Override
public String apply(final VirtualFile virtualFile) {
return virtualFile.getCanonicalPath();
}
}
);
vmParametersList.add("-Ddefrac.resources="+Joiner.on('$').join(resourcePaths));
return params;
}
}
| defrac/defrac-plugin-intellij | src/defrac/intellij/run/DefracJvmRunningState.java | Java | apache-2.0 | 6,640 |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.service.coordinator;
import java.util.concurrent.TimeUnit;
/**
* A distributed semaphore interface
*/
public interface DistributedSemaphore {
/**
* Try to acquire the semaphore
*
* @param time the duration to wait for the semaphore
* @param unit the duration unit
* @return the lease
*/
default DistributedLease acquire(long time, TimeUnit unit) throws Exception {
return acquire(1, time, unit);
}
/**
* Try to acquire multiple permits in the semaphore
*
* @param permits the number of permits to acquire, must be a positive integer
* @param time the duration to wait for the semaphore
* @param unit the duration unit
* @return the lease
*/
DistributedLease acquire(int permits, long time, TimeUnit unit) throws Exception;
/**
   * Determine whether any permits are currently outstanding.
   * @return {@code true} if at least one permit is outstanding
*/
boolean hasOutstandingPermits();
/**
* Register a listener that is updated every time this semaphore changes.
*
   * This is a weak registration. If the requester no longer exists, the semaphore won't keep a reference to the listener.
   *
   * @return {@code true} if the listener was successfully registered
*/
boolean registerUpdateListener(UpdateListener listener);
/**
* Listener for when a semaphore has changed state.
*/
interface UpdateListener {
/**
* Informed when the semaphore has changed (increased or decreased).
*/
void updated();
}
/**
* The semaphore lease
*/
interface DistributedLease extends AutoCloseable {}
}
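
// --- Hypothetical usage sketch added by the editor; not part of the original Dremio source. ---
// It shows one plausible way a caller could guard a critical section with a lease, using only the
// interface declared above. The permit count, timeout, and the null-check on the returned lease
// are assumptions: the interface does not specify how a failed acquisition is reported.
class DistributedSemaphoreUsageSketch {
  void runGuarded(DistributedSemaphore semaphore, Runnable task) throws Exception {
    // try to take one permit, waiting at most 30 seconds; closing the lease releases the permit
    try (DistributedSemaphore.DistributedLease lease =
        semaphore.acquire(1, 30, java.util.concurrent.TimeUnit.SECONDS)) {
      if (lease != null) { // assumption: a failed acquisition is signalled by a null lease
        task.run();
      }
    }
  }
}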
| dremio/dremio-oss | services/coordinator/src/main/java/com/dremio/service/coordinator/DistributedSemaphore.java | Java | apache-2.0 | 2,183 |
package com.thayer.idsservice.bean.mapping;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import com.thayer.idsservice.bean.mapping.xml.HotelMappingXml;
import com.thayer.idsservice.bean.mapping.xml.PlanMappingXml;
import com.thayer.idsservice.bean.mapping.xml.RateMappingXml;
import com.thayer.idsservice.bean.mapping.xml.RoomMappingXml;
import com.thayer.idsservice.bean.mapping.xml.XmlPathMapping;
/**
* MappingFactory.java
*
*
* @author waterborn
*/
@SuppressWarnings("unchecked")
public class MappingFactory {
private static MappingFactory instance = new MappingFactory();
private MappingFactory() {
InputStream stream = null;
try {
XmlPathMapping pathXmlMapping = new XmlPathMapping();
stream = MappingFactory.class.getClassLoader().getResourceAsStream("propXmlPathMapping.xml");
pathXmlMapping.parseXml(stream);
xmlPathMapping = pathXmlMapping;
for (MappingEnum type : MappingEnum.values()) {
HotelMappingXml mappingXml = new HotelMappingXml();
String xml = getHotelInfoMappingXmlName(type, pathXmlMapping);
if (mappingXml != null && xml != null) {
mappingXml.parseXml(xml);
mapHotelMapping.put(type.toString(), mappingXml);
}
}
for (MappingEnum type : MappingEnum.values()) {
RoomMappingXml mappingXml = new RoomMappingXml();
String xml = getHotelInfoMappingXmlName(type, pathXmlMapping);
if (mappingXml != null && xml != null) {
mappingXml.parseXml(xml);
mapRoomMapping.put(type.toString(), mappingXml);
}
}
for (MappingEnum type : MappingEnum.values()) {
RateMappingXml mappingXml = new RateMappingXml();
String xml = getHotelInfoMappingXmlName(type, pathXmlMapping);
if (mappingXml != null && xml != null) {
mappingXml.parseXml(xml);
mapRateMapping.put(type.toString(), mappingXml);
}
}
for (MappingEnum type : MappingEnum.values()) {
PlanMappingXml mappingXml = new PlanMappingXml();
String xml = getHotelInfoMappingXmlName(type, pathXmlMapping);
if (mappingXml != null && xml != null) {
mappingXml.parseXml(xml);
mapPlanMapping.put(type.toString(), mappingXml);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
			try {
				// guard against a null stream when the resource could not be found
				if (stream != null) {
					stream.close();
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
}
}
private Map<String, HotelMappingXml> mapHotelMapping = new HashMap<String, HotelMappingXml>();
private Map<String, RoomMappingXml> mapRoomMapping = new HashMap<String, RoomMappingXml>();
private Map<String, RateMappingXml> mapRateMapping = new HashMap<String, RateMappingXml>();
private Map<String, PlanMappingXml> mapPlanMapping = new HashMap<String, PlanMappingXml>();
private XmlPathMapping xmlPathMapping = new XmlPathMapping();
// private String getHotelMappingXmlName(Enum type) {
// String xml = null;
// if (MappingEnum.MAPPING_GREENTREE == type) {
// xml = "properties/greentree/Hotel_mapping.xml";
// } else if (MappingEnum.MAPPING_HOMEINNS == type) {
// xml = "properties/homeinn/Hotel_mapping.xml";
// } else if (MappingEnum.MAPPING_JJZX == type) {
// xml = "properties/jjzx/Hotel_mapping.xml";
// } else if (MappingEnum.MAPPING_SEVENDAYS == type) {
// xml = "properties/sevendays/Hotel_mapping.xml";
// }
// return xml;
// }
private String getHotelInfoMappingXmlName(Enum type, XmlPathMapping pathXmlMapping) {
String xml = null;
if (pathXmlMapping.getXmlBeanByEnum(type.toString()) != null) {
xml = (String) pathXmlMapping.getXmlBeanByEnum(type.toString()).getPathMapping().get("hotelInfoMapping");
}
return xml;
}
public static HotelMappingXml getHotelMapping(String type) {
return instance.mapHotelMapping.get(type.toString());
}
public static RoomMappingXml getRoomMapping(Enum type) {
return instance.mapRoomMapping.get(type.toString());
}
public static RateMappingXml getRateMapping(Enum type) {
return instance.mapRateMapping.get(type.toString());
}
public static PlanMappingXml getPlanMapping(Enum type) {
return instance.mapPlanMapping.get(type.toString());
}
public static XmlPathMapping getXmlMapping() {
return instance.xmlPathMapping;
}
public static boolean flashMap() {
try {
instance.mapHotelMapping.clear();
instance.mapRoomMapping.clear();
instance.mapRateMapping.clear();
instance = new MappingFactory();
return true;
} catch (Exception e) {
System.out.println("error: " + e.getMessage());
return false;
}
}
public XmlPathMapping getXmlPathMapping() {
return xmlPathMapping;
}
public void setXmlPathMapping(XmlPathMapping xmlPathMapping) {
this.xmlPathMapping = xmlPathMapping;
}
public static void main(String[] args) {
System.out.println(MappingFactory.getHotelMapping((MappingEnum.AGODA).toString()).getHotelCodeMap());
System.out.println(MappingFactory.getRoomMapping(MappingEnum.AGODA).getHotelRoomCodeMap());
// Map<String, PropMappingBean> propBeanMap = MappingFactory.getHotelMapping(MappingEnum.AGODA).getPropBeanMap();
// PropMappingBean propMappingBean = propBeanMap.get("1346");
Map<String, PropMappingBean> propRoomBeanMap = MappingFactory.getRoomMapping(MappingEnum.AGODA).getPropRoomBeanMap();
PropMappingBean propMappingBean2 = propRoomBeanMap.get("1932#DD");
Map<String, PropMappingBean> propRateBeanMap = MappingFactory.getRateMapping(MappingEnum.AGODA).getPropRateBeanMap();
PropMappingBean propMappingBean3 = propRateBeanMap.get("1346#BAR1");
Map<String, PropMappingBean> propPlanBeanMap = MappingFactory.getPlanMapping(MappingEnum.AGODA).getPropPlanBeanMap();
PropMappingBean propMappingBean4 = propPlanBeanMap.get("1346#SQ#BAR1");
System.out.println();
// System.out.println(MappingFactory.getRoomMapping(MappingEnum.MAPPING_HOMEINNS).getFogPropRoomMap());
}
}
| lingdongrushui/International-Distribution-System | ids/src/com/thayer/idsservice/bean/mapping/MappingFactory.java | Java | apache-2.0 | 5,894 |
/*
* Copyright 2015 Norbert
*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package norbert.mynemo.dataimport;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import norbert.mynemo.dataimport.fileformat.MynemoRating;
import norbert.mynemo.dataimport.fileformat.input.CkRatingImporter;
import norbert.mynemo.dataimport.fileformat.input.MovieLensRatingImporter;
import norbert.mynemo.dataimport.fileformat.input.MynemoRatingImporter;
import norbert.mynemo.dataimport.fileformat.input.RatingImporter;
import norbert.mynemo.dataimport.fileformat.input.TenMillionRatingImporter;
import norbert.mynemo.dataimport.fileformat.output.DuplicateRemover;
import norbert.mynemo.dataimport.fileformat.output.MaxNeighborUserFilter;
import norbert.mynemo.dataimport.fileformat.output.MaxUserFilter;
import norbert.mynemo.dataimport.fileformat.output.MinCommonRatingFilter;
import norbert.mynemo.dataimport.fileformat.output.MinRatingByMovieFilter;
import norbert.mynemo.dataimport.fileformat.output.RatingFileWriter;
import norbert.mynemo.dataimport.fileformat.output.RatingWriter;
import norbert.mynemo.dataimport.fileformat.output.ScaleValueWriter;
import norbert.mynemo.dataimport.fileformat.output.UnivalueRemover;
import norbert.mynemo.dataimport.fileformat.output.UserSimilarityType;
import com.google.common.base.Optional;
/**
* This importer can merge, convert and filter ratings from files. It produces one output file from
* several input files. The format of the output file is the Mynemo file format. The format of an
* input file is automatically detected. The ratings can be filtered.
*/
public class FileImporter {
private static final String DEFAULT_USER_ID = Integer.toString(Integer.MAX_VALUE);
/**
* Converts the given rating files to the Mynemo format. Handles the files generated from
* MovieLens, and the 10 million ratings from a MovieLens data set. If the latter is amongst the
   * rating files, a movie file must be provided. It also handles the files generated by scraping
   * CK; CK rating files need a mapping file to be imported.
*
* <p>
   * The mapping files must contain the equivalence between MovieLens ids and IMDb ids, or the
* equivalence between CK identifiers and IMDb ids. The files are automatically recognized.
*
* <p>
* If an input file contains ratings without user id, the given <code>user</code> is used. If
* <code>user</code> is absent, then a default user id is used. No check is done to find if the id
* already exists.
*
* <p>
* The output file must not exist. At least one existing rating file must be provided.
*
* @param outputFilepath the file where the imported ratings are written
* @param inputFilepaths the rating files to convert
* @param movieFilepath the file containing the equivalences between ids
* @param user the user id used for the input ratings without user
* @param maxUsers maximum number of users, the output file will contain the ratings of at most
* this number of users
* @param minRatingsByMovie minimum ratings by movie, the output file won't contain movies that
* have less than this number of ratings
* @param similarityType type of similarity used to find the nearest users of the target user
*/
public static void convert(String outputFilepath, Collection<String> inputFilepaths,
Collection<String> movieFilepath, Optional<String> user, Optional<Integer> maxUsers,
Optional<Integer> minRatingsByMovie, Optional<Integer> minCommonRatings,
Optional<UserSimilarityType> similarityType) throws IOException {
checkNotNull(outputFilepath);
checkNotNull(inputFilepaths);
checkArgument(!inputFilepaths.isEmpty(), "At least one input file must be given.");
checkArgument(!new File(outputFilepath).exists(), "The output file must not exist.");
for (String filepath : inputFilepaths) {
checkArgument(new File(filepath).exists(), "The input file must exist.");
}
for (String filepath : movieFilepath) {
checkArgument(new File(filepath).exists(), "The movie file must exist.");
}
RatingWriter writer =
createFilters(new RatingFileWriter(outputFilepath), maxUsers, minRatingsByMovie,
similarityType, minCommonRatings, user);
for (String ratingFilepath : inputFilepaths) {
RatingImporter importableFile = getFile(ratingFilepath, movieFilepath, user);
for (MynemoRating rating : importableFile) {
writer.write(rating);
}
}
writer.close();
}
/**
* Interposes the necessary filters before the last writer, according to the given parameters.
*/
private static RatingWriter createFilters(RatingWriter lastWriter, Optional<Integer> maxUsers,
Optional<Integer> minRatingsByMovie, Optional<UserSimilarityType> similarityType,
Optional<Integer> minCommonRatings, Optional<String> targetUser) {
RatingWriter nextWriter = lastWriter;
if (minRatingsByMovie.isPresent()) {
nextWriter = new MinRatingByMovieFilter(nextWriter, minRatingsByMovie.get());
}
if (maxUsers.isPresent()) {
if (similarityType.isPresent()) {
nextWriter =
new MaxNeighborUserFilter(nextWriter, targetUser.get(), maxUsers.get(),
similarityType.get());
} else {
nextWriter = new MaxUserFilter(nextWriter, maxUsers.get());
}
}
if (minCommonRatings.isPresent()) {
checkArgument(targetUser.isPresent(), "The user parameter is missing.");
nextWriter = new MinCommonRatingFilter(nextWriter, targetUser.get(), minCommonRatings.get());
}
nextWriter = new ScaleValueWriter(nextWriter);
nextWriter = new DuplicateRemover(nextWriter);
nextWriter = new UnivalueRemover(nextWriter);
return nextWriter;
}
/**
* Returns a rating file that can parse the ratings contained in the given file.
*
* @throws UnsupportedOperationException if the file cannot be parsed
*/
private static RatingImporter getFile(String ratingFilepath, Collection<String> mappingFilepaths,
Optional<String> user) throws IOException {
for (String mappingFilepath : mappingFilepaths) {
if (CkRatingImporter.canImport(ratingFilepath, mappingFilepath)) {
return new CkRatingImporter(ratingFilepath, mappingFilepath);
}
if (TenMillionRatingImporter.canImport(ratingFilepath, mappingFilepath)) {
return new TenMillionRatingImporter(ratingFilepath, mappingFilepath);
}
}
if (MovieLensRatingImporter.canImport(ratingFilepath)) {
return new MovieLensRatingImporter(ratingFilepath, user.or(DEFAULT_USER_ID));
}
// the Mynemo rating file must stay last because it does not use headers, so the parser can
// parse several file formats.
if (MynemoRatingImporter.canImport(ratingFilepath)) {
return new MynemoRatingImporter(ratingFilepath);
}
throw new UnsupportedOperationException("Unable to convert the file \"" + ratingFilepath
+ "\".");
}
/**
   * Private constructor to prevent instantiation.
*/
private FileImporter() {
throw new AssertionError();
}
}
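
// --- Hypothetical usage sketch added by the editor; not part of the original Mynemo source. ---
// Only the signature of FileImporter.convert(...) above is taken from the original code; all file
// paths and filter values below are illustrative placeholders, not real data set locations.
class FileImporterUsageSketch {
  static void example() throws java.io.IOException {
    FileImporter.convert(
        "target/imported-ratings.tsv",                          // output file, must not exist yet
        java.util.Arrays.asList("data/movielens-ratings.csv"),  // input rating files
        java.util.Arrays.asList("data/movie-id-mapping.dat"),   // id mapping / movie files
        com.google.common.base.Optional.of("007"),              // user id for ratings without one
        com.google.common.base.Optional.of(5000),               // keep at most 5000 users
        com.google.common.base.Optional.of(10),                 // drop movies with fewer than 10 ratings
        com.google.common.base.Optional.<Integer>absent(),      // no minimum common ratings
        com.google.common.base.Optional.<norbert.mynemo.dataimport.fileformat.output.UserSimilarityType>absent());
  }
}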
| norbertdev/mynemo | src/main/java/norbert/mynemo/dataimport/FileImporter.java | Java | apache-2.0 | 8,061 |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.java;
import java.util.Locale;
import com.asakusafw.dmdl.source.DmdlSourceRepository;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.Name;
import com.asakusafw.utils.java.model.util.Emitter;
/**
* Configurations for DMDL Java emitter.
*/
public class Configuration {
private final ModelFactory factory;
private final DmdlSourceRepository source;
private final Name basePackage;
private final Emitter output;
private final ClassLoader serviceClassLoader;
private final Locale locale;
/**
* Creates and returns a new instance.
* @param factory Java DOM element factory
* @param source DMDL source repository
* @param basePackage the Java base package
* @param output the Java DOM emitter
* @param serviceClassLoader the class loader to load external services
* @param locale the locale information to generate programs
* @throws IllegalArgumentException if some parameters were {@code null}
*/
public Configuration(
ModelFactory factory,
DmdlSourceRepository source,
Name basePackage,
Emitter output,
ClassLoader serviceClassLoader,
Locale locale) {
if (factory == null) {
throw new IllegalArgumentException("factory must not be null"); //$NON-NLS-1$
}
if (source == null) {
throw new IllegalArgumentException("source must not be null"); //$NON-NLS-1$
}
if (basePackage == null) {
throw new IllegalArgumentException("basePackage must not be null"); //$NON-NLS-1$
}
if (output == null) {
throw new IllegalArgumentException("output must not be null"); //$NON-NLS-1$
}
if (serviceClassLoader == null) {
throw new IllegalArgumentException("serviceClassLoader must not be null"); //$NON-NLS-1$
}
if (locale == null) {
throw new IllegalArgumentException("locale must not be null"); //$NON-NLS-1$
}
this.factory = factory;
this.source = source;
this.basePackage = basePackage;
this.output = output;
this.serviceClassLoader = serviceClassLoader;
this.locale = locale;
}
/**
* Returns Java DOM element factory.
* @return the element factory
*/
public ModelFactory getFactory() {
return factory;
}
/**
* Returns DMDL source repository.
* @return the source repository
*/
public DmdlSourceRepository getSource() {
return source;
}
/**
* Returns the Java base package.
* @return the base package
*/
public Name getBasePackage() {
return basePackage;
}
/**
* Returns the Java DOM emitter.
* @return the Java DOM emitter
*/
public Emitter getOutput() {
return output;
}
/**
* Returns the service class loader.
* @return the class loader to load the external service classes
*/
public ClassLoader getServiceClassLoader() {
return serviceClassLoader;
}
/**
* Returns the locale information to generate programs.
* @return the locale
*/
public Locale getLocale() {
return locale;
}
}
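
// --- Hypothetical construction sketch added by the editor; not part of the original Asakusa source. ---
// It only exercises the constructor contract documented above; the factory, source repository,
// base package name and emitter instances are assumed to be created elsewhere by the caller, and
// the class-loader and locale choices below are illustrative defaults, not project conventions.
class ConfigurationUsageSketch {
    Configuration newConfiguration(
            ModelFactory factory,
            DmdlSourceRepository source,
            Name basePackage,
            Emitter output) {
        // resolve external services via the context class loader and emit using the default locale
        return new Configuration(
                factory, source, basePackage, output,
                Thread.currentThread().getContextClassLoader(),
                Locale.getDefault());
    }
}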
| asakusafw/asakusafw | dmdl-project/asakusa-dmdl-java/src/main/java/com/asakusafw/dmdl/java/Configuration.java | Java | apache-2.0 | 3,959 |