code
stringlengths
5
1.04M
repo_name
stringlengths
7
108
path
stringlengths
6
299
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1.04M
/* Copyright 1995-2014 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.geoevent.adapter.kml; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import com.esri.core.geometry.Geometry.Type; import com.esri.core.geometry.MapGeometry; import com.esri.ges.core.geoevent.FieldDefinition; import com.esri.ges.core.geoevent.GeoEvent; import com.esri.ges.core.geoevent.GeoEventDefinition; import com.esri.ges.framework.i18n.BundleLogger; import com.esri.ges.framework.i18n.BundleLoggerFactory; public class KmlGeneratorBase { private static final BundleLogger LOGGER = BundleLoggerFactory.getLogger(KmlGeneratorBase.class); private String altitudeTag; private String kmlLabelFieldTag; private String headingTag; private String rollTag; private String tiltTag; private String styleUrl; private String modelUrl; @SuppressWarnings("deprecation") protected Placemark createPlacemark(GeoEvent geoevent, boolean updateMode, KmlRequestParameters params, String styleUrl, String modelUrl) { GeoEventDefinition geoEventDefinition = geoevent.getGeoEventDefinition(); String pointStyleId = params.getPointStyleId(); Placemark pm = new Placemark(); // 
pm.setName(geoevent.getTrackId()); pm.setName(getLabelFieldValue(geoevent)); if (updateMode) pm.setTargetId(geoevent.getTrackId()); else pm.setId(geoevent.getTrackId()); Point pt = new Point(); ArrayList<Data> extendedData = new ArrayList<Data>(); Data data; Object[] attributes = geoevent.getAllFields(); for (int i = 0; i < attributes.length; i++) { Object obj = attributes[i]; if (obj != null) { FieldDefinition fieldDefinition = geoEventDefinition.getFieldDefinitions().get(i); String fieldName = fieldDefinition.getName(); if (i == geoevent.getGeoEventDefinition().getGeometryId()) { MapGeometry geom = geoevent.getGeometry(); if( geom != null && geom.getGeometry() != null && geom.getGeometry().getType() == Type.Point ) { com.esri.core.geometry.Point point = (com.esri.core.geometry.Point) geom.getGeometry(); if(params.isUse3DModel()) { Model model = createModel(geoevent,point, params, modelUrl); pm.setModel(model); } else { double zvalue = point.getZ(); int index = geoevent.getGeoEventDefinition().getIndexOf(altitudeTag); if(index > -1) { zvalue=Double.parseDouble(geoevent.getAllFields()[index].toString()); } pt.setCoordinates(point.getX() + "," + point.getY() + "," + zvalue); pt.setAltitudeMode(params.getAltitudeMode()); pm.setPoint(pt); } } else { LOGGER.info("Unsupported Geometry Type."); } } else if (obj instanceof Date) { SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss aa"); data = new Data(fieldName, formatter.format((Date) obj)); extendedData.add(data); } else { String attribute = obj.toString(); if (attribute.contains(",")) attribute = "\"" + attribute + "\""; data = new Data(fieldName, attribute.toString()); extendedData.add(data); if (params.getPointStyleField().length() > 0 && params.getPointStyleField().equals(fieldName)) { pointStyleId = attribute.toString(); } } } } pm.setStyleUrl(styleUrl + pointStyleId); pm.setData(extendedData); return pm; } @SuppressWarnings("deprecation") protected Placemark createPlacemarkForTrack(GeoEvent 
geoevent, List<GeoEvent> geoevents, boolean updateMode, String altitudeMode, String styleUrl, String defaultLineStyleId, String lineStyleField) { GeoEventDefinition geoEventDefinition = geoevent.getGeoEventDefinition(); String lineStyleId = defaultLineStyleId; Placemark pm = new Placemark(); // pm.setName(geoevent.getTrackId() + "_track"); pm.setName(getLabelFieldValue(geoevent) + "_track"); if (updateMode) pm.setTargetId(geoevent.getTrackId() + "_track"); else pm.setId(geoevent.getTrackId() + "_track"); LineString line = new LineString(); ArrayList<Data> extendedData = new ArrayList<Data>(); Data data; Object[] attributes = geoevent.getAllFields(); for (int i = 0; i < attributes.length; i++) { Object obj = attributes[i]; if (obj != null) { FieldDefinition fieldDefinition = geoEventDefinition.getFieldDefinitions().get(i); String fieldName = fieldDefinition.getName(); if (i == geoevent.getGeoEventDefinition().getGeometryId()) { line.setExtrude(true); line.setTessellate(true); line.setAltitudeMode(altitudeMode); line.setCoordinates(getTrackLineString(geoevents, true)); } else if (obj instanceof Date) { SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss aa"); data = new Data(fieldName, formatter.format((Date) obj)); extendedData.add(data); } else { String attribute = obj.toString(); if (attribute.contains(",")) attribute = "\"" + attribute + "\""; data = new Data(fieldName, attribute.toString()); extendedData.add(data); if (lineStyleField.length() > 0 && lineStyleField.equals(fieldName)) { lineStyleId = attribute.toString(); } } } } pm.setStyleUrl(styleUrl + lineStyleId); pm.setData(extendedData); pm.setLineString(line); return pm; } protected String getTrackLineString(List<GeoEvent> geoevents, boolean useZValue) { String strCoords = ""; String ptStr = ""; for (int j = geoevents.size() - 1; j >= 0; j--) { GeoEvent geoEvent = geoevents.get(j); if( geoEvent.getGeometry() != null && geoEvent.getGeometry().getGeometry() != null && 
geoEvent.getGeometry().getGeometry().getType() == Type.Point ) { com.esri.core.geometry.Point pt = (com.esri.core.geometry.Point) geoEvent.getGeometry().getGeometry(); if (useZValue) { // Workaround: currently geomtry doesn't give us z value. We have to make sure that // the geoevent has a field that is tagged as ALTITUDE which supplies the z value double zvalue = pt.getZ(); int index = geoevents.get(j).getGeoEventDefinition().getIndexOf(altitudeTag); if(index > -1) { zvalue=Double.parseDouble(geoevents.get(j).getAllFields()[index].toString()); } ptStr = pt.getX() + "," + pt.getY() + "," + zvalue; } else { ptStr = pt.getX() + "," + pt.getY() + ",NaN"; } if (strCoords.length() == 0) { strCoords = ptStr; } else { strCoords = strCoords + " " + ptStr; } } } return strCoords; } protected boolean validateAltitudeMode(String input) { if (input.equalsIgnoreCase(AltitudeMode.ABSOLUTE) || input.equalsIgnoreCase(AltitudeMode.CLAMPTOSEAFLOOR) || input.equalsIgnoreCase(AltitudeMode.CLAMTOGROUND) || input.equalsIgnoreCase(AltitudeMode.RELATIVETOGROUND) || input.equalsIgnoreCase(AltitudeMode.RELATIVETOSEAFLOOR)) { return true; } return false; } protected String getStyleFileUrl(String styleFilename) { try { new URL(styleFilename); return styleFilename + "#"; } catch (MalformedURLException e) { URI u; try { u = new URI(styleUrl); return u.toString() + "/" + styleFilename + ".xml#"; } catch (URISyntaxException error) { LOGGER.error(error.getMessage(), error); } } catch (Exception error) { LOGGER.error(error.getMessage(), error); } return ""; } protected String getModelUrl(String modelPath) { try { new URL(modelPath); return modelPath; } catch (MalformedURLException e) { URI u; try { u = new URI(modelUrl); return u.toString(); } catch (URISyntaxException error) { LOGGER.error(error.getMessage(), error); } } catch (Exception error) { LOGGER.error(error.getMessage(), error); } return ""; } protected boolean isGeoEventDefinitionInRequest(String requestString, String geDef) { String[] 
geDefs = requestString.split(","); if (geDefs.length > 0) { for (int i = 0; i < geDefs.length; i++) { if (geDefs[i].equals(geDef)) { return true; } } } return false; } protected Model createModel(GeoEvent geoevent, com.esri.core.geometry.Point point, KmlRequestParameters params, String modelPath) { Model model = new Model(); Location location = new Location(); Orientation orientation = new Orientation(); Scale scale = new Scale(); Link link = new Link(); //location.setId("Location_" + geoevent.getTrackId()); location.setLongitude(point.getX()); location.setLatitude(point.getY()); // Workaround: currently geomtry doesn't give us z value. We have to make sure that // the geoevent has a field that is tagged as ALTITUDE which supplies the z value double zvalue = point.getZ(); int index = geoevent.getGeoEventDefinition().getIndexOf(altitudeTag); if(index > -1) { zvalue=Double.parseDouble(geoevent.getAllFields()[index].toString()); } location.setAltitude(zvalue); int i; i=geoevent.getGeoEventDefinition().getIndexOf(headingTag); if(i>-1) { orientation.setHeading(Double.parseDouble(geoevent.getAllFields()[i].toString())); } else { orientation.setHeading(0); } i=geoevent.getGeoEventDefinition().getIndexOf(rollTag); if(i>-1) { orientation.setRoll(Double.parseDouble(geoevent.getAllFields()[i].toString())); } else { orientation.setRoll(0); } i=geoevent.getGeoEventDefinition().getIndexOf(tiltTag); if(i>-1) { orientation.setTilt(Double.parseDouble(geoevent.getAllFields()[i].toString())); } else { orientation.setTilt(0); } //orientation.setId("Orientation_" + geoevent.getTrackId()); //scale.setId("Scale_" + geoevent.getTrackId()); scale.setX(params.getModelScale()); scale.setY(params.getModelScale()); scale.setZ(params.getModelScale()); if(params.getModelField().length()>0) { int indexModelField = geoevent.getGeoEventDefinition().getIndexOf(params.getModelField()); if(indexModelField>-1 && indexModelField<geoevent.getAllFields().length) { String value = 
geoevent.getAllFields()[geoevent.getGeoEventDefinition().getIndexOf(params.getModelField())].toString(); link.setHref(modelPath + "/" + value + ".dae"); } // else // { // link.setHref(modelPath + "/" + params.getModelId() + ".dae"); // } } else { link.setHref(modelPath + "/" + params.getModelId() + ".dae"); } //link.setId("Link_" + geoevent.getTrackId()); model.setAltitudeMode(params.getAltitudeMode()); model.setLocation(location); model.setOrientation(orientation); model.setScale(scale); model.setLink(link); //model.setId("Model_" + geoevent.getTrackId()); return model; } private String getLabelFieldValue(GeoEvent geoevent) { String value = ""; int i; i = geoevent.getGeoEventDefinition().getIndexOf(kmlLabelFieldTag); if (i > -1) { value = geoevent.getAllFields()[i].toString(); } else { value = geoevent.getTrackId(); } return value==null?"":value; } public void setAltitudeTag(String altitudeTag) { this.altitudeTag = altitudeTag; } public void setKmlLabelFieldTag(String kmlLabelFieldTag) { this.kmlLabelFieldTag = kmlLabelFieldTag; } public void setHeadingTag(String headingTag) { this.headingTag = headingTag; } public void setRollTag(String rollTag) { this.rollTag = rollTag; } public void setTiltTag(String tiltTag) { this.tiltTag = tiltTag; } public void setModelUrl(String modelUrl) { this.modelUrl = modelUrl; } public void setStyleUrl(String styleUrl) { this.styleUrl = styleUrl; } }
Esri/kml-for-geoevent
kml-adapter/src/main/java/com/esri/geoevent/adapter/kml/KmlGeneratorBase.java
Java
apache-2.0
13,616
package com.adms.kpireport.service; import java.util.List; import org.hibernate.criterion.DetachedCriteria; import com.adms.entity.KpiCategorySetup; public interface KpiCategorySetupService { public List<KpiCategorySetup> findAll() throws Exception; public KpiCategorySetup add(KpiCategorySetup example, String userLogin) throws Exception; public KpiCategorySetup update(KpiCategorySetup example, String userLogin) throws Exception; public void delete(KpiCategorySetup example) throws Exception; public List<KpiCategorySetup> find(KpiCategorySetup example) throws Exception; public List<KpiCategorySetup> findByHql(String hql, Object...vals) throws Exception; public List<KpiCategorySetup> findByNamedQuery(String namedQuery, Object...vals) throws Exception; public List<KpiCategorySetup> findByCriteria(DetachedCriteria detachedCriteria) throws Exception; public int deleteByHql(String hql, Object... vals) throws Exception; }
AEGONTH/kpi-report-service
src/main/java/com/adms/kpireport/service/KpiCategorySetupService.java
Java
apache-2.0
953
/* * Copyright 2014 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import static org.apache.phoenix.util.TestUtil.*; import static org.junit.Assert.*; import java.sql.*; import java.util.Properties; import org.junit.Test; import org.apache.phoenix.util.PhoenixRuntime; public class CoalesceFunctionTest extends BaseClientManagedTimeTest { @Test public void testCoalesce() throws Exception { long ts = nextTimestamp(); String tenantId = getOrganizationId(); initATableValues(tenantId, getDefaultSplits(tenantId), null, ts); Properties props = new Properties(); props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 1)); // Execute at timestamp 1 Connection conn = DriverManager.getConnection(PHOENIX_JDBC_URL, props); String query = "SELECT entity_id, a_integer + COALESCE(x_integer,1) FROM ATABLE WHERE organization_id = ? 
AND a_integer >= 6 AND a_integer <= 7"; PreparedStatement statement = conn.prepareStatement(query); statement.setString(1, tenantId); ResultSet rs = statement.executeQuery(); assertTrue (rs.next()); assertEquals(ROW6, rs.getString(1)); assertEquals(7, rs.getInt(2)); assertTrue (rs.next()); assertEquals(ROW7, rs.getString(1)); assertEquals(12, rs.getInt(2)); assertFalse(rs.next()); conn.close(); } }
ramkrish86/incubator-phoenix
phoenix-core/src/test/java/org/apache/phoenix/end2end/CoalesceFunctionTest.java
Java
apache-2.0
2,256
package day2; public class DuckTest { public static void main(String[] args) { new DuckTest().test(); } public void test() { Duck md = new MallardDuck(); Duck rd = new RedheadDuck(); Duck rubber = new RubberDuck(); md.display(); rd.display(); Flyable flyableDuck1 = (Flyable)md; Flyable flyableDuck2 = (Flyable)rd; Flyable flyableDuck3 = (Flyable)rubber; flyableDuck1.fly(); flyableDuck2.fly(); flyableDuck3.fly(); } }
nnoco/playground
design-patterns/HeadFirst Design Patterns/src/day2/DuckTest.java
Java
apache-2.0
465
/* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.server.distributed; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.orient.setup.ServerRun; import java.util.HashSet; import java.util.Set; import org.assertj.core.api.Assertions; import org.junit.Assert; import org.junit.Test; /** * Checks the listeners are correctly invoked at every status change of databases even after a * restart. 
*/ public class DistributedListenerIT extends AbstractServerClusterTxTest { private static final int SERVERS = 2; private volatile boolean restartExecuted = false; private final Set<String> afterRestartdbOnline = new HashSet<>(); @Test public void test() throws Exception { startupNodesInSequence = true; count = 10; maxRetries = 10; init(SERVERS); prepare(false); execute(); } @Override protected void onServerStarted(final ServerRun server) { super.onServerStarted(server); // INSTALL ON FIRST SERVER ONLY THE SERVER MONITOR TO CHECK IF HAS BEEN RESTARTED server .server .getDistributedManager() .registerLifecycleListener( new ODistributedLifecycleListener() { @Override public boolean onNodeJoining(String iNode) { return true; } @Override public void onNodeJoined(String iNode) {} @Override public void onNodeLeft(String iNode) {} public void onDatabaseChangeStatus( String iNode, String iDatabaseName, ODistributedServerManager.DB_STATUS iNewStatus) { OLogManager.instance() .info(this, "Node %s DB %s Status %s", null, iNode, iDatabaseName, iNewStatus); if (iNewStatus == ODistributedServerManager.DB_STATUS.ONLINE) { final String dbName = iNode + ":" + iDatabaseName; if (restartExecuted) afterRestartdbOnline.add(dbName); } } }); } @Override protected void onAfterExecution() throws Exception { restartExecuted = true; // BACKUP LAST SERVER, RUN ASYNCHRONOUSLY serverInstance.get(0).shutdownServer(); serverInstance.get(1).shutdownServer(); banner("RESTART OF SERVERS"); try { startServers(); } catch (Exception e) { Assertions.fail(e.toString()); } waitForDatabaseIsOnline( 0, serverInstance.get(0).getServerInstance().getDistributedManager().getLocalNodeName(), getDatabaseName(), 30000); waitForDatabaseIsOnline( 1, serverInstance.get(1).getServerInstance().getDistributedManager().getLocalNodeName(), getDatabaseName(), 30000); Assert.assertTrue( "DB online after restart " + afterRestartdbOnline, !afterRestartdbOnline.isEmpty()); } @Override public String getDatabaseName() { return 
"distributed-listener"; } }
orientechnologies/orientdb
distributed/src/test/java/com/orientechnologies/orient/server/distributed/DistributedListenerIT.java
Java
apache-2.0
3,623
package upparse.corpus; /** * @author eponvert@utexas.edu (Elias Ponvert) */ public enum OutputType { CLUMP, NPS, TREEBANKRB, TREEBANKPREC, TREEBANKFLAT, UNDERSCORE, NONE, UNDERSCORE4CCL, PUNC, PPS; public static String outputTypesHelp() { return "Evaluation types:\n" + " CLUMP\n" + " NPS\n"+ " TREEBANKPREC\n"+ " TREEBANKFLAT\n"+ " TREEBANKRB\n"+ " UNDERSCORE\n" + " NONE"; } }
eponvert/upparse
java/upparse/corpus/OutputType.java
Java
apache-2.0
438
package hu.akarnokd.enumerables; final class IEMaxInt<T> implements IEnumerable<Integer> { final IEnumerable<T> source; IEMaxInt(IEnumerable<T> source) { this.source = source; } @Override public IEnumerator<Integer> enumerator() { return new MaxIntEnumerator<>(source.enumerator()); } static final class MaxIntEnumerator<T> extends BasicEnumerator<Integer> { final IEnumerator<T> source; boolean once; MaxIntEnumerator(IEnumerator<T> source) { this.source = source; } @Override public boolean moveNext() { if (!once) { once = true; IEnumerator<T> src = source; int c = 0; if (src.moveNext()) { c = ((Number)src.current()).intValue(); while (src.moveNext()) { c = Math.max(c, ((Number)src.current()).intValue()); } value = c; return true; } } value = null; return false; } } }
akarnokd/akarnokd-misc
src/main/java/hu/akarnokd/enumerables/IEMaxInt.java
Java
apache-2.0
1,153
package com.aeclarke.typewriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import javax.xml.bind.annotation.adapters.HexBinaryAdapter; import android.R.string; import android.app.Activity; import android.app.Fragment; import android.content.Context; import android.os.Bundle; import android.text.method.ScrollingMovementMethod; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import com.aeclarke.typewriter.R; import com.google.typography.font.sfntly.Font; import com.google.typography.font.sfntly.FontFactory; import com.google.typography.font.sfntly.Tag; import com.google.typography.font.sfntly.Font.MacintoshEncodingId; import com.google.typography.font.sfntly.Font.PlatformId; import com.google.typography.font.sfntly.data.ReadableFontData; import com.google.typography.font.sfntly.data.WritableFontData; import com.google.typography.font.sfntly.table.Header; import com.google.typography.font.sfntly.table.Table; import com.google.typography.font.sfntly.table.Table.Builder; import com.google.typography.font.sfntly.table.core.CMap; import com.google.typography.font.sfntly.table.core.CMapFormat4; import com.google.typography.font.sfntly.table.core.CMapTable; import com.google.typography.font.sfntly.table.core.CMapTable.CMapId; import com.google.typography.font.sfntly.table.truetype.Glyph; import com.google.typography.font.sfntly.table.truetype.GlyphTable; import com.google.typography.font.sfntly.table.truetype.LocaTable; import com.google.typography.font.tools.subsetter.CMapTableBuilder; public class MainActivity extends Activity { private static final String ROBOTO_PATH = "/system/fonts/Roboto-Regular.ttf"; @Override protected void 
onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); if (savedInstanceState == null) { getFragmentManager().beginTransaction().add(R.id.container, new PlaceholderFragment()).commit(); } ((TextView) findViewById(R.id.main_content_text_1)).setText("Starting..."); try { FontManipulator fontManipulator = new FontManipulator(ROBOTO_PATH); fontManipulator.backupFont(); Font font = fontManipulator.loadFont(new File(ROBOTO_PATH))[0]; listFontPlatformInformation(font, (TextView) findViewById(R.id.main_content_text_2)); listTablesTagListToView(font, (TextView) findViewById(R.id.main_content_text_4)); listSampleCmapEntries(font, (TextView) findViewById(R.id.main_content_text_3)); // fontManipulator.saveFontAs(fontManipulator.applyCaesarCypherToFont(font), ROBOTO_PATH, this); } catch (IOException e) { e.printStackTrace(); } } private CMapTable listFontPlatformInformation(Font font, TextView findViewById) { CMapTable cmapTable = font.getTable(Tag.cmap); Iterator<CMap> cmapSubtableIterator = cmapTable.iterator(); StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("PlatformID/EncodingID combinations present in Roboto-Regular.ttf:"); stringBuilder.append("\n"); for (; cmapSubtableIterator.hasNext();) { CMap next = cmapSubtableIterator.next(); stringBuilder.append(Integer.toString(next.platformId()) + " - " + Integer.toString(next.encodingId()) + "\n"); } stringBuilder.append("\nID Platform Description\n"); stringBuilder.append("0 Unicode\n"); stringBuilder.append("1 Macintosh\n"); stringBuilder.append("2 ISO [deprecated]\n"); stringBuilder.append("3 Windows\n"); stringBuilder.append("4 Custom\n"); stringBuilder.append("\nId Encoding Description\n"); stringBuilder.append("0 Unicode 1.0 semantics" + "\n" + "1 Unicode 1.1 semantics" + "\n" + "2 ISO/IEC 10646 semantics" + "\n" + "3 Unicode 2.0 and onwards semantics, Unicode BMP only (cmap subtable formats 0, 4, 6)." 
+ "\n" + "4 Unicode 2.0 and onwards semantics, Unicode full repertoire (cmap subtable formats 0, 4, 6, 10, 12)." + "\n" + "5 Unicode Variation Sequences (cmap subtable format 14)." + "\n" + "6 Unicode full repertoire (cmap subtable formats 0, 4, 6, 10, 12, 13)."); stringBuilder.append("\n\n\nBelow are some mapped characters:\n"); findViewById.setText(stringBuilder); return cmapTable; } private Map<Integer, Integer> listSampleCmapEntries(Font font, TextView cmappingTextView) { Map<Integer, Integer> characterMapping = new HashMap<Integer, Integer>(); CMapTable cmapTable = font.getTable(Tag.cmap); CMap cmap = cmapTable.cmap(CMapId.getInstance(0,3)); Iterator<Integer> cmapIterator = cmap.iterator(); StringBuilder mappingStringBuilder = new StringBuilder(); mappingStringBuilder.append("\nThe cmap table format is: " + cmap.format() + "\n"); for (; cmapIterator.hasNext();) { int characterId = cmapIterator.next(); int glyphId = cmap.glyphId(characterId); if(characterId < 100) { characterMapping.put(characterId, glyphId + 1); mappingStringBuilder.append(Integer.toHexString(characterId) + ":" + Integer.toString(glyphId) + "\n"); } else { characterMapping.put(characterId, glyphId); } } cmappingTextView.setText(mappingStringBuilder); return characterMapping; } private void listTablesTagListToView(Font font, TextView tableTagsView) { StringBuilder tableListBuilder = new StringBuilder(); tableListBuilder.append("Below are the tag names of each table in the font:\n"); for (Iterator<? extends Table> tableIterator = font.iterator(); tableIterator.hasNext();) { Table table = tableIterator.next(); tableListBuilder.append(Tag.stringValue(table.headerTag()) + "\n"); } tableTagsView.setText(tableListBuilder); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. 
getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } /** * A placeholder fragment containing a simple view. */ public static class PlaceholderFragment extends Fragment { public PlaceholderFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_main, container, false); return rootView; } } }
andreweskeclarke/typewriter
src/com/aeclarke/typewriter/MainActivity.java
Java
apache-2.0
6,903
/* package ru.studentProject.dao; import ru.studentProject.model.Tag; public class TagDaoImpl extends GenericDao<Tag> { } */
fev0ks/NCStudentsProject
src/main/java/ru/studentProject/dao/TagDaoImpl.java
Java
apache-2.0
128
package io.syndesis.qe.bdd.validation; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Fail.fail; import io.syndesis.qe.TestConfiguration; import io.syndesis.qe.utils.OpenShiftUtils; import org.apache.commons.io.FileUtils; import org.w3c.dom.Document; import org.w3c.dom.Node; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import java.io.File; import cucumber.api.java.en.Then; import lombok.extern.slf4j.Slf4j; /** * Validation methods for validating productized builds. */ @Slf4j public class ProdValidationSteps { private Document pom; private void loadIntegrationXml() { if (pom != null) { return; } final String integrationPodName = OpenShiftUtils.getPodByPartialName("i-").get().getMetadata().getName(); log.info(integrationPodName); // It adds quotes around the command for exec and oc client doesn't understand that, so rsync the file instead OpenShiftUtils.binary().execute( "rsync", "-n", TestConfiguration.openShiftNamespace(), integrationPodName + ":/tmp/src/pom.xml", "/tmp" ); DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance(); try { pom = builderFactory.newDocumentBuilder().parse(FileUtils.openInputStream(new File("/tmp/pom.xml"))); } catch (Exception e) { fail("Unable to parse integration's pom.xml: ", e); } } @Then("^check that integration pom contains productized version in property \"([^\"]*)\"$") public void checkProductizedVersionIn(String property) { loadIntegrationXml(); final XPath xPath = XPathFactory.newInstance().newXPath(); try { assertThat(((Node) xPath.compile("//project/properties/" + property).evaluate(pom, XPathConstants.NODE)).getTextContent()) .contains("redhat"); } catch (XPathExpressionException e) { fail("Unable to compile xpath expression: ", e); } } }
mcada/syndesis-qe
utilities/src/main/java/io/syndesis/qe/bdd/validation/ProdValidationSteps.java
Java
apache-2.0
2,223
/* * Copyright (c) 2016. Sunghyouk Bae <sunghyouk.bae@gmail.com> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package debop4k.data.orm.springdata.examples.caching; import debop4k.data.orm.jpa.config.databases.JpaTestConfiguration; import debop4k.redisson.spring.cache.RedissonCacheKeyGenerator; import debop4k.redisson.spring.cache.RedissonCacheManager; import org.redisson.Redisson; import org.redisson.api.RedissonClient; import org.springframework.cache.annotation.EnableCaching; import org.springframework.cache.interceptor.KeyGenerator; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement @EnableCaching @EnableJpaRepositories(basePackageClasses = {UserRepository.class}) public class CachingConfiguration extends JpaTestConfiguration { @Override public String[] getMappedPackageNames() { return new String[]{User.class.getPackage().getName()}; } @Bean public RedissonClient redissonClient() { org.redisson.config.Config config = new org.redisson.config.Config(); config.useSingleServer().setAddress("localhost:6379"); return Redisson.create(config); } @Bean public RedissonCacheManager redissonCacheManager(RedissonClient redissonClient) { RedissonCacheManager cm = new RedissonCacheManager(redissonClient); 
cm.setDefaultExpiryInMillis(60 * 1000); // 60 sec return cm; } @Bean public KeyGenerator cacheKeyGenerator() { return new RedissonCacheKeyGenerator(); } }
debop/debop4k
debop4k-data-orm/src/test/java/debop4k/data/orm/springdata/examples/caching/CachingConfiguration.java
Java
apache-2.0
2,216
package org.web3j.abi.datatypes.generated; import java.math.BigInteger; import org.web3j.abi.datatypes.Int; /** * Auto generated code. * <p><strong>Do not modifiy!</strong> * <p>Please use org.web3j.codegen.AbiTypesGenerator in the * <a href="https://github.com/web3j/web3j/tree/master/codegen">codegen module</a> to update. */ public class Int136 extends Int { public static final Int136 DEFAULT = new Int136(BigInteger.ZERO); public Int136(BigInteger value) { super(136, value); } public Int136(long value) { this(BigInteger.valueOf(value)); } }
web3j/web3j
abi/src/main/java/org/web3j/abi/datatypes/generated/Int136.java
Java
apache-2.0
594
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.config.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.config.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.protocol.*; import com.amazonaws.protocol.Protocol; import com.amazonaws.annotation.SdkInternalApi; /** * DescribeConformancePackComplianceRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class DescribeConformancePackComplianceRequestProtocolMarshaller implements Marshaller<Request<DescribeConformancePackComplianceRequest>, DescribeConformancePackComplianceRequest> { private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/") .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true) .operationIdentifier("StarlingDoveService.DescribeConformancePackCompliance").serviceName("AmazonConfig").build(); private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory; public DescribeConformancePackComplianceRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<DescribeConformancePackComplianceRequest> 
marshall(DescribeConformancePackComplianceRequest describeConformancePackComplianceRequest) { if (describeConformancePackComplianceRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { final ProtocolRequestMarshaller<DescribeConformancePackComplianceRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller( SDK_OPERATION_BINDING, describeConformancePackComplianceRequest); protocolMarshaller.startMarshalling(); DescribeConformancePackComplianceRequestMarshaller.getInstance().marshall(describeConformancePackComplianceRequest, protocolMarshaller); return protocolMarshaller.finishMarshalling(); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
aws/aws-sdk-java
aws-java-sdk-config/src/main/java/com/amazonaws/services/config/model/transform/DescribeConformancePackComplianceRequestProtocolMarshaller.java
Java
apache-2.0
2,957
package com.w11k.lsql; import java.sql.PreparedStatement; import java.sql.SQLException; public interface QueryParameter { void set(PreparedStatement ps, int index) throws SQLException; }
w11k/lsql
lsql-core/src/main/java/com/w11k/lsql/QueryParameter.java
Java
apache-2.0
195
/*
 * Copyright (c) 2008-2017 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.haulmont.cuba.desktop.test.ui;

import com.google.common.collect.ImmutableMap;
import com.haulmont.cuba.core.sys.AppContext;
import com.haulmont.cuba.desktop.gui.DesktopComponentsFactory;
import com.haulmont.cuba.desktop.gui.components.DesktopFieldGroup;
import com.haulmont.cuba.desktop.gui.executors.impl.DesktopBackgroundWorker;
import com.haulmont.cuba.gui.components.ComponentGenerationStrategy;
import com.haulmont.cuba.gui.components.factories.DefaultComponentGenerationStrategy;
import com.haulmont.cuba.gui.components.FieldGroup;
import com.haulmont.cuba.gui.components.FieldGroupTest;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import mockit.Mock;
import mockit.MockUp;
import mockit.Expectations;

import java.util.Collections;
import java.util.List;
import java.util.Locale;

/**
 * Desktop (Swing) variant of {@link FieldGroupTest}: runs the shared FieldGroup test suite
 * against the desktop component implementations by overriding the factory and the
 * grid-introspection hooks.
 */
public class DesktopFieldGroupTest extends FieldGroupTest {
    @Override
    protected void initExpectations() {
        super.initExpectations();

        // Disable the Swing EDT-access check so the test can run outside the event
        // dispatch thread.
        new MockUp<DesktopBackgroundWorker>() {
            @Mock
            public void checkSwingUIAccess() {
            }
        };

        // Record lenient (minTimes = 0) expectations for configuration lookups made
        // during component creation: available locales and the desktop message pack.
        new Expectations() {
            {
                globalConfig.getAvailableLocales();
                result = ImmutableMap.of("en", Locale.ENGLISH);
                minTimes = 0;

                AppContext.getProperty("cuba.mainMessagePack");
                result = "com.haulmont.cuba.desktop";
                minTimes = 0;
            }
        };
    }

    /**
     * Supplies the desktop components factory, configured with the default component
     * generation strategy so field components can be created from metadata.
     */
    @Override
    protected ComponentsFactory createComponentsFactory() {
        return new DesktopComponentsFactory() {
            @Override
            public List<ComponentGenerationStrategy> getComponentGenerationStrategies() {
                DefaultComponentGenerationStrategy strategy = new DefaultComponentGenerationStrategy(messages);
                strategy.setComponentsFactory(this);
                return Collections.singletonList(strategy);
            }
        };
    }

    // Grid-introspection hooks used by the base test: delegate to the desktop
    // FieldGroup implementation where the generic API does not expose the value.

    @Override
    protected int getGridRows(FieldGroup fieldGroup) {
        return ((DesktopFieldGroup) fieldGroup).getRows();
    }

    @Override
    protected int getGridColumns(FieldGroup fieldGroup) {
        return fieldGroup.getColumns();
    }

    @Override
    protected Object getGridCellComposition(FieldGroup fieldGroup, int col, int row) {
        return ((DesktopFieldGroup) fieldGroup).getCellComponent(col, row);
    }
}
dimone-kun/cuba
modules/desktop/test/com/haulmont/cuba/desktop/test/ui/DesktopFieldGroupTest.java
Java
apache-2.0
2,939
package com.foolish.a2de.physics; import com.foolish.a2de.graphics.Shape; public interface IPhysics2D { public void applyPhysics(Shape shape); }
foolish314159/OpenGL-2D-Engine
src/com/foolish/a2de/physics/IPhysics2D.java
Java
apache-2.0
151
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.protocolPB; import static com.google.common.base.Preconditions.checkNotNull; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.List; import org.apache.hadoop.fs.CacheFlag; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.StorageType; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry; import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo; import 
org.apache.hadoop.hdfs.protocol.CacheDirectiveStats; import org.apache.hadoop.hdfs.protocol.CachePoolEntry; import org.apache.hadoop.hdfs.protocol.CachePoolInfo; import org.apache.hadoop.hdfs.protocol.CachePoolStats; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates; import org.apache.hadoop.hdfs.protocol.DatanodeLocalInfo; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.FsAclPermission; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo; import org.apache.hadoop.hdfs.protocol.RollingUpgradeStatus; import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport; import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry; import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType; import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus; import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto; import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryScopeProto; import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryTypeProto; import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.FsActionProto; import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclStatusProto; import 
org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusResponseProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveEntryProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveStatsProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheFlagProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolEntryProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolStatsProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateFlagProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DatanodeReportTypeProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatsResponseProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollingUpgradeActionProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RollingUpgradeInfoProto; import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SafeModeActionProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ShortCircuitShmIdProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ShortCircuitShmSlotProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BalancerBandwidthCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto; import 
org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockRecoveryCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeRegistrationProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeStorageProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeStorageProto.StorageState; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.FinalizeCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.NNHAStatusHeartbeatProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.ReceivedDeletedBlockInfoProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageReportProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto; import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfosProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeLocalInfoProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DirectoryListingProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsPermissionProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FsServerDefaultsProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.Builder; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlocksProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeCommandProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamenodeRegistrationProto.NamenodeRoleProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.NamespaceInfoProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RecoveringBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogManifestProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RemoteEditLogProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReplicaStateProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.RollingUpgradeStatusProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.SnapshotDiffReportEntryProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.SnapshotDiffReportProto; import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.SnapshottableDirectoryListingProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.SnapshottableDirectoryStatusProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageUuidsProto; import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.JournalInfoProto; import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsResponseProto; import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsResponseProto; import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto; import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto.XAttrNamespaceProto; import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrSetFlagProto; import org.apache.hadoop.hdfs.security.token.block.BlockKey; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature; import org.apache.hadoop.hdfs.server.namenode.INodeId; import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand; import org.apache.hadoop.hdfs.server.protocol.BlockCommand; import org.apache.hadoop.hdfs.server.protocol.BlockIdCommand; import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand; import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock; import 
org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations; import org.apache.hadoop.hdfs.server.protocol.CheckpointCommand; import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage.State; import org.apache.hadoop.hdfs.server.protocol.FinalizeCommand; import org.apache.hadoop.hdfs.server.protocol.JournalInfo; import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand; import org.apache.hadoop.hdfs.server.protocol.NNHAStatusHeartbeat; import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo; import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo.BlockStatus; import org.apache.hadoop.hdfs.server.protocol.RegisterCommand; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId; import org.apache.hadoop.hdfs.util.ExactSizeInputStream; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.primitives.Shorts; import com.google.protobuf.ByteString; import 
com.google.protobuf.CodedInputStream; /** * Utilities for converting protobuf classes to and from implementation classes * and other helper utilities to help in dealing with protobuf. * * Note that when converting from an internal type to protobuf type, the * converter never return null for protobuf type. The check for internal type * being null must be done before calling the convert() method. */ public class PBHelper { private static final RegisterCommandProto REG_CMD_PROTO = RegisterCommandProto.newBuilder().build(); private static final RegisterCommand REG_CMD = new RegisterCommand(); private static final AclEntryScope[] ACL_ENTRY_SCOPE_VALUES = AclEntryScope.values(); private static final AclEntryType[] ACL_ENTRY_TYPE_VALUES = AclEntryType.values(); private static final FsAction[] FSACTION_VALUES = FsAction.values(); private static final XAttr.NameSpace[] XATTR_NAMESPACE_VALUES = XAttr.NameSpace.values(); private PBHelper() { /** Hidden constructor */ } public static ByteString getByteString(byte[] bytes) { return ByteString.copyFrom(bytes); } private static <T extends Enum<T>, U extends Enum<U>> U castEnum(T from, U[] to) { return to[from.ordinal()]; } public static NamenodeRole convert(NamenodeRoleProto role) { switch (role) { case NAMENODE: return NamenodeRole.NAMENODE; case BACKUP: return NamenodeRole.BACKUP; case CHECKPOINT: return NamenodeRole.CHECKPOINT; } return null; } public static NamenodeRoleProto convert(NamenodeRole role) { switch (role) { case NAMENODE: return NamenodeRoleProto.NAMENODE; case BACKUP: return NamenodeRoleProto.BACKUP; case CHECKPOINT: return NamenodeRoleProto.CHECKPOINT; } return null; } public static StorageInfoProto convert(StorageInfo info) { return StorageInfoProto.newBuilder().setClusterID(info.getClusterID()) .setCTime(info.getCTime()).setLayoutVersion(info.getLayoutVersion()) .setNamespceID(info.getNamespaceID()).build(); } public static StorageInfo convert(StorageInfoProto info, NodeType type) { return new 
StorageInfo(info.getLayoutVersion(), info.getNamespceID(), info.getClusterID(), info.getCTime(), type); } public static NamenodeRegistrationProto convert(NamenodeRegistration reg) { return NamenodeRegistrationProto.newBuilder() .setHttpAddress(reg.getHttpAddress()).setRole(convert(reg.getRole())) .setRpcAddress(reg.getAddress()) .setStorageInfo(convert((StorageInfo) reg)).build(); } public static NamenodeRegistration convert(NamenodeRegistrationProto reg) { StorageInfo si = convert(reg.getStorageInfo(), NodeType.NAME_NODE); return new NamenodeRegistration(reg.getRpcAddress(), reg.getHttpAddress(), si, convert(reg.getRole())); } // DatanodeId public static DatanodeID convert(DatanodeIDProto dn) { return new DatanodeID(dn.getIpAddr(), dn.getHostName(), dn.getDatanodeUuid(), dn.getXferPort(), dn.getInfoPort(), dn.hasInfoSecurePort() ? dn .getInfoSecurePort() : 0, dn.getIpcPort()); } public static DatanodeIDProto convert(DatanodeID dn) { // For wire compatibility with older versions we transmit the StorageID // which is the same as the DatanodeUuid. Since StorageID is a required // field we pass the empty string if the DatanodeUuid is not yet known. return DatanodeIDProto.newBuilder() .setIpAddr(dn.getIpAddr()) .setHostName(dn.getHostName()) .setXferPort(dn.getXferPort()) .setDatanodeUuid(dn.getDatanodeUuid() != null ? 
dn.getDatanodeUuid() : "") .setInfoPort(dn.getInfoPort()) .setInfoSecurePort(dn.getInfoSecurePort()) .setIpcPort(dn.getIpcPort()).build(); } // Arrays of DatanodeId public static DatanodeIDProto[] convert(DatanodeID[] did) { if (did == null) return null; final int len = did.length; DatanodeIDProto[] result = new DatanodeIDProto[len]; for (int i = 0; i < len; ++i) { result[i] = convert(did[i]); } return result; } public static DatanodeID[] convert(DatanodeIDProto[] did) { if (did == null) return null; final int len = did.length; DatanodeID[] result = new DatanodeID[len]; for (int i = 0; i < len; ++i) { result[i] = convert(did[i]); } return result; } // Block public static BlockProto convert(Block b) { return BlockProto.newBuilder().setBlockId(b.getBlockId()) .setGenStamp(b.getGenerationStamp()).setNumBytes(b.getNumBytes()) .build(); } public static Block convert(BlockProto b) { return new Block(b.getBlockId(), b.getNumBytes(), b.getGenStamp()); } public static BlockWithLocationsProto convert(BlockWithLocations blk) { return BlockWithLocationsProto.newBuilder() .setBlock(convert(blk.getBlock())) .addAllDatanodeUuids(Arrays.asList(blk.getDatanodeUuids())) .addAllStorageUuids(Arrays.asList(blk.getStorageIDs())).build(); } public static BlockWithLocations convert(BlockWithLocationsProto b) { final List<String> datanodeUuids = b.getDatanodeUuidsList(); final List<String> storageUuids = b.getStorageUuidsList(); return new BlockWithLocations(convert(b.getBlock()), datanodeUuids.toArray(new String[datanodeUuids.size()]), storageUuids.toArray(new String[storageUuids.size()])); } public static BlocksWithLocationsProto convert(BlocksWithLocations blks) { BlocksWithLocationsProto.Builder builder = BlocksWithLocationsProto .newBuilder(); for (BlockWithLocations b : blks.getBlocks()) { builder.addBlocks(convert(b)); } return builder.build(); } public static BlocksWithLocations convert(BlocksWithLocationsProto blocks) { List<BlockWithLocationsProto> b = blocks.getBlocksList(); 
BlockWithLocations[] ret = new BlockWithLocations[b.size()]; int i = 0; for (BlockWithLocationsProto entry : b) { ret[i++] = convert(entry); } return new BlocksWithLocations(ret); } public static BlockKeyProto convert(BlockKey key) { byte[] encodedKey = key.getEncodedKey(); ByteString keyBytes = ByteString.copyFrom(encodedKey == null ? DFSUtil.EMPTY_BYTES : encodedKey); return BlockKeyProto.newBuilder().setKeyId(key.getKeyId()) .setKeyBytes(keyBytes).setExpiryDate(key.getExpiryDate()).build(); } public static BlockKey convert(BlockKeyProto k) { return new BlockKey(k.getKeyId(), k.getExpiryDate(), k.getKeyBytes() .toByteArray()); } public static ExportedBlockKeysProto convert(ExportedBlockKeys keys) { ExportedBlockKeysProto.Builder builder = ExportedBlockKeysProto .newBuilder(); builder.setIsBlockTokenEnabled(keys.isBlockTokenEnabled()) .setKeyUpdateInterval(keys.getKeyUpdateInterval()) .setTokenLifeTime(keys.getTokenLifetime()) .setCurrentKey(convert(keys.getCurrentKey())); for (BlockKey k : keys.getAllKeys()) { builder.addAllKeys(convert(k)); } return builder.build(); } public static ExportedBlockKeys convert(ExportedBlockKeysProto keys) { return new ExportedBlockKeys(keys.getIsBlockTokenEnabled(), keys.getKeyUpdateInterval(), keys.getTokenLifeTime(), convert(keys.getCurrentKey()), convertBlockKeys(keys.getAllKeysList())); } public static CheckpointSignatureProto convert(CheckpointSignature s) { return CheckpointSignatureProto.newBuilder() .setBlockPoolId(s.getBlockpoolID()) .setCurSegmentTxId(s.getCurSegmentTxId()) .setMostRecentCheckpointTxId(s.getMostRecentCheckpointTxId()) .setStorageInfo(PBHelper.convert((StorageInfo) s)).build(); } public static CheckpointSignature convert(CheckpointSignatureProto s) { StorageInfo si = PBHelper.convert(s.getStorageInfo(), NodeType.NAME_NODE); return new CheckpointSignature(si, s.getBlockPoolId(), s.getMostRecentCheckpointTxId(), s.getCurSegmentTxId()); } public static RemoteEditLogProto convert(RemoteEditLog log) { return 
RemoteEditLogProto.newBuilder() .setStartTxId(log.getStartTxId()) .setEndTxId(log.getEndTxId()) .setIsInProgress(log.isInProgress()).build(); } public static RemoteEditLog convert(RemoteEditLogProto l) { return new RemoteEditLog(l.getStartTxId(), l.getEndTxId(), l.getIsInProgress()); } public static RemoteEditLogManifestProto convert( RemoteEditLogManifest manifest) { RemoteEditLogManifestProto.Builder builder = RemoteEditLogManifestProto .newBuilder(); for (RemoteEditLog log : manifest.getLogs()) { builder.addLogs(convert(log)); } return builder.build(); } public static RemoteEditLogManifest convert( RemoteEditLogManifestProto manifest) { List<RemoteEditLog> logs = new ArrayList<RemoteEditLog>(manifest .getLogsList().size()); for (RemoteEditLogProto l : manifest.getLogsList()) { logs.add(convert(l)); } return new RemoteEditLogManifest(logs); } public static CheckpointCommandProto convert(CheckpointCommand cmd) { return CheckpointCommandProto.newBuilder() .setSignature(convert(cmd.getSignature())) .setNeedToReturnImage(cmd.needToReturnImage()).build(); } public static NamenodeCommandProto convert(NamenodeCommand cmd) { if (cmd instanceof CheckpointCommand) { return NamenodeCommandProto.newBuilder().setAction(cmd.getAction()) .setType(NamenodeCommandProto.Type.CheckPointCommand) .setCheckpointCmd(convert((CheckpointCommand) cmd)).build(); } return NamenodeCommandProto.newBuilder() .setType(NamenodeCommandProto.Type.NamenodeCommand) .setAction(cmd.getAction()).build(); } public static BlockKey[] convertBlockKeys(List<BlockKeyProto> list) { BlockKey[] ret = new BlockKey[list.size()]; int i = 0; for (BlockKeyProto k : list) { ret[i++] = convert(k); } return ret; } public static NamespaceInfo convert(NamespaceInfoProto info) { StorageInfoProto storage = info.getStorageInfo(); return new NamespaceInfo(storage.getNamespceID(), storage.getClusterID(), info.getBlockPoolID(), storage.getCTime(), info.getBuildVersion(), info.getSoftwareVersion()); } public static 
NamenodeCommand convert(NamenodeCommandProto cmd) { if (cmd == null) return null; switch (cmd.getType()) { case CheckPointCommand: CheckpointCommandProto chkPt = cmd.getCheckpointCmd(); return new CheckpointCommand(PBHelper.convert(chkPt.getSignature()), chkPt.getNeedToReturnImage()); default: return new NamenodeCommand(cmd.getAction()); } } public static ExtendedBlock convert(ExtendedBlockProto eb) { if (eb == null) return null; return new ExtendedBlock( eb.getPoolId(), eb.getBlockId(), eb.getNumBytes(), eb.getGenerationStamp()); } public static ExtendedBlockProto convert(final ExtendedBlock b) { if (b == null) return null; return ExtendedBlockProto.newBuilder(). setPoolId(b.getBlockPoolId()). setBlockId(b.getBlockId()). setNumBytes(b.getNumBytes()). setGenerationStamp(b.getGenerationStamp()). build(); } public static RecoveringBlockProto convert(RecoveringBlock b) { if (b == null) { return null; } LocatedBlockProto lb = PBHelper.convert((LocatedBlock)b); return RecoveringBlockProto.newBuilder().setBlock(lb) .setNewGenStamp(b.getNewGenerationStamp()).build(); } public static RecoveringBlock convert(RecoveringBlockProto b) { ExtendedBlock block = convert(b.getBlock().getB()); DatanodeInfo[] locs = convert(b.getBlock().getLocsList()); return new RecoveringBlock(block, locs, b.getNewGenStamp()); } public static DatanodeInfoProto.AdminState convert( final DatanodeInfo.AdminStates inAs) { switch (inAs) { case NORMAL: return DatanodeInfoProto.AdminState.NORMAL; case DECOMMISSION_INPROGRESS: return DatanodeInfoProto.AdminState.DECOMMISSION_INPROGRESS; case DECOMMISSIONED: return DatanodeInfoProto.AdminState.DECOMMISSIONED; default: return DatanodeInfoProto.AdminState.NORMAL; } } static public DatanodeInfo convert(DatanodeInfoProto di) { if (di == null) return null; return new DatanodeInfo( PBHelper.convert(di.getId()), di.hasLocation() ? 
di.getLocation() : null , di.getCapacity(), di.getDfsUsed(), di.getRemaining(), di.getBlockPoolUsed(), di.getCacheCapacity(), di.getCacheUsed(), di.getLastUpdate(), di.getXceiverCount(), PBHelper.convert(di.getAdminState())); } static public DatanodeInfoProto convertDatanodeInfo(DatanodeInfo di) { if (di == null) return null; return convert(di); } static public DatanodeInfo[] convert(DatanodeInfoProto di[]) { if (di == null) return null; DatanodeInfo[] result = new DatanodeInfo[di.length]; for (int i = 0; i < di.length; i++) { result[i] = convert(di[i]); } return result; } public static List<? extends HdfsProtos.DatanodeInfoProto> convert( DatanodeInfo[] dnInfos) { return convert(dnInfos, 0); } /** * Copy from {@code dnInfos} to a target of list of same size starting at * {@code startIdx}. */ public static List<? extends HdfsProtos.DatanodeInfoProto> convert( DatanodeInfo[] dnInfos, int startIdx) { if (dnInfos == null) return null; ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists .newArrayListWithCapacity(dnInfos.length); for (int i = startIdx; i < dnInfos.length; i++) { protos.add(convert(dnInfos[i])); } return protos; } public static DatanodeInfo[] convert(List<DatanodeInfoProto> list) { DatanodeInfo[] info = new DatanodeInfo[list.size()]; for (int i = 0; i < info.length; i++) { info[i] = convert(list.get(i)); } return info; } public static DatanodeInfoProto convert(DatanodeInfo info) { DatanodeInfoProto.Builder builder = DatanodeInfoProto.newBuilder(); if (info.getNetworkLocation() != null) { builder.setLocation(info.getNetworkLocation()); } builder .setId(PBHelper.convert((DatanodeID)info)) .setCapacity(info.getCapacity()) .setDfsUsed(info.getDfsUsed()) .setRemaining(info.getRemaining()) .setBlockPoolUsed(info.getBlockPoolUsed()) .setCacheCapacity(info.getCacheCapacity()) .setCacheUsed(info.getCacheUsed()) .setLastUpdate(info.getLastUpdate()) .setXceiverCount(info.getXceiverCount()) .setAdminState(PBHelper.convert(info.getAdminState())) .build(); return 
builder.build(); } public static AdminStates convert(AdminState adminState) { switch(adminState) { case DECOMMISSION_INPROGRESS: return AdminStates.DECOMMISSION_INPROGRESS; case DECOMMISSIONED: return AdminStates.DECOMMISSIONED; case NORMAL: default: return AdminStates.NORMAL; } } public static LocatedBlockProto convert(LocatedBlock b) { if (b == null) return null; Builder builder = LocatedBlockProto.newBuilder(); DatanodeInfo[] locs = b.getLocations(); List<DatanodeInfo> cachedLocs = Lists.newLinkedList(Arrays.asList(b.getCachedLocations())); for (int i = 0; i < locs.length; i++) { DatanodeInfo loc = locs[i]; builder.addLocs(i, PBHelper.convert(loc)); boolean locIsCached = cachedLocs.contains(loc); builder.addIsCached(locIsCached); if (locIsCached) { cachedLocs.remove(loc); } } Preconditions.checkArgument(cachedLocs.size() == 0, "Found additional cached replica locations that are not in the set of" + " storage-backed locations!"); StorageType[] storageTypes = b.getStorageTypes(); if (storageTypes != null) { for (int i = 0; i < storageTypes.length; ++i) { builder.addStorageTypes(PBHelper.convertStorageType(storageTypes[i])); } } final String[] storageIDs = b.getStorageIDs(); if (storageIDs != null) { builder.addAllStorageIDs(Arrays.asList(storageIDs)); } return builder.setB(PBHelper.convert(b.getBlock())) .setBlockToken(PBHelper.convert(b.getBlockToken())) .setCorrupt(b.isCorrupt()).setOffset(b.getStartOffset()).build(); } public static LocatedBlock convert(LocatedBlockProto proto) { if (proto == null) return null; List<DatanodeInfoProto> locs = proto.getLocsList(); DatanodeInfo[] targets = new DatanodeInfo[locs.size()]; for (int i = 0; i < locs.size(); i++) { targets[i] = PBHelper.convert(locs.get(i)); } final int storageTypesCount = proto.getStorageTypesCount(); final StorageType[] storageTypes; if (storageTypesCount == 0) { storageTypes = null; } else { Preconditions.checkState(storageTypesCount == locs.size()); storageTypes = 
// Continuation of convert(LocatedBlockProto): storage types parsed above.
convertStorageTypeProtos(proto.getStorageTypesList());
  }
  final int storageIDsCount = proto.getStorageIDsCount();
  final String[] storageIDs;
  if (storageIDsCount == 0) {
    storageIDs = null;
  } else {
    Preconditions.checkState(storageIDsCount == locs.size());
    storageIDs = proto.getStorageIDsList().toArray(new String[storageIDsCount]);
  }

  // Set values from the isCached list, re-using references from loc
  List<DatanodeInfo> cachedLocs = new ArrayList<DatanodeInfo>(locs.size());
  List<Boolean> isCachedList = proto.getIsCachedList();
  for (int i=0; i<isCachedList.size(); i++) {
    if (isCachedList.get(i)) {
      cachedLocs.add(targets[i]);
    }
  }
  LocatedBlock lb = new LocatedBlock(PBHelper.convert(proto.getB()), targets,
      storageIDs, storageTypes, proto.getOffset(), proto.getCorrupt(),
      cachedLocs.toArray(new DatanodeInfo[0]));
  lb.setBlockToken(PBHelper.convert(proto.getBlockToken()));
  return lb;
}

/** Serializes a security token (identifier/password/kind/service). */
public static TokenProto convert(Token<?> tok) {
  return TokenProto.newBuilder().
      setIdentifier(ByteString.copyFrom(tok.getIdentifier())).
      setPassword(ByteString.copyFrom(tok.getPassword())).
      setKind(tok.getKind().toString()).
      setService(tok.getService().toString()).build();
}

/** Deserializes a block access token from its proto form. */
public static Token<BlockTokenIdentifier> convert(
    TokenProto blockToken) {
  return new Token<BlockTokenIdentifier>(blockToken.getIdentifier()
      .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
      blockToken.getKind()), new Text(blockToken.getService()));
}

/** Deserializes a delegation token from its proto form. */
public static Token<DelegationTokenIdentifier> convertDelegationToken(
    TokenProto blockToken) {
  return new Token<DelegationTokenIdentifier>(blockToken.getIdentifier()
      .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
      blockToken.getKind()), new Text(blockToken.getService()));
}

/** Maps a proto replica state to ReplicaState; default -> FINALIZED. */
public static ReplicaState convert(ReplicaStateProto state) {
  switch (state) {
  case RBW:
    return ReplicaState.RBW;
  case RUR:
    return ReplicaState.RUR;
  case RWR:
    return ReplicaState.RWR;
  case TEMPORARY:
    return ReplicaState.TEMPORARY;
  case FINALIZED:
  default:
    return ReplicaState.FINALIZED;
  }
}

/** Maps a ReplicaState to its proto enum; default -> FINALIZED. */
public static ReplicaStateProto convert(ReplicaState state) {
  switch (state) {
  case RBW:
    return ReplicaStateProto.RBW;
  case RUR:
    return ReplicaStateProto.RUR;
  case RWR:
    return ReplicaStateProto.RWR;
  case TEMPORARY:
    return ReplicaStateProto.TEMPORARY;
  case FINALIZED:
  default:
    return ReplicaStateProto.FINALIZED;
  }
}

/** Serializes a datanode registration (id, storage, keys, version). */
public static DatanodeRegistrationProto convert(
    DatanodeRegistration registration) {
  DatanodeRegistrationProto.Builder builder = DatanodeRegistrationProto
      .newBuilder();
  return builder.setDatanodeID(PBHelper.convert((DatanodeID) registration))
      .setStorageInfo(PBHelper.convert(registration.getStorageInfo()))
      .setKeys(PBHelper.convert(registration.getExportedKeys()))
      .setSoftwareVersion(registration.getSoftwareVersion()).build();
}

/** Inverse of the above; storage info is tagged as DATA_NODE. */
public static DatanodeRegistration convert(DatanodeRegistrationProto proto) {
  StorageInfo si = convert(proto.getStorageInfo(), NodeType.DATA_NODE);
  return new DatanodeRegistration(PBHelper.convert(proto.getDatanodeID()),
      si, PBHelper.convert(proto.getKeys()), proto.getSoftwareVersion());
}

public static DatanodeCommand
// Continuation of: public static DatanodeCommand convert(DatanodeCommandProto).
// Dispatches on the wire command type; unrecognized types yield null.
convert(DatanodeCommandProto proto) {
  switch (proto.getCmdType()) {
  case BalancerBandwidthCommand:
    return PBHelper.convert(proto.getBalancerCmd());
  case BlockCommand:
    return PBHelper.convert(proto.getBlkCmd());
  case BlockRecoveryCommand:
    return PBHelper.convert(proto.getRecoveryCmd());
  case FinalizeCommand:
    return PBHelper.convert(proto.getFinalizeCmd());
  case KeyUpdateCommand:
    return PBHelper.convert(proto.getKeyUpdateCmd());
  case RegisterCommand:
    return REG_CMD;
  case BlockIdCommand:
    return PBHelper.convert(proto.getBlkIdCmd());
  default:
    return null;
  }
}

/** Wraps the balancer bandwidth value in its proto message. */
public static BalancerBandwidthCommandProto convert(
    BalancerBandwidthCommand bbCmd) {
  return BalancerBandwidthCommandProto.newBuilder()
      .setBandwidth(bbCmd.getBalancerBandwidthValue()).build();
}

/** Serializes a key-update command (exported block keys). */
public static KeyUpdateCommandProto convert(KeyUpdateCommand cmd) {
  return KeyUpdateCommandProto.newBuilder()
      .setKeys(PBHelper.convert(cmd.getExportedKeys())).build();
}

/** Serializes each recovering block of a block-recovery command. */
public static BlockRecoveryCommandProto convert(BlockRecoveryCommand cmd) {
  BlockRecoveryCommandProto.Builder builder = BlockRecoveryCommandProto
      .newBuilder();
  for (RecoveringBlock b : cmd.getRecoveringBlocks()) {
    builder.addBlocks(PBHelper.convert(b));
  }
  return builder.build();
}

/** Serializes a finalize command (block pool id only). */
public static FinalizeCommandProto convert(FinalizeCommand cmd) {
  return FinalizeCommandProto.newBuilder()
      .setBlockPoolId(cmd.getBlockPoolId()).build();
}

/** Serializes a block command; only TRANSFER/INVALIDATE/SHUTDOWN are valid. */
public static BlockCommandProto convert(BlockCommand cmd) {
  BlockCommandProto.Builder builder = BlockCommandProto.newBuilder()
      .setBlockPoolId(cmd.getBlockPoolId());
  switch (cmd.getAction()) {
  case DatanodeProtocol.DNA_TRANSFER:
    builder.setAction(BlockCommandProto.Action.TRANSFER);
    break;
  case DatanodeProtocol.DNA_INVALIDATE:
    builder.setAction(BlockCommandProto.Action.INVALIDATE);
    break;
  case DatanodeProtocol.DNA_SHUTDOWN:
    builder.setAction(BlockCommandProto.Action.SHUTDOWN);
    break;
  default:
    throw new AssertionError("Invalid action");
  }
  Block[] blocks = cmd.getBlocks();
  for (int i = 0; i < blocks.length; i++) {
    builder.addBlocks(PBHelper.convert(blocks[i]));
  }
  builder.addAllTargets(convert(cmd.getTargets()))
         .addAllTargetStorageUuids(convert(cmd.getTargetStorageIDs()));
  return builder.build();
}

/** Serializes a block-id command; only CACHE/UNCACHE are valid. */
public static BlockIdCommandProto convert(BlockIdCommand cmd) {
  BlockIdCommandProto.Builder builder = BlockIdCommandProto.newBuilder()
      .setBlockPoolId(cmd.getBlockPoolId());
  switch (cmd.getAction()) {
  case DatanodeProtocol.DNA_CACHE:
    builder.setAction(BlockIdCommandProto.Action.CACHE);
    break;
  case DatanodeProtocol.DNA_UNCACHE:
    builder.setAction(BlockIdCommandProto.Action.UNCACHE);
    break;
  default:
    throw new AssertionError("Invalid action");
  }
  long[] blockIds = cmd.getBlockIds();
  for (int i = 0; i < blockIds.length; i++) {
    builder.addBlockIds(blockIds[i]);
  }
  return builder.build();
}

/** Converts a 2-D target matrix into one DatanodeInfosProto per row. */
private static List<DatanodeInfosProto> convert(DatanodeInfo[][] targets) {
  DatanodeInfosProto[] ret = new DatanodeInfosProto[targets.length];
  for (int i = 0; i < targets.length; i++) {
    ret[i] = DatanodeInfosProto.newBuilder()
        .addAllDatanodes(PBHelper.convert(targets[i])).build();
  }
  return Arrays.asList(ret);
}

/** Converts a 2-D storage-uuid matrix into one StorageUuidsProto per row. */
private static List<StorageUuidsProto> convert(String[][] targetStorageUuids) {
  StorageUuidsProto[] ret = new StorageUuidsProto[targetStorageUuids.length];
  for (int i = 0; i < targetStorageUuids.length; i++) {
    ret[i] = StorageUuidsProto.newBuilder()
        .addAllStorageUuids(Arrays.asList(targetStorageUuids[i])).build();
  }
  return Arrays.asList(ret);
}

/** Wraps any DatanodeCommand (or null) in the tagged-union proto. */
public static DatanodeCommandProto convert(DatanodeCommand datanodeCommand) {
  DatanodeCommandProto.Builder builder = DatanodeCommandProto.newBuilder();
  if (datanodeCommand == null) {
    // Null commands travel as an explicit NullDatanodeCommand marker.
    return builder.setCmdType(DatanodeCommandProto.Type.NullDatanodeCommand)
        .build();
  }
  switch (datanodeCommand.getAction()) {
  case DatanodeProtocol.DNA_BALANCERBANDWIDTHUPDATE:
    builder.setCmdType(DatanodeCommandProto.Type.BalancerBandwidthCommand)
        .setBalancerCmd(
            PBHelper.convert((BalancerBandwidthCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_ACCESSKEYUPDATE:
// Continuation of convert(DatanodeCommand): body of the DNA_ACCESSKEYUPDATE
// case and the remaining action cases.
builder
    .setCmdType(DatanodeCommandProto.Type.KeyUpdateCommand)
    .setKeyUpdateCmd(PBHelper.convert((KeyUpdateCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_RECOVERBLOCK:
    builder.setCmdType(DatanodeCommandProto.Type.BlockRecoveryCommand)
        .setRecoveryCmd(
            PBHelper.convert((BlockRecoveryCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_FINALIZE:
    builder.setCmdType(DatanodeCommandProto.Type.FinalizeCommand)
        .setFinalizeCmd(PBHelper.convert((FinalizeCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_REGISTER:
    builder.setCmdType(DatanodeCommandProto.Type.RegisterCommand)
        .setRegisterCmd(REG_CMD_PROTO);
    break;
  case DatanodeProtocol.DNA_TRANSFER:
  case DatanodeProtocol.DNA_INVALIDATE:
  case DatanodeProtocol.DNA_SHUTDOWN:
    builder.setCmdType(DatanodeCommandProto.Type.BlockCommand).
        setBlkCmd(PBHelper.convert((BlockCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_CACHE:
  case DatanodeProtocol.DNA_UNCACHE:
    builder.setCmdType(DatanodeCommandProto.Type.BlockIdCommand).
        setBlkIdCmd(PBHelper.convert((BlockIdCommand) datanodeCommand));
    break;
  case DatanodeProtocol.DNA_UNKNOWN: //Not expected
  default:
    builder.setCmdType(DatanodeCommandProto.Type.NullDatanodeCommand);
  }
  return builder.build();
}

/** Deserializes a key-update command. */
public static KeyUpdateCommand convert(KeyUpdateCommandProto keyUpdateCmd) {
  return new KeyUpdateCommand(PBHelper.convert(keyUpdateCmd.getKeys()));
}

/** Deserializes a finalize command. */
public static FinalizeCommand convert(FinalizeCommandProto finalizeCmd) {
  return new FinalizeCommand(finalizeCmd.getBlockPoolId());
}

/** Deserializes the list of recovering blocks of a recovery command. */
public static BlockRecoveryCommand convert(
    BlockRecoveryCommandProto recoveryCmd) {
  List<RecoveringBlockProto> list = recoveryCmd.getBlocksList();
  List<RecoveringBlock> recoveringBlocks = new ArrayList<RecoveringBlock>(
      list.size());
  for (RecoveringBlockProto rbp : list) {
    recoveringBlocks.add(PBHelper.convert(rbp));
  }
  return new BlockRecoveryCommand(recoveringBlocks);
}

/** Deserializes a block command: blocks, targets and their storage ids. */
public static BlockCommand convert(BlockCommandProto blkCmd) {
  List<BlockProto> blockProtoList = blkCmd.getBlocksList();
  Block[] blocks = new Block[blockProtoList.size()];
  for (int i = 0; i < blockProtoList.size(); i++) {
    blocks[i] = PBHelper.convert(blockProtoList.get(i));
  }
  List<DatanodeInfosProto> targetList = blkCmd.getTargetsList();
  DatanodeInfo[][] targets = new DatanodeInfo[targetList.size()][];
  for (int i = 0; i < targetList.size(); i++) {
    targets[i] = PBHelper.convert(targetList.get(i));
  }
  List<StorageUuidsProto> targetStorageUuidsList = blkCmd.getTargetStorageUuidsList();
  String[][] targetStorageIDs = new String[targetStorageUuidsList.size()][];
  for(int i = 0; i < targetStorageIDs.length; i++) {
    List<String> storageIDs = targetStorageUuidsList.get(i).getStorageUuidsList();
    targetStorageIDs[i] = storageIDs.toArray(new String[storageIDs.size()]);
  }
  int action = DatanodeProtocol.DNA_UNKNOWN;
  switch (blkCmd.getAction()) {
  case TRANSFER:
    action = DatanodeProtocol.DNA_TRANSFER;
    break;
  case INVALIDATE:
    action = DatanodeProtocol.DNA_INVALIDATE;
    break;
  case SHUTDOWN:
    action = DatanodeProtocol.DNA_SHUTDOWN;
    break;
  default:
    throw new AssertionError("Unknown action type: " + blkCmd.getAction());
  }
  return new BlockCommand(action, blkCmd.getBlockPoolId(), blocks, targets,
      targetStorageIDs);
}

/** Deserializes a block-id (cache/uncache) command. */
public static BlockIdCommand convert(BlockIdCommandProto blkIdCmd) {
  int numBlockIds = blkIdCmd.getBlockIdsCount();
  long blockIds[] = new long[numBlockIds];
  for (int i = 0; i < numBlockIds; i++) {
    blockIds[i] = blkIdCmd.getBlockIds(i);
  }
  int action = DatanodeProtocol.DNA_UNKNOWN;
  switch (blkIdCmd.getAction()) {
  case CACHE:
    action = DatanodeProtocol.DNA_CACHE;
    break;
  case UNCACHE:
    action = DatanodeProtocol.DNA_UNCACHE;
    break;
  default:
    throw new AssertionError("Unknown action type: " + blkIdCmd.getAction());
  }
  return new BlockIdCommand(action, blkIdCmd.getBlockPoolId(), blockIds);
}

/** Unpacks a DatanodeInfosProto row back into an array. */
public static DatanodeInfo[] convert(DatanodeInfosProto datanodeInfosProto) {
  List<DatanodeInfoProto> proto = datanodeInfosProto.getDatanodesList();
  DatanodeInfo[] infos = new DatanodeInfo[proto.size()];
  for (int i = 0; i < infos.length; i++) {
    infos[i] = PBHelper.convert(proto.get(i));
  }
  return infos;
}

/** Deserializes a balancer bandwidth command. */
public static BalancerBandwidthCommand convert(
    BalancerBandwidthCommandProto balancerCmd) {
  return new BalancerBandwidthCommand(balancerCmd.getBandwidth());
}

/** Serializes a received/deleted block notification, including status. */
public static ReceivedDeletedBlockInfoProto convert(
    ReceivedDeletedBlockInfo receivedDeletedBlockInfo) {
  ReceivedDeletedBlockInfoProto.Builder builder =
      ReceivedDeletedBlockInfoProto.newBuilder();

  ReceivedDeletedBlockInfoProto.BlockStatus status;
  switch (receivedDeletedBlockInfo.getStatus()) {
  case RECEIVING_BLOCK:
    status = ReceivedDeletedBlockInfoProto.BlockStatus.RECEIVING;
    break;
  case RECEIVED_BLOCK:
    status = ReceivedDeletedBlockInfoProto.BlockStatus.RECEIVED;
    break;
  case DELETED_BLOCK:
    status = ReceivedDeletedBlockInfoProto.BlockStatus.DELETED;
    break;
  default:
    throw new IllegalArgumentException("Bad status: " +
        receivedDeletedBlockInfo.getStatus());
  }
  builder.setStatus(status);
  if
// Continuation of convert(ReceivedDeletedBlockInfo): optional delete hint.
(receivedDeletedBlockInfo.getDelHints() != null) {
    builder.setDeleteHint(receivedDeletedBlockInfo.getDelHints());
  }
  return builder.setBlock(
      PBHelper.convert(receivedDeletedBlockInfo.getBlock())).build();
}

/** Deserializes a received/deleted block notification. */
public static ReceivedDeletedBlockInfo convert(
    ReceivedDeletedBlockInfoProto proto) {
  ReceivedDeletedBlockInfo.BlockStatus status = null;
  switch (proto.getStatus()) {
  case RECEIVING:
    status = BlockStatus.RECEIVING_BLOCK;
    break;
  case RECEIVED:
    status = BlockStatus.RECEIVED_BLOCK;
    break;
  case DELETED:
    status = BlockStatus.DELETED_BLOCK;
    break;
  }
  return new ReceivedDeletedBlockInfo(
      PBHelper.convert(proto.getBlock()),
      status,
      proto.hasDeleteHint() ? proto.getDeleteHint() : null);
}

/** Serializes namespace info; "unused" field is written as 0. */
public static NamespaceInfoProto convert(NamespaceInfo info) {
  return NamespaceInfoProto.newBuilder()
      .setBlockPoolID(info.getBlockPoolID())
      .setBuildVersion(info.getBuildVersion())
      .setUnused(0)
      .setStorageInfo(PBHelper.convert((StorageInfo)info))
      .setSoftwareVersion(info.getSoftwareVersion()).build();
}

// Located Block Arrays and Lists

/** Array -> proto array; delegates to the list form. Null-safe. */
public static LocatedBlockProto[] convertLocatedBlock(LocatedBlock[] lb) {
  if (lb == null) return null;
  return convertLocatedBlock2(Arrays.asList(lb)).toArray(
      new LocatedBlockProto[lb.length]);
}

/** Proto array -> array; delegates to the list form. Null-safe. */
public static LocatedBlock[] convertLocatedBlock(LocatedBlockProto[] lb) {
  if (lb == null) return null;
  return convertLocatedBlock(Arrays.asList(lb)).toArray(
      new LocatedBlock[lb.length]);
}

/** Proto list -> LocatedBlock list. Null-safe. */
public static List<LocatedBlock> convertLocatedBlock(
    List<LocatedBlockProto> lb) {
  if (lb == null) return null;
  final int len = lb.size();
  List<LocatedBlock> result = new ArrayList<LocatedBlock>(len);
  for (int i = 0; i < len; ++i) {
    result.add(PBHelper.convert(lb.get(i)));
  }
  return result;
}

/** LocatedBlock list -> proto list. Null-safe. */
public static List<LocatedBlockProto> convertLocatedBlock2(List<LocatedBlock> lb) {
  if (lb == null) return null;
  final int len = lb.size();
  List<LocatedBlockProto> result = new ArrayList<LocatedBlockProto>(len);
  for (int i = 0; i < len; ++i) {
    result.add(PBHelper.convert(lb.get(i)));
  }
  return result;
}

// LocatedBlocks

/** Deserializes the full block-locations answer for a file. */
public static LocatedBlocks convert(LocatedBlocksProto lb) {
  return new LocatedBlocks(
      lb.getFileLength(), lb.getUnderConstruction(),
      PBHelper.convertLocatedBlock(lb.getBlocksList()),
      lb.hasLastBlock() ? PBHelper.convert(lb.getLastBlock()) : null,
      lb.getIsLastBlockComplete());
}

/** Serializes LocatedBlocks; last block is optional. Null-safe. */
public static LocatedBlocksProto convert(LocatedBlocks lb) {
  if (lb == null) {
    return null;
  }
  LocatedBlocksProto.Builder builder = LocatedBlocksProto.newBuilder();
  if (lb.getLastLocatedBlock() != null) {
    builder.setLastBlock(PBHelper.convert(lb.getLastLocatedBlock()));
  }
  return builder.setFileLength(lb.getFileLength())
      .setUnderConstruction(lb.isUnderConstruction())
      .addAllBlocks(PBHelper.convertLocatedBlock2(lb.getLocatedBlocks()))
      .setIsLastBlockComplete(lb.isLastBlockComplete()).build();
}

// DataEncryptionKey

/** Deserializes a data-encryption key; empty algorithm string -> null. */
public static DataEncryptionKey convert(DataEncryptionKeyProto bet) {
  String encryptionAlgorithm = bet.getEncryptionAlgorithm();
  return new DataEncryptionKey(bet.getKeyId(),
      bet.getBlockPoolId(),
      bet.getNonce().toByteArray(),
      bet.getEncryptionKey().toByteArray(),
      bet.getExpiryDate(),
      encryptionAlgorithm.isEmpty() ?
// Continuation of convert(DataEncryptionKeyProto): empty algorithm -> null.
null : encryptionAlgorithm);
}

/** Serializes a data-encryption key; the algorithm field is optional. */
public static DataEncryptionKeyProto convert(DataEncryptionKey bet) {
  DataEncryptionKeyProto.Builder b = DataEncryptionKeyProto.newBuilder()
      .setKeyId(bet.keyId)
      .setBlockPoolId(bet.blockPoolId)
      .setNonce(ByteString.copyFrom(bet.nonce))
      .setEncryptionKey(ByteString.copyFrom(bet.encryptionKey))
      .setExpiryDate(bet.expiryDate);
  if (bet.encryptionAlgorithm != null) {
    b.setEncryptionAlgorithm(bet.encryptionAlgorithm);
  }
  return b.build();
}

/** Deserializes filesystem server defaults. Null-safe. */
public static FsServerDefaults convert(FsServerDefaultsProto fs) {
  if (fs == null) return null;
  return new FsServerDefaults(
      fs.getBlockSize(), fs.getBytesPerChecksum(),
      fs.getWritePacketSize(), (short) fs.getReplication(),
      fs.getFileBufferSize(),
      fs.getEncryptDataTransfer(),
      fs.getTrashInterval(),
      PBHelper.convert(fs.getChecksumType()));
}

/** Serializes filesystem server defaults. Null-safe. */
public static FsServerDefaultsProto convert(FsServerDefaults fs) {
  if (fs == null) return null;
  return FsServerDefaultsProto.newBuilder().
    setBlockSize(fs.getBlockSize()).
    setBytesPerChecksum(fs.getBytesPerChecksum()).
    setWritePacketSize(fs.getWritePacketSize())
    .setReplication(fs.getReplication())
    .setFileBufferSize(fs.getFileBufferSize())
    .setEncryptDataTransfer(fs.getEncryptDataTransfer())
    .setTrashInterval(fs.getTrashInterval())
    .setChecksumType(PBHelper.convert(fs.getChecksumType()))
    .build();
}

/** Serializes a permission as its extended short form. */
public static FsPermissionProto convert(FsPermission p) {
  return FsPermissionProto.newBuilder().setPerm(p.toExtendedShort()).build();
}

/** Deserializes a permission (ACL-aware wrapper type). */
public static FsPermission convert(FsPermissionProto p) {
  return new FsAclPermission((short)p.getPerm());
}

// The createFlag field in PB is a bitmask whose values are the same as the
// enum values of CreateFlag

/** Encodes a CreateFlag set as the proto bitmask. */
public static int convertCreateFlag(EnumSetWritable<CreateFlag> flag) {
  int value = 0;
  if (flag.contains(CreateFlag.APPEND)) {
    value |= CreateFlagProto.APPEND.getNumber();
  }
  if (flag.contains(CreateFlag.CREATE)) {
    value |= CreateFlagProto.CREATE.getNumber();
  }
  if (flag.contains(CreateFlag.OVERWRITE)) {
    value |= CreateFlagProto.OVERWRITE.getNumber();
  }
  return value;
}

/** Decodes the proto bitmask back into a CreateFlag set. */
public static EnumSetWritable<CreateFlag> convertCreateFlag(int flag) {
  EnumSet<CreateFlag> result = EnumSet.noneOf(CreateFlag.class);
  if ((flag & CreateFlagProto.APPEND_VALUE) == CreateFlagProto.APPEND_VALUE) {
    result.add(CreateFlag.APPEND);
  }
  if ((flag & CreateFlagProto.CREATE_VALUE) == CreateFlagProto.CREATE_VALUE) {
    result.add(CreateFlag.CREATE);
  }
  if ((flag & CreateFlagProto.OVERWRITE_VALUE)
      == CreateFlagProto.OVERWRITE_VALUE) {
    result.add(CreateFlag.OVERWRITE);
  }
  return new EnumSetWritable<CreateFlag>(result);
}

/** Encodes a CacheFlag set as the proto bitmask. */
public static int convertCacheFlags(EnumSet<CacheFlag> flags) {
  int value = 0;
  if (flags.contains(CacheFlag.FORCE)) {
    value |= CacheFlagProto.FORCE.getNumber();
  }
  return value;
}

/** Decodes the proto bitmask back into a CacheFlag set. */
public static EnumSet<CacheFlag> convertCacheFlags(int flags) {
  EnumSet<CacheFlag> result = EnumSet.noneOf(CacheFlag.class);
  if ((flags & CacheFlagProto.FORCE_VALUE) == CacheFlagProto.FORCE_VALUE) {
    result.add(CacheFlag.FORCE);
  }
  return result;
}

public static HdfsFileStatus
// Continuation of: public static HdfsFileStatus convert(HdfsFileStatusProto).
// Always materializes an HdfsLocatedFileStatus; optional proto fields fall
// back to sentinel values (GRANDFATHER_INODE_ID, null locations, -1 children).
convert(HdfsFileStatusProto fs) {
  if (fs == null)
    return null;
  return new HdfsLocatedFileStatus(
      fs.getLength(), fs.getFileType().equals(FileType.IS_DIR),
      fs.getBlockReplication(), fs.getBlocksize(),
      fs.getModificationTime(), fs.getAccessTime(),
      PBHelper.convert(fs.getPermission()), fs.getOwner(), fs.getGroup(),
      fs.getFileType().equals(FileType.IS_SYMLINK) ?
          fs.getSymlink().toByteArray() : null,
      fs.getPath().toByteArray(),
      fs.hasFileId()? fs.getFileId(): INodeId.GRANDFATHER_INODE_ID,
      fs.hasLocations() ? PBHelper.convert(fs.getLocations()) : null,
      fs.hasChildrenNum() ? fs.getChildrenNum() : -1);
}

/** Deserializes a snapshottable-directory status entry. Null-safe. */
public static SnapshottableDirectoryStatus convert(
    SnapshottableDirectoryStatusProto sdirStatusProto) {
  if (sdirStatusProto == null) {
    return null;
  }
  final HdfsFileStatusProto status = sdirStatusProto.getDirStatus();
  return new SnapshottableDirectoryStatus(
      status.getModificationTime(),
      status.getAccessTime(),
      PBHelper.convert(status.getPermission()),
      status.getOwner(),
      status.getGroup(),
      status.getPath().toByteArray(),
      status.getFileId(),
      status.getChildrenNum(),
      sdirStatusProto.getSnapshotNumber(),
      sdirStatusProto.getSnapshotQuota(),
      sdirStatusProto.getParentFullpath().toByteArray());
}

/** Serializes a file status; symlink target and locations are optional. */
public static HdfsFileStatusProto convert(HdfsFileStatus fs) {
  if (fs == null)
    return null;
  FileType fType = FileType.IS_FILE;
  if (fs.isDir()) {
    fType = FileType.IS_DIR;
  } else if (fs.isSymlink()) {
    fType = FileType.IS_SYMLINK;
  }
  HdfsFileStatusProto.Builder builder = HdfsFileStatusProto.newBuilder().
    setLength(fs.getLen()).
    setFileType(fType).
    setBlockReplication(fs.getReplication()).
    setBlocksize(fs.getBlockSize()).
    setModificationTime(fs.getModificationTime()).
    setAccessTime(fs.getAccessTime()).
    setPermission(PBHelper.convert(fs.getPermission())).
    setOwner(fs.getOwner()).
    setGroup(fs.getGroup()).
    setFileId(fs.getFileId()).
    setChildrenNum(fs.getChildrenNum()).
    setPath(ByteString.copyFrom(fs.getLocalNameInBytes()));
  if (fs.isSymlink()) {
    builder.setSymlink(ByteString.copyFrom(fs.getSymlinkInBytes()));
  }
  if (fs instanceof HdfsLocatedFileStatus) {
    // Only located statuses carry block locations.
    LocatedBlocks locations = ((HdfsLocatedFileStatus)fs).getBlockLocations();
    if (locations != null) {
      builder.setLocations(PBHelper.convert(locations));
    }
  }
  return builder.build();
}

/** Serializes a snapshottable-directory status; null parent path -> empty. */
public static SnapshottableDirectoryStatusProto convert(
    SnapshottableDirectoryStatus status) {
  if (status == null) {
    return null;
  }
  int snapshotNumber = status.getSnapshotNumber();
  int snapshotQuota = status.getSnapshotQuota();
  byte[] parentFullPath = status.getParentFullPath();
  ByteString parentFullPathBytes = ByteString.copyFrom(
      parentFullPath == null ? DFSUtil.EMPTY_BYTES : parentFullPath);
  HdfsFileStatusProto fs = convert(status.getDirStatus());
  SnapshottableDirectoryStatusProto.Builder builder = SnapshottableDirectoryStatusProto
      .newBuilder().setSnapshotNumber(snapshotNumber)
      .setSnapshotQuota(snapshotQuota).setParentFullpath(parentFullPathBytes)
      .setDirStatus(fs);
  return builder.build();
}

/** Element-wise status[] -> proto[]. Null-safe. */
public static HdfsFileStatusProto[] convert(HdfsFileStatus[] fs) {
  if (fs == null) return null;
  final int len = fs.length;
  HdfsFileStatusProto[] result = new HdfsFileStatusProto[len];
  for (int i = 0; i < len; ++i) {
    result[i] = PBHelper.convert(fs[i]);
  }
  return result;
}

/** Element-wise proto[] -> status[]. Null-safe. */
public static HdfsFileStatus[] convert(HdfsFileStatusProto[] fs) {
  if (fs == null) return null;
  final int len = fs.length;
  HdfsFileStatus[] result = new HdfsFileStatus[len];
  for (int i = 0; i < len; ++i) {
    result[i] = PBHelper.convert(fs[i]);
  }
  return result;
}

/** Deserializes a partial directory listing page. Null-safe. */
public static DirectoryListing convert(DirectoryListingProto dl) {
  if (dl == null)
    return null;
  List<HdfsFileStatusProto> partList = dl.getPartialListingList();
  return new DirectoryListing(
      partList.isEmpty() ?
// Continuation of convert(DirectoryListingProto): an empty partial listing
// short-circuits the array conversion.
new HdfsLocatedFileStatus[0] :
          PBHelper.convert(
              partList.toArray(new HdfsFileStatusProto[partList.size()])),
      dl.getRemainingEntries());
}

/** Serializes a directory listing page. Null-safe. */
public static DirectoryListingProto convert(DirectoryListing d) {
  if (d == null)
    return null;
  return DirectoryListingProto.newBuilder().
      addAllPartialListing(Arrays.asList(
          PBHelper.convert(d.getPartialListing()))).
      setRemainingEntries(d.getRemainingEntries()).
      build();
}

/** Unpacks fs stats into the positional long[] used by ClientProtocol. */
public static long[] convert(GetFsStatsResponseProto res) {
  long[] result = new long[6];
  result[ClientProtocol.GET_STATS_CAPACITY_IDX] = res.getCapacity();
  result[ClientProtocol.GET_STATS_USED_IDX] = res.getUsed();
  result[ClientProtocol.GET_STATS_REMAINING_IDX] = res.getRemaining();
  result[ClientProtocol.GET_STATS_UNDER_REPLICATED_IDX] =
      res.getUnderReplicated();
  result[ClientProtocol.GET_STATS_CORRUPT_BLOCKS_IDX] =
      res.getCorruptBlocks();
  result[ClientProtocol.GET_STATS_MISSING_BLOCKS_IDX] =
      res.getMissingBlocks();
  return result;
}

/** Packs the positional long[] of fs stats; tolerates short arrays. */
public static GetFsStatsResponseProto convert(long[] fsStats) {
  GetFsStatsResponseProto.Builder result = GetFsStatsResponseProto
      .newBuilder();
  if (fsStats.length >= ClientProtocol.GET_STATS_CAPACITY_IDX + 1)
    result.setCapacity(fsStats[ClientProtocol.GET_STATS_CAPACITY_IDX]);
  if (fsStats.length >= ClientProtocol.GET_STATS_USED_IDX + 1)
    result.setUsed(fsStats[ClientProtocol.GET_STATS_USED_IDX]);
  if (fsStats.length >= ClientProtocol.GET_STATS_REMAINING_IDX + 1)
    result.setRemaining(fsStats[ClientProtocol.GET_STATS_REMAINING_IDX]);
  if (fsStats.length >= ClientProtocol.GET_STATS_UNDER_REPLICATED_IDX + 1)
    result.setUnderReplicated(
        fsStats[ClientProtocol.GET_STATS_UNDER_REPLICATED_IDX]);
  if (fsStats.length >= ClientProtocol.GET_STATS_CORRUPT_BLOCKS_IDX + 1)
    result.setCorruptBlocks(
        fsStats[ClientProtocol.GET_STATS_CORRUPT_BLOCKS_IDX]);
  if (fsStats.length >= ClientProtocol.GET_STATS_MISSING_BLOCKS_IDX + 1)
    result.setMissingBlocks(
        fsStats[ClientProtocol.GET_STATS_MISSING_BLOCKS_IDX]);
  return result.build();
}

/** Maps a datanode report type to proto; rejects unknown values. */
public static DatanodeReportTypeProto convert(DatanodeReportType t) {
  switch (t) {
  case ALL: return DatanodeReportTypeProto.ALL;
  case LIVE: return DatanodeReportTypeProto.LIVE;
  case DEAD: return DatanodeReportTypeProto.DEAD;
  case DECOMMISSIONING: return DatanodeReportTypeProto.DECOMMISSIONING;
  default:
    throw new IllegalArgumentException("Unexpected data type report:" + t);
  }
}

/** Inverse of the above; rejects unknown values. */
public static DatanodeReportType convert(DatanodeReportTypeProto t) {
  switch (t) {
  case ALL: return DatanodeReportType.ALL;
  case LIVE: return DatanodeReportType.LIVE;
  case DEAD: return DatanodeReportType.DEAD;
  case DECOMMISSIONING: return DatanodeReportType.DECOMMISSIONING;
  default:
    throw new IllegalArgumentException("Unexpected data type report:" + t);
  }
}

/** Maps a safe-mode action to proto; rejects unknown values. */
public static SafeModeActionProto convert(
    SafeModeAction a) {
  switch (a) {
  case SAFEMODE_LEAVE:
    return SafeModeActionProto.SAFEMODE_LEAVE;
  case SAFEMODE_ENTER:
    return SafeModeActionProto.SAFEMODE_ENTER;
  case SAFEMODE_GET:
    return SafeModeActionProto.SAFEMODE_GET;
  default:
    throw new IllegalArgumentException("Unexpected SafeModeAction :" + a);
  }
}

/** Inverse of the above; rejects unknown values. */
public static SafeModeAction convert(
    ClientNamenodeProtocolProtos.SafeModeActionProto a) {
  switch (a) {
  case SAFEMODE_LEAVE:
    return SafeModeAction.SAFEMODE_LEAVE;
  case SAFEMODE_ENTER:
    return SafeModeAction.SAFEMODE_ENTER;
  case SAFEMODE_GET:
    return SafeModeAction.SAFEMODE_GET;
  default:
    throw new IllegalArgumentException("Unexpected SafeModeAction :" + a);
  }
}

/** Maps a rolling-upgrade action to proto; note PREPARE maps to START. */
public static RollingUpgradeActionProto convert(RollingUpgradeAction a) {
  switch (a) {
  case QUERY:
    return RollingUpgradeActionProto.QUERY;
  case PREPARE:
    return RollingUpgradeActionProto.START;
  case FINALIZE:
    return RollingUpgradeActionProto.FINALIZE;
  default:
    throw new IllegalArgumentException("Unexpected value: " + a);
  }
}

/** Inverse of the above; START maps back to PREPARE. */
public static RollingUpgradeAction convert(RollingUpgradeActionProto a) {
  switch (a) {
  case QUERY:
    return RollingUpgradeAction.QUERY;
  case START:
    return RollingUpgradeAction.PREPARE;
  case FINALIZE:
    return RollingUpgradeAction.FINALIZE;
  default:
    throw new IllegalArgumentException("Unexpected value: " + a);
  }
}

/** Serializes a rolling-upgrade status (block pool id only). */
public static RollingUpgradeStatusProto convertRollingUpgradeStatus(
    RollingUpgradeStatus status) {
  return RollingUpgradeStatusProto.newBuilder()
      .setBlockPoolId(status.getBlockPoolId())
      .build();
}

/** Deserializes a rolling-upgrade status. */
public static RollingUpgradeStatus convert(RollingUpgradeStatusProto proto) {
  return new RollingUpgradeStatus(proto.getBlockPoolId());
}

/** Serializes rolling-upgrade info (status plus timing/rollback flags). */
public static RollingUpgradeInfoProto convert(RollingUpgradeInfo info) {
  return RollingUpgradeInfoProto.newBuilder()
      .setStatus(convertRollingUpgradeStatus(info))
      .setCreatedRollbackImages(info.createdRollbackImages())
      .setStartTime(info.getStartTime())
      .setFinalizeTime(info.getFinalizeTime())
      .build();
}

/** Deserializes rolling-upgrade info. */
public static RollingUpgradeInfo convert(RollingUpgradeInfoProto proto) {
  RollingUpgradeStatusProto status = proto.getStatus();
  return new RollingUpgradeInfo(status.getBlockPoolId(),
      proto.getCreatedRollbackImages(),
      proto.getStartTime(), proto.getFinalizeTime());
}

/** Deserializes corrupt-file-blocks (files + paging cookie). Null-safe. */
public static CorruptFileBlocks convert(CorruptFileBlocksProto c) {
  if (c == null)
    return null;
  List<String> fileList = c.getFilesList();
  return new CorruptFileBlocks(fileList.toArray(new String[fileList.size()]),
      c.getCookie());
}

/** Serializes corrupt-file-blocks. Null-safe. */
public static CorruptFileBlocksProto convert(CorruptFileBlocks c) {
  if (c == null)
    return null;
  return CorruptFileBlocksProto.newBuilder().
      addAllFiles(Arrays.asList(c.getFiles())).
      setCookie(c.getCookie()).
      build();
}

/** Deserializes a content summary (sizes, counts, quotas). Null-safe. */
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  return new ContentSummary(
    cs.getLength(), cs.getFileCount(),
    cs.getDirectoryCount(), cs.getQuota(),
    cs.getSpaceConsumed(), cs.getSpaceQuota());
}

/** Serializes a content summary. Null-safe. */
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  return ContentSummaryProto.newBuilder().
      setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
// Continuation of convert(ContentSummary): final field and build.
setSpaceQuota(cs.getSpaceQuota()).
      build();
}

/** Deserializes an HA heartbeat; only ACTIVE/STANDBY are accepted. */
public static NNHAStatusHeartbeat convert(NNHAStatusHeartbeatProto s) {
  if (s == null) return null;
  switch (s.getState()) {
  case ACTIVE:
    return new NNHAStatusHeartbeat(HAServiceState.ACTIVE, s.getTxid());
  case STANDBY:
    return new NNHAStatusHeartbeat(HAServiceState.STANDBY, s.getTxid());
  default:
    throw new IllegalArgumentException("Unexpected NNHAStatusHeartbeat.State:" +
        s.getState());
  }
}

/** Serializes an HA heartbeat; only ACTIVE/STANDBY are accepted. */
public static NNHAStatusHeartbeatProto convert(NNHAStatusHeartbeat hb) {
  if (hb == null) return null;
  NNHAStatusHeartbeatProto.Builder builder =
      NNHAStatusHeartbeatProto.newBuilder();
  switch (hb.getState()) {
  case ACTIVE:
    builder.setState(NNHAStatusHeartbeatProto.State.ACTIVE);
    break;
  case STANDBY:
    builder.setState(NNHAStatusHeartbeatProto.State.STANDBY);
    break;
  default:
    throw new IllegalArgumentException("Unexpected NNHAStatusHeartbeat.State:" +
        hb.getState());
  }
  builder.setTxid(hb.getTxId());
  return builder.build();
}

/** Serializes a datanode storage descriptor (state, type, uuid). */
public static DatanodeStorageProto convert(DatanodeStorage s) {
  return DatanodeStorageProto.newBuilder()
      .setState(PBHelper.convertState(s.getState()))
      .setStorageType(PBHelper.convertStorageType(s.getStorageType()))
      .setStorageUuid(s.getStorageID()).build();
}

/** Maps a storage state to proto; default -> NORMAL. */
private static StorageState convertState(State state) {
  switch(state) {
  case READ_ONLY_SHARED:
    return StorageState.READ_ONLY_SHARED;
  case NORMAL:
  default:
    return StorageState.NORMAL;
  }
}

/** Maps a storage type to proto; unknown types are a bug. */
private static StorageTypeProto convertStorageType(
    StorageType type) {
  switch(type) {
  case DISK:
    return StorageTypeProto.DISK;
  case SSD:
    return StorageTypeProto.SSD;
  default:
    throw new IllegalStateException(
        "BUG: StorageType not found, type=" + type);
  }
}

/** Deserializes a datanode storage descriptor. */
public static DatanodeStorage convert(DatanodeStorageProto s) {
  return new DatanodeStorage(s.getStorageUuid(),
      PBHelper.convertState(s.getState()),
      PBHelper.convertType(s.getStorageType()));
}

/** Maps a proto storage state back; default -> NORMAL. */
private static State convertState(StorageState state) {
  switch(state) {
  case READ_ONLY_SHARED:
    return DatanodeStorage.State.READ_ONLY_SHARED;
  case NORMAL:
  default:
    return DatanodeStorage.State.NORMAL;
  }
}

/** Maps a proto storage type back; unknown types are a bug. */
private static StorageType convertType(StorageTypeProto type) {
  switch(type) {
  case DISK:
    return StorageType.DISK;
  case SSD:
    return StorageType.SSD;
  default:
    throw new IllegalStateException(
        "BUG: StorageTypeProto not found, type=" + type);
  }
}

/** Element-wise proto list -> StorageType[]. */
private static StorageType[] convertStorageTypeProtos(
    List<StorageTypeProto> storageTypesList) {
  final StorageType[] storageTypes = new StorageType[storageTypesList.size()];
  for (int i = 0; i < storageTypes.length; ++i) {
    storageTypes[i] = PBHelper.convertType(storageTypesList.get(i));
  }
  return storageTypes;
}

/** Serializes a per-storage usage report. */
public static StorageReportProto convert(StorageReport r) {
  StorageReportProto.Builder builder = StorageReportProto.newBuilder()
      .setBlockPoolUsed(r.getBlockPoolUsed()).setCapacity(r.getCapacity())
      .setDfsUsed(r.getDfsUsed()).setRemaining(r.getRemaining())
      .setStorageUuid(r.getStorage().getStorageID())
      .setStorage(convert(r.getStorage()));
  return builder.build();
}

/** Deserializes a usage report; falls back to a uuid-only storage. */
public static StorageReport convert(StorageReportProto p) {
  return new StorageReport(
      p.hasStorage() ?
          convert(p.getStorage()) :
          new DatanodeStorage(p.getStorageUuid()),
      p.getFailed(), p.getCapacity(), p.getDfsUsed(), p.getRemaining(),
      p.getBlockPoolUsed());
}

/** Element-wise proto list -> StorageReport[]. */
public static StorageReport[] convertStorageReports(
    List<StorageReportProto> list) {
  final StorageReport[] report = new StorageReport[list.size()];
  for (int i = 0; i < report.length; i++) {
    report[i] = convert(list.get(i));
  }
  return report;
}

/** Deserializes journal info; missing optional fields default to 0. */
public static JournalInfo convert(JournalInfoProto info) {
  int lv = info.hasLayoutVersion() ? info.getLayoutVersion() : 0;
  int nsID = info.hasNamespaceID() ? info.getNamespaceID() : 0;
  return new JournalInfo(lv, info.getClusterID(), nsID);
}

/**
 * Method used for converting {@link JournalInfoProto} sent from Namenode
 * to Journal receivers to {@link NamenodeRegistration}.
 * NOTE(review): this comment appears stale — the method it precedes
 * converts a JournalInfo TO a JournalInfoProto; confirm against history.
 */
public static JournalInfoProto convert(JournalInfo j) {
  return JournalInfoProto.newBuilder().setClusterID(j.getClusterId())
      .setLayoutVersion(j.getLayoutVersion())
      .setNamespaceID(j.getNamespaceId()).build();
}

/** Deserializes a snapshottable-directory listing. Null-safe. */
public static SnapshottableDirectoryStatus[] convert(
    SnapshottableDirectoryListingProto sdlp) {
  if (sdlp == null)
    return null;
  List<SnapshottableDirectoryStatusProto> list = sdlp
      .getSnapshottableDirListingList();
  if (list.isEmpty()) {
    return new SnapshottableDirectoryStatus[0];
  } else {
    SnapshottableDirectoryStatus[] result =
        new SnapshottableDirectoryStatus[list.size()];
    for (int i = 0; i < list.size(); i++) {
      result[i] = PBHelper.convert(list.get(i));
    }
    return result;
  }
}

/** Serializes a snapshottable-directory listing. Null-safe. */
public static SnapshottableDirectoryListingProto convert(
    SnapshottableDirectoryStatus[] status) {
  if (status == null)
    return null;
  SnapshottableDirectoryStatusProto[] protos =
      new SnapshottableDirectoryStatusProto[status.length];
  for (int i = 0; i < status.length; i++) {
    protos[i] = PBHelper.convert(status[i]);
  }
  List<SnapshottableDirectoryStatusProto> protoList = Arrays.asList(protos);
  return SnapshottableDirectoryListingProto.newBuilder()
      .addAllSnapshottableDirListing(protoList).build();
}

/** Deserializes a snapshot-diff entry; unknown labels yield null. */
public static DiffReportEntry convert(SnapshotDiffReportEntryProto entry) {
  if (entry == null) {
    return null;
  }
  DiffType type = DiffType.getTypeFromLabel(entry
      .getModificationLabel());
  return type == null ? null : new DiffReportEntry(type, entry.getFullpath()
      .toByteArray(), entry.hasTargetPath() ? entry.getTargetPath()
      .toByteArray() : null);
}

/** Serializes a snapshot-diff entry; target path only for renames. */
public static SnapshotDiffReportEntryProto convert(DiffReportEntry entry) {
  if (entry == null) {
    return null;
  }
  ByteString sourcePath = ByteString
      .copyFrom(entry.getSourcePath() == null ? DFSUtil.EMPTY_BYTES : entry
          .getSourcePath());
  String modification = entry.getType().getLabel();
  SnapshotDiffReportEntryProto.Builder builder = SnapshotDiffReportEntryProto
      .newBuilder().setFullpath(sourcePath)
      .setModificationLabel(modification);
  if (entry.getType() == DiffType.RENAME) {
    ByteString targetPath = ByteString
        .copyFrom(entry.getTargetPath() == null ? DFSUtil.EMPTY_BYTES : entry
            .getTargetPath());
    builder.setTargetPath(targetPath);
  }
  return builder.build();
}

/** Deserializes a full snapshot diff report; skips null entries. */
public static SnapshotDiffReport convert(SnapshotDiffReportProto reportProto) {
  if (reportProto == null) {
    return null;
  }
  String snapshotDir = reportProto.getSnapshotRoot();
  String fromSnapshot = reportProto.getFromSnapshot();
  String toSnapshot = reportProto.getToSnapshot();
  List<SnapshotDiffReportEntryProto> list = reportProto
      .getDiffReportEntriesList();
  List<DiffReportEntry> entries = new ArrayList<DiffReportEntry>();
  for (SnapshotDiffReportEntryProto entryProto : list) {
    DiffReportEntry entry = convert(entryProto);
    if (entry != null)
      entries.add(entry);
  }
  return new SnapshotDiffReport(snapshotDir, fromSnapshot, toSnapshot,
      entries);
}

/** Serializes a full snapshot diff report; skips null entries. */
public static SnapshotDiffReportProto convert(SnapshotDiffReport report) {
  if (report == null) {
    return null;
  }
  List<DiffReportEntry> entries = report.getDiffList();
  List<SnapshotDiffReportEntryProto> entryProtos =
      new ArrayList<SnapshotDiffReportEntryProto>();
  for (DiffReportEntry entry : entries) {
    SnapshotDiffReportEntryProto entryProto = convert(entry);
    if (entryProto != null)
      entryProtos.add(entryProto);
  }
  SnapshotDiffReportProto reportProto = SnapshotDiffReportProto.newBuilder()
      .setSnapshotRoot(report.getSnapshotRoot())
      .setFromSnapshot(report.getFromSnapshot())
      .setToSnapshot(report.getLaterSnapshotName())
      .addAllDiffReportEntries(entryProtos).build();
  return reportProto;
}

/** Maps a proto checksum type to DataChecksum.Type by enum number. */
public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) {
  return DataChecksum.Type.valueOf(type.getNumber());
}

/** Serializes a cache directive; every field is optional. */
public static CacheDirectiveInfoProto convert
    (CacheDirectiveInfo info) {
  CacheDirectiveInfoProto.Builder builder =
      CacheDirectiveInfoProto.newBuilder();
  if (info.getId() != null) {
    builder.setId(info.getId());
  }
  if (info.getPath() != null) {
    builder.setPath(info.getPath().toUri().getPath());
  }
  if (info.getReplication() != null) {
    builder.setReplication(info.getReplication());
  }
  if (info.getPool() != null) {
    builder.setPool(info.getPool());
  }
  if (info.getExpiration() != null) {
    builder.setExpiration(convert(info.getExpiration()));
  }
  return builder.build();
}

/** Deserializes a cache directive; only set fields are copied. */
public static CacheDirectiveInfo convert
    (CacheDirectiveInfoProto proto) {
  CacheDirectiveInfo.Builder builder =
      new CacheDirectiveInfo.Builder();
  if (proto.hasId()) {
    builder.setId(proto.getId());
  }
  if (proto.hasPath()) {
    builder.setPath(new Path(proto.getPath()));
  }
  if (proto.hasReplication()) {
    // checkedCast guards against out-of-short-range wire values.
    builder.setReplication(Shorts.checkedCast(
        proto.getReplication()));
  }
  if (proto.hasPool()) {
    builder.setPool(proto.getPool());
  }
  if (proto.hasExpiration()) {
    builder.setExpiration(convert(proto.getExpiration()));
  }
  return builder.build();
}

/** Serializes a cache-directive expiration (relative flag + millis). */
public static CacheDirectiveInfoExpirationProto convert(
    CacheDirectiveInfo.Expiration expiration) {
  return CacheDirectiveInfoExpirationProto.newBuilder()
      .setIsRelative(expiration.isRelative())
      .setMillis(expiration.getMillis())
      .build();
}

/** Deserializes a cache-directive expiration. */
public static CacheDirectiveInfo.Expiration convert(
    CacheDirectiveInfoExpirationProto proto) {
  if (proto.getIsRelative()) {
    return CacheDirectiveInfo.Expiration.newRelative(proto.getMillis());
  }
  return CacheDirectiveInfo.Expiration.newAbsolute(proto.getMillis());
}

/** Serializes cache-directive stats (bytes/files needed and cached). */
public static CacheDirectiveStatsProto convert(CacheDirectiveStats stats) {
  CacheDirectiveStatsProto.Builder builder =
      CacheDirectiveStatsProto.newBuilder();
  builder.setBytesNeeded(stats.getBytesNeeded());
  builder.setBytesCached(stats.getBytesCached());
  builder.setFilesNeeded(stats.getFilesNeeded());
  builder.setFilesCached(stats.getFilesCached());
  builder.setHasExpired(stats.hasExpired());
  return builder.build();
}

// NOTE(review): the next declaration continues past this chunk.
public
static CacheDirectiveStats convert(CacheDirectiveStatsProto proto) { CacheDirectiveStats.Builder builder = new CacheDirectiveStats.Builder(); builder.setBytesNeeded(proto.getBytesNeeded()); builder.setBytesCached(proto.getBytesCached()); builder.setFilesNeeded(proto.getFilesNeeded()); builder.setFilesCached(proto.getFilesCached()); builder.setHasExpired(proto.getHasExpired()); return builder.build(); } public static CacheDirectiveEntryProto convert(CacheDirectiveEntry entry) { CacheDirectiveEntryProto.Builder builder = CacheDirectiveEntryProto.newBuilder(); builder.setInfo(PBHelper.convert(entry.getInfo())); builder.setStats(PBHelper.convert(entry.getStats())); return builder.build(); } public static CacheDirectiveEntry convert(CacheDirectiveEntryProto proto) { CacheDirectiveInfo info = PBHelper.convert(proto.getInfo()); CacheDirectiveStats stats = PBHelper.convert(proto.getStats()); return new CacheDirectiveEntry(info, stats); } public static CachePoolInfoProto convert(CachePoolInfo info) { CachePoolInfoProto.Builder builder = CachePoolInfoProto.newBuilder(); builder.setPoolName(info.getPoolName()); if (info.getOwnerName() != null) { builder.setOwnerName(info.getOwnerName()); } if (info.getGroupName() != null) { builder.setGroupName(info.getGroupName()); } if (info.getMode() != null) { builder.setMode(info.getMode().toShort()); } if (info.getLimit() != null) { builder.setLimit(info.getLimit()); } if (info.getMaxRelativeExpiryMs() != null) { builder.setMaxRelativeExpiry(info.getMaxRelativeExpiryMs()); } return builder.build(); } public static CachePoolInfo convert (CachePoolInfoProto proto) { // Pool name is a required field, the rest are optional String poolName = checkNotNull(proto.getPoolName()); CachePoolInfo info = new CachePoolInfo(poolName); if (proto.hasOwnerName()) { info.setOwnerName(proto.getOwnerName()); } if (proto.hasGroupName()) { info.setGroupName(proto.getGroupName()); } if (proto.hasMode()) { info.setMode(new FsPermission((short)proto.getMode())); 
} if (proto.hasLimit()) { info.setLimit(proto.getLimit()); } if (proto.hasMaxRelativeExpiry()) { info.setMaxRelativeExpiryMs(proto.getMaxRelativeExpiry()); } return info; } public static CachePoolStatsProto convert(CachePoolStats stats) { CachePoolStatsProto.Builder builder = CachePoolStatsProto.newBuilder(); builder.setBytesNeeded(stats.getBytesNeeded()); builder.setBytesCached(stats.getBytesCached()); builder.setBytesOverlimit(stats.getBytesOverlimit()); builder.setFilesNeeded(stats.getFilesNeeded()); builder.setFilesCached(stats.getFilesCached()); return builder.build(); } public static CachePoolStats convert (CachePoolStatsProto proto) { CachePoolStats.Builder builder = new CachePoolStats.Builder(); builder.setBytesNeeded(proto.getBytesNeeded()); builder.setBytesCached(proto.getBytesCached()); builder.setBytesOverlimit(proto.getBytesOverlimit()); builder.setFilesNeeded(proto.getFilesNeeded()); builder.setFilesCached(proto.getFilesCached()); return builder.build(); } public static CachePoolEntryProto convert(CachePoolEntry entry) { CachePoolEntryProto.Builder builder = CachePoolEntryProto.newBuilder(); builder.setInfo(PBHelper.convert(entry.getInfo())); builder.setStats(PBHelper.convert(entry.getStats())); return builder.build(); } public static CachePoolEntry convert (CachePoolEntryProto proto) { CachePoolInfo info = PBHelper.convert(proto.getInfo()); CachePoolStats stats = PBHelper.convert(proto.getStats()); return new CachePoolEntry(info, stats); } public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) { return HdfsProtos.ChecksumTypeProto.valueOf(type.id); } public static DatanodeLocalInfoProto convert(DatanodeLocalInfo info) { DatanodeLocalInfoProto.Builder builder = DatanodeLocalInfoProto.newBuilder(); builder.setSoftwareVersion(info.getSoftwareVersion()); builder.setConfigVersion(info.getConfigVersion()); builder.setUptime(info.getUptime()); return builder.build(); } public static DatanodeLocalInfo convert(DatanodeLocalInfoProto proto) 
{ return new DatanodeLocalInfo(proto.getSoftwareVersion(), proto.getConfigVersion(), proto.getUptime()); } public static InputStream vintPrefixed(final InputStream input) throws IOException { final int firstByte = input.read(); if (firstByte == -1) { throw new EOFException("Premature EOF: no length prefix available"); } int size = CodedInputStream.readRawVarint32(firstByte, input); assert size >= 0; return new ExactSizeInputStream(input, size); } private static AclEntryScopeProto convert(AclEntryScope v) { return AclEntryScopeProto.valueOf(v.ordinal()); } private static AclEntryScope convert(AclEntryScopeProto v) { return castEnum(v, ACL_ENTRY_SCOPE_VALUES); } private static AclEntryTypeProto convert(AclEntryType e) { return AclEntryTypeProto.valueOf(e.ordinal()); } private static AclEntryType convert(AclEntryTypeProto v) { return castEnum(v, ACL_ENTRY_TYPE_VALUES); } private static XAttrNamespaceProto convert(XAttr.NameSpace v) { return XAttrNamespaceProto.valueOf(v.ordinal()); } private static XAttr.NameSpace convert(XAttrNamespaceProto v) { return castEnum(v, XATTR_NAMESPACE_VALUES); } private static FsActionProto convert(FsAction v) { return FsActionProto.valueOf(v != null ? 
v.ordinal() : 0); } private static FsAction convert(FsActionProto v) { return castEnum(v, FSACTION_VALUES); } public static List<AclEntryProto> convertAclEntryProto( List<AclEntry> aclSpec) { ArrayList<AclEntryProto> r = Lists.newArrayListWithCapacity(aclSpec.size()); for (AclEntry e : aclSpec) { AclEntryProto.Builder builder = AclEntryProto.newBuilder(); builder.setType(convert(e.getType())); builder.setScope(convert(e.getScope())); builder.setPermissions(convert(e.getPermission())); if (e.getName() != null) { builder.setName(e.getName()); } r.add(builder.build()); } return r; } public static List<AclEntry> convertAclEntry(List<AclEntryProto> aclSpec) { ArrayList<AclEntry> r = Lists.newArrayListWithCapacity(aclSpec.size()); for (AclEntryProto e : aclSpec) { AclEntry.Builder builder = new AclEntry.Builder(); builder.setType(convert(e.getType())); builder.setScope(convert(e.getScope())); builder.setPermission(convert(e.getPermissions())); if (e.hasName()) { builder.setName(e.getName()); } r.add(builder.build()); } return r; } public static AclStatus convert(GetAclStatusResponseProto e) { AclStatusProto r = e.getResult(); return new AclStatus.Builder().owner(r.getOwner()).group(r.getGroup()) .stickyBit(r.getSticky()) .addEntries(convertAclEntry(r.getEntriesList())).build(); } public static GetAclStatusResponseProto convert(AclStatus e) { AclStatusProto r = AclStatusProto.newBuilder().setOwner(e.getOwner()) .setGroup(e.getGroup()).setSticky(e.isStickyBit()) .addAllEntries(convertAclEntryProto(e.getEntries())).build(); return GetAclStatusResponseProto.newBuilder().setResult(r).build(); } public static XAttrProto convertXAttrProto(XAttr a) { XAttrProto.Builder builder = XAttrProto.newBuilder(); builder.setNamespace(convert(a.getNameSpace())); if (a.getName() != null) { builder.setName(a.getName()); } if (a.getValue() != null) { builder.setValue(getByteString(a.getValue())); } return builder.build(); } public static List<XAttrProto> convertXAttrProto( List<XAttr> 
xAttrSpec) { if (xAttrSpec == null) { return Lists.newArrayListWithCapacity(0); } ArrayList<XAttrProto> xAttrs = Lists.newArrayListWithCapacity( xAttrSpec.size()); for (XAttr a : xAttrSpec) { XAttrProto.Builder builder = XAttrProto.newBuilder(); builder.setNamespace(convert(a.getNameSpace())); if (a.getName() != null) { builder.setName(a.getName()); } if (a.getValue() != null) { builder.setValue(getByteString(a.getValue())); } xAttrs.add(builder.build()); } return xAttrs; } /** * The flag field in PB is a bitmask whose values are the same a the * emum values of XAttrSetFlag */ public static int convert(EnumSet<XAttrSetFlag> flag) { int value = 0; if (flag.contains(XAttrSetFlag.CREATE)) { value |= XAttrSetFlagProto.XATTR_CREATE.getNumber(); } if (flag.contains(XAttrSetFlag.REPLACE)) { value |= XAttrSetFlagProto.XATTR_REPLACE.getNumber(); } return value; } public static EnumSet<XAttrSetFlag> convert(int flag) { EnumSet<XAttrSetFlag> result = EnumSet.noneOf(XAttrSetFlag.class); if ((flag & XAttrSetFlagProto.XATTR_CREATE_VALUE) == XAttrSetFlagProto.XATTR_CREATE_VALUE) { result.add(XAttrSetFlag.CREATE); } if ((flag & XAttrSetFlagProto.XATTR_REPLACE_VALUE) == XAttrSetFlagProto.XATTR_REPLACE_VALUE) { result.add(XAttrSetFlag.REPLACE); } return result; } public static XAttr convertXAttr(XAttrProto a) { XAttr.Builder builder = new XAttr.Builder(); builder.setNameSpace(convert(a.getNamespace())); if (a.hasName()) { builder.setName(a.getName()); } if (a.hasValue()) { builder.setValue(a.getValue().toByteArray()); } return builder.build(); } public static List<XAttr> convertXAttrs(List<XAttrProto> xAttrSpec) { ArrayList<XAttr> xAttrs = Lists.newArrayListWithCapacity(xAttrSpec.size()); for (XAttrProto a : xAttrSpec) { XAttr.Builder builder = new XAttr.Builder(); builder.setNameSpace(convert(a.getNamespace())); if (a.hasName()) { builder.setName(a.getName()); } if (a.hasValue()) { builder.setValue(a.getValue().toByteArray()); } xAttrs.add(builder.build()); } return xAttrs; } 
public static List<XAttr> convert(GetXAttrsResponseProto a) { List<XAttrProto> xAttrs = a.getXAttrsList(); return convertXAttrs(xAttrs); } public static GetXAttrsResponseProto convertXAttrsResponse( List<XAttr> xAttrs) { GetXAttrsResponseProto.Builder builder = GetXAttrsResponseProto .newBuilder(); if (xAttrs != null) { builder.addAllXAttrs(convertXAttrProto(xAttrs)); } return builder.build(); } public static List<XAttr> convert(ListXAttrsResponseProto a) { final List<XAttrProto> xAttrs = a.getXAttrsList(); return convertXAttrs(xAttrs); } public static ListXAttrsResponseProto convertListXAttrsResponse( List<XAttr> names) { ListXAttrsResponseProto.Builder builder = ListXAttrsResponseProto.newBuilder(); if (names != null) { builder.addAllXAttrs(convertXAttrProto(names)); } return builder.build(); } public static ShortCircuitShmSlotProto convert(SlotId slotId) { return ShortCircuitShmSlotProto.newBuilder(). setShmId(convert(slotId.getShmId())). setSlotIdx(slotId.getSlotIdx()). build(); } public static ShortCircuitShmIdProto convert(ShmId shmId) { return ShortCircuitShmIdProto.newBuilder(). setHi(shmId.getHi()). setLo(shmId.getLo()). build(); } public static SlotId convert(ShortCircuitShmSlotProto slotId) { return new SlotId(PBHelper.convert(slotId.getShmId()), slotId.getSlotIdx()); } public static ShmId convert(ShortCircuitShmIdProto shmId) { return new ShmId(shmId.getHi(), shmId.getLo()); } }
tseen/Federated-HDFS
tseenliu/FedHDFS-hadoop-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
Java
apache-2.0
85,903
/*
 * Copyright 2015 LG CNS.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package scouter.client.maria.views;

import java.util.List;

import org.csstudio.swt.xygraph.dataprovider.CircularBufferDataProvider;
import org.csstudio.swt.xygraph.dataprovider.Sample;
import org.csstudio.swt.xygraph.figures.Trace;
import org.csstudio.swt.xygraph.figures.Trace.PointStyle;
import org.csstudio.swt.xygraph.figures.Trace.TraceType;
import org.csstudio.swt.xygraph.figures.XYGraph;
import org.eclipse.draw2d.FigureCanvas;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.events.ControlListener;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IViewSite;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.part.ViewPart;

import scouter.client.Images;
import scouter.client.listeners.RangeMouseListener;
import scouter.client.maria.actions.OpenDigestTableAction;
import scouter.client.model.AgentModelThread;
import scouter.client.model.RefreshThread;
import scouter.client.model.RefreshThread.Refreshable;
import scouter.client.net.TcpProxy;
import scouter.client.preferences.PManager;
import scouter.client.preferences.PreferenceConstants;
import scouter.client.server.Server;
import scouter.client.server.ServerManager;
import scouter.client.util.ChartUtil;
import scouter.client.util.ColorUtil;
import scouter.client.util.ConsoleProxy;
import scouter.client.util.ExUtil;
import scouter.client.util.ScouterUtil;
import scouter.client.util.TimeUtil;
import scouter.lang.counters.CounterConstants;
import scouter.lang.pack.MapPack;
import scouter.lang.pack.Pack;
import scouter.lang.value.ListValue;
import scouter.net.RequestCmd;
import scouter.util.CastUtil;
import scouter.util.DateUtil;

/**
 * Eclipse view part that plots the real-time average response time reported by
 * the MariaDB plugin agents of one collector server as a scrolling area chart.
 * A background {@link RefreshThread} polls the server every
 * {@link #REFRESH_INTERVAL} ms via {@link #refresh()}, and the chart keeps the
 * last {@link #TIME_RANGE} ms of samples visible on the x-axis.
 */
public class DbRealtimeTotalResponseView extends ViewPart implements Refreshable {

	public static final String ID = DbRealtimeTotalResponseView.class.getName();

	// Collector server this view is bound to; parsed from the view's secondary id in init().
	int serverId;
	// Background poller that invokes refresh() every REFRESH_INTERVAL ms; shut down in dispose().
	RefreshThread thread;
	// Single trace holding the averaged response-time samples.
	Trace avgTrace;

	// Width of the visible x-axis time window (5 minutes).
	static long TIME_RANGE = DateUtil.MILLIS_PER_FIVE_MINUTE;
	// Polling period in milliseconds (2 seconds).
	static int REFRESH_INTERVAL = (int) (DateUtil.MILLIS_PER_SECOND * 2);

	// Canvas hosting the draw2d figure and the chart painted onto it.
	FigureCanvas canvas;
	XYGraph xyGraph;

	/**
	 * Reads the collector server id out of the view's secondary id so one view
	 * class can be opened once per server.
	 */
	public void init(IViewSite site) throws PartInitException {
		super.init(site);
		String secId = site.getSecondaryId();
		serverId = CastUtil.cint(secId);
	}

	/**
	 * Builds the chart UI (canvas, axes, trace) and starts the refresh thread.
	 * Must run on the SWT UI thread (standard ViewPart contract).
	 */
	public void createPartControl(Composite parent) {
		Server server = ServerManager.getInstance().getServer(serverId);
		this.setPartName("ResponseTime[" + server.getName() + "]");
		GridLayout layout = new GridLayout(1, true);
		layout.marginHeight = 5;
		layout.marginWidth = 5;
		parent.setLayout(layout);
		parent.setBackground(ColorUtil.getInstance().getColor(SWT.COLOR_WHITE));
		parent.setBackgroundMode(SWT.INHERIT_FORCE);
		canvas = new FigureCanvas(parent);
		canvas.setLayoutData(new GridData(GridData.FILL_BOTH));
		canvas.setScrollBarVisibility(FigureCanvas.NEVER);
		// Keep the chart figure sized to the canvas instead of scrolling it.
		canvas.addControlListener(new ControlListener() {
			public void controlMoved(ControlEvent arg0) {
			}

			public void controlResized(ControlEvent arg0) {
				Rectangle r = canvas.getClientArea();
				xyGraph.setSize(r.width, r.height);
			}
		});
		xyGraph = new XYGraph();
		xyGraph.setShowLegend(true);
		xyGraph.setShowTitle(false);
		canvas.setContents(xyGraph);
		// X axis shows wall-clock time, Y axis auto-scales to the data.
		xyGraph.primaryXAxis.setDateEnabled(true);
		xyGraph.primaryXAxis.setShowMajorGrid(true);
		xyGraph.primaryYAxis.setAutoScale(true);
		xyGraph.primaryYAxis.setShowMajorGrid(true);
		xyGraph.primaryXAxis.setFormatPattern("HH:mm:ss");
		xyGraph.primaryYAxis.setFormatPattern("#,##0");
		xyGraph.primaryXAxis.setTitle("");
		xyGraph.primaryYAxis.setTitle("");
		// Lets the user drag on the Y axis to change its visible range.
		xyGraph.primaryYAxis.addMouseListener(new RangeMouseListener(getViewSite().getShell(), xyGraph.primaryYAxis));
		// Ring buffer sized to exactly one TIME_RANGE window of samples.
		CircularBufferDataProvider avgProvider = new CircularBufferDataProvider(true);
		avgProvider.setBufferSize(((int) (TIME_RANGE / REFRESH_INTERVAL) + 1));
		avgProvider.setCurrentXDataArray(new double[] {});
		avgProvider.setCurrentYDataArray(new double[] {});
		avgTrace = new Trace("Response Time(ms)", xyGraph.primaryXAxis, xyGraph.primaryYAxis, avgProvider);
		avgTrace.setPointStyle(PointStyle.NONE);
		avgTrace.setTraceType(TraceType.AREA);
		avgTrace.setLineWidth(PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH));
		avgTrace.setTraceColor(ColorUtil.getInstance().getColor(SWT.COLOR_DARK_GREEN));
		xyGraph.addTrace(avgTrace);
		// Horizontal drag-select on the plot opens the SQL digest table for that range.
		ScouterUtil.addHorizontalRangeListener(xyGraph.getPlotArea(), new OpenDigestTableAction(serverId), false);
		thread = new RefreshThread(this, REFRESH_INTERVAL);
		thread.start();
	}

	public void setFocus() {
	}

	/** Stops the polling thread when the view is closed. */
	@Override
	public void dispose() {
		super.dispose();
		if (this.thread != null) {
			this.thread.shutdown();
		}
	}

	// Conversion factor applied to the raw server value before plotting as ms.
	// 1 ps = 1e-9 ms, so this assumes the server reports picoseconds —
	// NOTE(review): TODO confirm the unit against the collector side.
	static double PICO_TO_MS = Math.pow(10, -9);

	/**
	 * Called by the RefreshThread (NOT the UI thread): fetches the current
	 * response-time value from every live MariaDB-plugin agent, averages them,
	 * and appends one sample to the chart. All widget updates are marshalled
	 * onto the UI thread via ExUtil.exec(canvas, ...).
	 */
	public void refresh() {
		TcpProxy tcp = TcpProxy.getTcpProxy(serverId);
		List<Pack> list = null;
		try {
			MapPack param = new MapPack();
			ListValue objHashLv = AgentModelThread.getInstance().getLiveObjHashLV(serverId, CounterConstants.MARIA_PLUGIN);
			if (objHashLv.size() > 0) {
				param.put("objHash", objHashLv);
				list = tcp.process(RequestCmd.DB_REALTIME_RESPONSE_TIME, param);
			}
		} catch (Exception e) {
			ConsoleProxy.errorSafe(e.toString());
		} finally {
			// Proxy is pooled; always return it.
			TcpProxy.putTcpProxy(tcp);
		}
		if (list == null || list.size() == 0) {
			// No live agents (or request failed): mark the view inactive but
			// keep the time axis scrolling.
			ExUtil.exec(canvas, new Runnable() {
				public void run() {
					setTitleImage(Images.inactive);
					long now = TimeUtil.getCurrentTime(serverId);
					long stime = now - TIME_RANGE;
					xyGraph.primaryXAxis.setRange(stime, now + 1);
				}
			});
		} else {
			// Average the per-agent values, converted to milliseconds.
			double sum = 0.0d;
			for (Pack p : list) {
				MapPack m = (MapPack) p;
				sum += m.getFloat("value");
			}
			final double avg = sum * PICO_TO_MS / list.size();
			ExUtil.exec(canvas, new Runnable() {
				public void run() {
					setTitleImage(Images.active);
					long now = TimeUtil.getCurrentTime(serverId);
					long stime = now - TIME_RANGE;
					xyGraph.primaryXAxis.setRange(stime, now + 1);
					((CircularBufferDataProvider) avgTrace.getDataProvider()).addSample(new Sample(now, avg));
					// Re-pin the Y range to [0, max] so the area chart baseline stays at zero.
					double max = ChartUtil.getMax(((CircularBufferDataProvider) avgTrace.getDataProvider()).iterator());
					xyGraph.primaryYAxis.setRange(0, max);
				}
			});
		}
	}
}
sncap/scouter
scouter.client/src/scouter/client/maria/views/DbRealtimeTotalResponseView.java
Java
apache-2.0
7,143
/*
 * (C) Copyright 2015-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *   ohun@live.cn (夜色)
 */

package com.mpush.tools;

import com.mpush.tools.thread.NamedThreadFactory;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SingleThreadEventLoop;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.ThreadProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.*;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.regex.Pattern;

/**
 * Miscellaneous helpers: local/extranet IP discovery, header (de)serialization,
 * TCP health checks and thread-pool introspection.
 *
 * Created by ohun on 2015/12/25.
 *
 * @author ohun@live.cn
 */
public final class Utils {
    private static final Logger LOGGER = LoggerFactory.getLogger(Utils.class);

    // Cached on first lookup and never invalidated; a NIC change needs a restart.
    private static String LOCAL_IP;
    // Matches 127.x.x.x loopback addresses.
    private static final Pattern LOCAL_IP_PATTERN = Pattern.compile("127(\\.\\d{1,3}){3}$");
    private static String EXTRANET_IP;
    private static final NamedThreadFactory NAMED_THREAD_FACTORY = new NamedThreadFactory();

    /** Creates a new thread with the given name via the shared named factory. */
    public static Thread newThread(String name, Runnable target) {
        return NAMED_THREAD_FACTORY.newThread(name, target);
    }

    /**
     * Returns true if {@code host} denotes the local machine:
     * null/empty, "localhost", "0.0.0.0" or a 127.x.x.x loopback address.
     */
    public static boolean isLocalHost(String host) {
        return host == null
                || host.length() == 0
                || host.equalsIgnoreCase("localhost")
                || host.equals("0.0.0.0")
                || (LOCAL_IP_PATTERN.matcher(host).matches());
    }

    /** Lazily resolves and caches the site-local (LAN) IP of this machine. */
    public static String lookupLocalIp() {
        if (LOCAL_IP == null) {
            LOCAL_IP = getInetAddress(true);
        }
        return LOCAL_IP;
    }

    /**
     * Finds the first network interface carrying a site-local IPv4 address.
     *
     * @throws RuntimeException if no such interface exists or enumeration fails
     */
    public static NetworkInterface getLocalNetworkInterface() {
        Enumeration<NetworkInterface> interfaces;
        try {
            interfaces = NetworkInterface.getNetworkInterfaces();
        } catch (SocketException e) {
            throw new RuntimeException("NetworkInterface not found", e);
        }
        while (interfaces.hasMoreElements()) {
            NetworkInterface networkInterface = interfaces.nextElement();
            Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
            while (addresses.hasMoreElements()) {
                InetAddress address = addresses.nextElement();
                if (address.isLoopbackAddress()) continue;
                // ':' in the textual form means IPv6 — skip, we want IPv4 only.
                if (address.getHostAddress().contains(":")) continue;
                if (address.isSiteLocalAddress()) return networkInterface;
            }
        }
        throw new RuntimeException("NetworkInterface not found");
    }

    /**
     * Resolves a host name to an address, rethrowing resolution failures
     * as IllegalArgumentException.
     */
    public static InetAddress getInetAddress(String host) {
        try {
            return InetAddress.getByName(host);
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("UnknownHost " + host, e);
        }
    }

    /**
     * Returns the IP address bound to the first matching network interface.
     *
     * @param getLocal true for the site-local (LAN) address, false for a
     *                 non-site-local (extranet) address
     * @return the IPv4 address; "127.0.0.1" (if getLocal) or null when none found
     */
    public static String getInetAddress(boolean getLocal) {
        try {
            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
            while (interfaces.hasMoreElements()) {
                Enumeration<InetAddress> addresses = interfaces.nextElement().getInetAddresses();
                while (addresses.hasMoreElements()) {
                    InetAddress address = addresses.nextElement();
                    if (address.isLoopbackAddress()) continue;
                    // Skip IPv6 addresses; only IPv4 is reported.
                    if (address.getHostAddress().contains(":")) continue;
                    if (getLocal) {
                        if (address.isSiteLocalAddress()) {
                            return address.getHostAddress();
                        }
                    } else {
                        if (!address.isSiteLocalAddress() && !address.isLoopbackAddress()) {
                            return address.getHostAddress();
                        }
                    }
                }
            }
            LOGGER.debug("getInetAddress is null, getLocal={}", getLocal);
            return getLocal ? "127.0.0.1" : null;
        } catch (Throwable e) {
            LOGGER.error("getInetAddress exception", e);
            return getLocal ? "127.0.0.1" : null;
        }
    }

    /** Lazily resolves and caches the extranet (non-site-local) IP, may be null. */
    public static String lookupExtranetIp() {
        if (EXTRANET_IP == null) {
            EXTRANET_IP = getInetAddress(false);
        }
        return EXTRANET_IP;
    }

    /**
     * Serializes headers as repeated "name:value\n" lines.
     *
     * @return the serialized form, or null when the map is null or empty
     */
    public static String headerToString(Map<String, String> headers) {
        if (headers != null && headers.size() > 0) {
            StringBuilder sb = new StringBuilder(headers.size() * 64);
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                sb.append(entry.getKey())
                        .append(':')
                        .append(entry.getValue()).append('\n');
            }
            return sb.toString();
        }
        return null;
    }

    /**
     * Parses the "name:value\n" format produced by {@link #headerToString(Map)}.
     * Lines without a ':' (or with an empty name) are skipped; a header with no
     * text after the ':' maps to null.
     *
     * @param headersString serialized headers, may be null
     * @return parsed headers, or null when the input is null
     */
    public static Map<String, String> headerFromString(String headersString) {
        if (headersString == null) return null;
        Map<String, String> headers = new HashMap<>();
        int L = headersString.length();
        for (int i = 0, start = 0; i < L; i++) {
            char c = headersString.charAt(i);
            if (c != '\n') continue;
            if (start >= L - 1) break;
            String header = headersString.substring(start, i);
            start = i + 1;
            int index = header.indexOf(':');
            if (index <= 0) continue;
            String name = header.substring(0, index);
            // BUGFIX: value must be reset for every header line. Previously it
            // was declared outside the loop, so a header like "k:" (nothing
            // after the colon) silently inherited the previous header's value.
            String value = null;
            if (index < header.length() - 1) {
                value = header.substring(index + 1);
            }
            headers.put(name, value);
        }
        return headers;
    }

    /**
     * Returns true when a TCP connection to ip:port succeeds within 1 second.
     */
    public static boolean checkHealth(String ip, int port) {
        // BUGFIX: try-with-resources — the original only closed the socket on
        // the success path, leaking it whenever connect() threw.
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(ip, port), 1000);
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /** Snapshot of a ThreadPoolExecutor's sizing and queue state for monitoring. */
    public static Map<String, Object> getPoolInfo(ThreadPoolExecutor executor) {
        Map<String, Object> info = new HashMap<>(5);
        info.put("corePoolSize", executor.getCorePoolSize());
        info.put("maxPoolSize", executor.getMaximumPoolSize());
        info.put("activeCount(workingThread)", executor.getActiveCount());
        info.put("poolSize(workThread)", executor.getPoolSize());
        info.put("queueSize(blockedTask)", executor.getQueue().size());
        return info;
    }

    /** Snapshot of a netty EventLoopGroup's loop count, running loops and pending tasks. */
    public static Map<String, Object> getPoolInfo(EventLoopGroup executors) {
        Map<String, Object> info = new HashMap<>(3);
        int poolSize = 0, queueSize = 0, activeCount = 0;
        for (EventExecutor e : executors) {
            poolSize++;
            if (e instanceof SingleThreadEventLoop) {
                SingleThreadEventLoop executor = (SingleThreadEventLoop) e;
                queueSize += executor.pendingTasks();
                ThreadProperties tp = executor.threadProperties();
                // A RUNNABLE loop thread is counted as "active".
                if (tp.state() == Thread.State.RUNNABLE) {
                    activeCount++;
                }
            }
        }
        info.put("poolSize(workThread)", poolSize);
        info.put("activeCount(workingThread)", activeCount);
        info.put("queueSize(blockedTask)", queueSize);
        return info;
    }
}
hongjun117/mpush
mpush-tools/src/main/java/com/mpush/tools/Utils.java
Java
apache-2.0
8,007
package org.vaadin.addons.excelexporter.utils; import java.awt.Color; import org.apache.poi.ss.usermodel.BorderStyle; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.xssf.usermodel.XSSFCellStyle; import org.apache.poi.xssf.usermodel.XSSFColor; import org.apache.poi.xssf.usermodel.extensions.XSSFCellBorder.BorderSide; import com.vaadin.shared.ui.grid.GridStaticCellType; import com.vaadin.ui.components.grid.FooterCell; import com.vaadin.ui.components.grid.HeaderCell; public class ExcelStyleUtil { private ExcelStyleUtil() { } /** * Sets the borders. * * @param headerCellStyle * the header cell style * @param left * the left * @param right * the right * @param top * the top * @param bottom * the bottom * @param color * the color * @return the XSSF cell style */ public static XSSFCellStyle setBorders(final XSSFCellStyle headerCellStyle, final Boolean left, final Boolean right, final Boolean top, final Boolean bottom, final Color color) { if (bottom) { headerCellStyle.setBorderBottom(BorderStyle.THIN); headerCellStyle.setBorderColor(BorderSide.BOTTOM, new XSSFColor(color)); } if (top) { headerCellStyle.setBorderTop(BorderStyle.THIN); headerCellStyle.setBorderColor(BorderSide.TOP, new XSSFColor(color)); } if (left) { headerCellStyle.setBorderLeft(BorderStyle.THIN); headerCellStyle.setBorderColor(BorderSide.LEFT, new XSSFColor(color)); } if (right) { headerCellStyle.setBorderRight(BorderStyle.THIN); headerCellStyle.setBorderColor(BorderSide.RIGHT, new XSSFColor(color)); } return headerCellStyle; } /** * Adds the generic grid header row configured in the header configs * * @param gridHeaderCell * the grid header cell * @param myCell * the my cell */ public static void addGenericGridHeaderRow(final HeaderCell gridHeaderCell, final Cell myCell) { if (gridHeaderCell.getCellType() .equals(GridStaticCellType.TEXT)) { myCell.setCellValue(gridHeaderCell.getText()); } else if (gridHeaderCell.getCellType() .equals(GridStaticCellType.HTML)) { 
myCell.setCellValue(gridHeaderCell.getHtml()); } else if (gridHeaderCell.getCellType() .equals(GridStaticCellType.WIDGET)) { myCell.setCellValue(gridHeaderCell.getComponent() .toString()); } } /** * Adds the generic grid footer row configured in the footer configs * * @param gridHeaderCell * the grid header cell * @param myCell * the my cell */ public static void addGenericGridFooterRow(final FooterCell gridHeaderCell, final Cell myCell) { if (gridHeaderCell.getCellType() .equals(GridStaticCellType.TEXT)) { myCell.setCellValue(gridHeaderCell.getText()); } else if (gridHeaderCell.getCellType() .equals(GridStaticCellType.HTML)) { myCell.setCellValue(gridHeaderCell.getHtml()); } else if (gridHeaderCell.getCellType() .equals(GridStaticCellType.WIDGET)) { myCell.setCellValue(gridHeaderCell.getComponent() .toString()); } } }
bonprix/vaadin-excel-exporter
vaadin-excel-exporter/src/main/java/org/vaadin/addons/excelexporter/utils/ExcelStyleUtil.java
Java
apache-2.0
3,213
/* Copyright Adaptris Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.adaptris.core.services.conditional.operator; import com.adaptris.annotation.AdapterComponent; import com.adaptris.annotation.ComponentProfile; import com.adaptris.core.AdaptrisMessage; import com.adaptris.core.services.conditional.Condition; import com.adaptris.core.services.conditional.Operator; import com.thoughtworks.xstream.annotations.XStreamAlias; /** * <p> * This {@link Operator} simply tests a single value is not null. * </p> * <p> * The value used in the not-null test is the {@link Condition} that this {@link Operator} is * configured for; which could be the message payload or a metadata item for example. <br/> * </p> * * @config not-null * @author amcgrath * */ @XStreamAlias("not-null") @AdapterComponent @ComponentProfile(summary = "Tests that a value exists (is not null).", tag = "conditional,operator") public class NotNull implements Operator { @Override public boolean apply(AdaptrisMessage message, String object) { return object != null; } public String toString() { return "is not null"; } }
adaptris/interlok
interlok-core/src/main/java/com/adaptris/core/services/conditional/operator/NotNull.java
Java
apache-2.0
1,661
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.db.wal.crc; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.configuration.WALMode; import org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager; import org.apache.ignite.internal.pagemem.wal.WALIterator; import org.jetbrains.annotations.NotNull; /** * */ public class IgniteReplayWalIteratorInvalidCrcTest extends IgniteAbstractWalIteratorInvalidCrcTest { /** {@inheritDoc} */ @NotNull @Override protected WALMode getWalMode() { return WALMode.LOG_ONLY; } /** {@inheritDoc} */ @Override protected WALIterator getWalIterator( IgniteWriteAheadLogManager walMgr, boolean ignoreArchiveDir ) throws IgniteCheckedException { if (ignoreArchiveDir) throw new UnsupportedOperationException( "Cannot invoke \"getWalIterator\" with true \"ignoreArchiveDir\" parameter value." ); else return walMgr.replay(null); } /** * {@inheritDoc} * Case is not relevant to the replay iterator. */ @Override public void testNotTailCorruptedPtr() { } }
amirakhmedov/ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/db/wal/crc/IgniteReplayWalIteratorInvalidCrcTest.java
Java
apache-2.0
1,986
/* * #! * % * Copyright (C) 2014 - 2015 Humboldt-Universität zu Berlin * % * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #_ */ package storm.lrb.bolt; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.util.HashMap; import java.util.Map; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import storm.lrb.tools.TupleHelpers; import backtype.storm.Config; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.topology.OutputFieldsDeclarer; import backtype.storm.topology.base.BaseRichBolt; import backtype.storm.tuple.Tuple; /** * This bolt bufferwrites all recieved tuples to the given filename in the STORM_LOCAL_DIR. The buffersize can be * adjusted with the constructor. 
(flushes every two minutes, this can be adjusted by changing the * TOPOLOGY_TICK_TUPLE_FREQ_SECS) */ public class FileWriterBolt extends BaseRichBolt { private static final long serialVersionUID = 1L; private final static Logger LOGGER = LoggerFactory.getLogger(FileWriterBolt.class); private PrintWriter printwriter; private Writer filewriter; private Writer bufferedwriter; private OutputCollector collector; private final String filename; private final boolean local; private int bufferfactor = 2; public FileWriterBolt(String filename, int bufferfactor) { this.filename = filename; this.local = false; this.bufferfactor = bufferfactor; } public FileWriterBolt(String filename, boolean local) { this.filename = filename; this.local = local; } /** * set the bufferfactor higher if a the rate of emitting tuples is expected to be high * * @param filename * @param bufferfactor * @param local * activate some buffer tweaks when the bolt runnings locally */ public FileWriterBolt(String filename, int bufferfactor, boolean local) { this.filename = filename; this.local = local; this.bufferfactor = bufferfactor; } @Override public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext topologyContext, OutputCollector outputCollector) { this.collector = outputCollector; this.printwriter = null; String path = (String)conf.get(Config.STORM_LOCAL_DIR); DateTime dt = new DateTime(); String b = dt.toString("hh-mm-ss"); String fileuri = path + "/" + this.filename + "_" + b + ".out"; LOGGER.debug("Writing to file '%s'", fileuri); try { this.filewriter = new FileWriter(fileuri); this.bufferedwriter = new BufferedWriter(this.filewriter, this.bufferfactor * 1024); this.printwriter = new PrintWriter(this.bufferedwriter); } catch(FileNotFoundException e) { throw new RuntimeException(e); } catch(UnsupportedEncodingException e) { throw new RuntimeException(e); } catch(IOException e) { throw new RuntimeException(e); } } @Override public void execute(Tuple tuple) { 
if(TupleHelpers.isTickTuple(tuple)) { this.printwriter.flush(); } else { Object value = tuple.getValue(0); this.printwriter.println(value); this.collector.ack(tuple); if(this.local) { this.printwriter.flush(); } } } @Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {} @Override public Map<String, Object> getComponentConfiguration() { Map<String, Object> conf = new HashMap<String, Object>(); conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 70); return conf; } @Override public void cleanup() { this.printwriter.flush(); this.printwriter.close(); try { this.bufferedwriter.close(); this.filewriter.close(); } catch(IOException e) { throw new RuntimeException(e); } super.cleanup(); } }
krichter722/aeolus
queries/lrb/src/main/java/storm/lrb/bolt/FileWriterBolt.java
Java
apache-2.0
4,420
/* * Copyright 2012 Athens Team * * This file to you under the Apache License, version 2.0 (the "License"); you * may not use this file except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.eincs.athens.message; import java.io.Serializable; import com.eincs.pantheon.message.PanteonRequest; import com.eincs.pantheon.message.attach.PanteonCookies; import com.eincs.pantheon.message.attach.PanteonParams; /** * @author Jung-Haeng Lee */ public class AthensRequest implements Serializable { private static final long serialVersionUID = -5509385458282534203L; public static AthensRequest create(long reqSeq, AnalyzeTargetKey targetKey, PanteonRequest request) { AthensRequest result = new AthensRequest(); result.setRequestSeq(reqSeq); result.setTargetKey(targetKey); result.setParams(request.getParams()); result.setCookies(request.getCookies()); result.setTags(AthensTags.create()); return result; } private long requestSeq; private AnalyzeTargetKey targetKey; private PanteonParams params; private PanteonCookies cookies; private AthensTags tags; public long getRequestSeq() { return requestSeq; } public void setRequestSeq(long requestSeq) { this.requestSeq = requestSeq; } public AnalyzeTargetKey getTargetKey() { return targetKey; } public void setTargetKey(AnalyzeTargetKey targetKey) { this.targetKey = targetKey; } public PanteonParams getParams() { return params; } public void setParams(PanteonParams params) { this.params = params; } public PanteonCookies getCookies() { return cookies; } public void setCookies(PanteonCookies cookies) { this.cookies = cookies; } public AthensTags 
getTags() { return tags; } public void setTags(AthensTags tags) { this.tags = tags; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("["); sb.append("seq:").append(requestSeq).append(", "); sb.append("key:").append(targetKey).append(", "); sb.append("params:").append(params).append(", "); sb.append("cookies:").append(cookies).append(", "); sb.append("tags:").append(tags).append(", "); sb.append("]"); return sb.toString(); } }
athens-team/athena
athens-common/src/com/eincs/athens/message/AthensRequest.java
Java
apache-2.0
2,602
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.client; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwtjsonrpc.client.HostPageCache; import com.google.gwtjsonrpc.client.RemoteJsonService; interface HostPageDataService extends RemoteJsonService { @HostPageCache(name = "gerrit_hostpagedata_obj", once = true) void load(AsyncCallback<HostPageData> callback); }
GerritCodeReview/gerrit-attic
src/main/java/com/google/gerrit/client/HostPageDataService.java
Java
apache-2.0
979
/* * Copyright 2016 Johannes Donath <johannesd@torchmind.com> * and other copyright owners as documented in the project's IP log. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.basinmc.maven.plugins.minecraft.launcher; import java.util.NoSuchElementException; import javax.annotation.Nonnull; /** * Provides a list of known (and technically unknown) types of releases as returned by the launcher * metadata API. * * @author <a href="mailto:johannesd@torchmind.com">Johannes Donath</a> */ public enum VersionType { /** * Represents an outdated alpha version of the game which was released during the game's * development phase. */ OLD_ALPHA, /** * Represents an outdated beta version of the game which was released during the game's * development phase. */ OLD_BETA, /** * Represents an untested version of the game which will be promoted to a {@link #RELEASE} * when all of its features are implemented and its contents are sufficiently stable. */ SNAPSHOT, /** * Represents a modern (semi supported) version of the game. */ RELEASE, /** * A release type which has not yet classified within our client implementation. */ UNKNOWN; /** * Retrieves a version type based on a string representation. */ @Nonnull public static VersionType fromString(@Nonnull String version) { try { return valueOf(version.toUpperCase()); } catch (NoSuchElementException ex) { return UNKNOWN; } } }
BasinMC/minecraft-maven-plugin
src/main/java/org/basinmc/maven/plugins/minecraft/launcher/VersionType.java
Java
apache-2.0
2,095
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.juli;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Formatter;
import java.util.logging.LogRecord;

/**
 * Provides same information as default log format but on a single line to make
 * it easier to grep the logs. The only exception is stacktraces which are
 * always preceded by whitespace to make it simple to skip them.
 */
/*
 * Date processing based on AccessLogValve.
 */
public class OneLineFormatter extends Formatter {

    /** Separator inserted before stack trace lines (newline + indent). */
    private static final String ST_SEP = System.lineSeparator() + " ";
    // NOTE(review): "UNKONWN" is a long-standing typo; kept as-is in this
    // documentation-only pass since the constant is private.
    private static final String UNKONWN_THREAD_NAME = "Unknown thread with ID ";
    /** Lock guarding lazy initialization of {@link #threadMxBean}. */
    private static final Object threadMxBeanLock = new Object();
    // volatile: read outside the lock in the double-checked locking below.
    private static volatile ThreadMXBean threadMxBean = null;
    private static final int THREAD_NAME_CACHE_SIZE = 10000;
    // Per-thread LRU-ish cache (bounded LinkedHashMap) of log-record thread ID
    // -> thread name, to avoid repeated ThreadMXBean lookups.
    private static ThreadLocal<LinkedHashMap<Integer,String>> threadNameCache =
            new ThreadLocal<LinkedHashMap<Integer,String>>() {

        @Override
        protected LinkedHashMap<Integer,String> initialValue() {
            return new LinkedHashMap<Integer,String>() {

                private static final long serialVersionUID = 1L;

                @Override
                protected boolean removeEldestEntry(
                        Entry<Integer, String> eldest) {
                    // Evict the oldest entry once the cache exceeds its cap.
                    return (size() > THREAD_NAME_CACHE_SIZE);
                }
            };
        }
    };

    /* Timestamp format */
    private static final String timeFormat = "dd-MMM-yyyy HH:mm:ss";

    /**
     * The size of our global date format cache
     */
    private static final int globalCacheSize = 30;

    /**
     * The size of our thread local date format cache
     */
    private static final int localCacheSize = 5;

    /**
     * Global date format cache.
     */
    private static final DateFormatCache globalDateCache =
            new DateFormatCache(globalCacheSize, timeFormat, null);

    /**
     * Thread local date format cache.
     */
    private static final ThreadLocal<DateFormatCache> localDateCache =
            new ThreadLocal<DateFormatCache>() {
        @Override
        protected DateFormatCache initialValue() {
            return new DateFormatCache(localCacheSize, timeFormat, globalDateCache);
        }
    };

    /**
     * Formats a record as a single line:
     * {@code timestamp SEVERITY [thread] source.method message} followed by an
     * indented stack trace when a throwable is attached.
     */
    @Override
    public String format(LogRecord record) {
        StringBuilder sb = new StringBuilder();

        // Timestamp
        addTimestamp(sb, record.getMillis());

        // Severity
        sb.append(' ');
        sb.append(record.getLevel());

        // Thread
        sb.append(' ');
        sb.append('[');
        if (Thread.currentThread() instanceof AsyncFileHandler.LoggerThread) {
            // If using the async handler can't get the thread name from the
            // current thread.
            sb.append(getThreadName(record.getThreadID()));
        } else {
            sb.append(Thread.currentThread().getName());
        }
        sb.append(']');

        // Source
        sb.append(' ');
        sb.append(record.getSourceClassName());
        sb.append('.');
        sb.append(record.getSourceMethodName());

        // Message
        sb.append(' ');
        sb.append(formatMessage(record));

        // Stack trace
        if (record.getThrown() != null) {
            sb.append(ST_SEP);
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            record.getThrown().printStackTrace(pw);
            pw.close();
            sb.append(sw.getBuffer());
        }

        // New line for next record
        sb.append(System.lineSeparator());

        return sb.toString();
    }

    /**
     * Appends the formatted timestamp plus a zero-padded millisecond fraction
     * (".007", ".042", ".123") to the buffer.
     */
    protected void addTimestamp(StringBuilder buf, long timestamp) {
        buf.append(localDateCache.get().getFormat(timestamp));
        long frac = timestamp % 1000;
        buf.append('.');
        if (frac < 100) {
            if (frac < 10) {
                buf.append('0');
                buf.append('0');
            } else {
                buf.append('0');
            }
        }
        buf.append(frac);
    }

    /**
     * LogRecord has threadID but no thread name.
     * LogRecord uses an int for thread ID but thread IDs are longs.
     * If the real thread ID > (Integer.MAXVALUE / 2) LogRecord uses it's own
     * ID in an effort to avoid clashes due to overflow.
     * <p>
     * Words fail me to describe what I think of the design decision to use an
     * int in LogRecord for a long value and the resulting mess that follows.
     */
    private static String getThreadName(int logRecordThreadId) {
        Map<Integer,String> cache = threadNameCache.get();
        String result = null;

        // Only synthetic (overflowed) IDs are looked up in the cache first;
        // real IDs fall through to the MXBean lookup below.
        if (logRecordThreadId > (Integer.MAX_VALUE / 2)) {
            result = cache.get(Integer.valueOf(logRecordThreadId));
        }

        if (result != null) {
            return result;
        }

        if (logRecordThreadId > Integer.MAX_VALUE / 2) {
            // Synthetic ID: the real thread ID is unrecoverable.
            result = UNKONWN_THREAD_NAME + logRecordThreadId;
        } else {
            // Double checked locking OK as threadMxBean is volatile
            if (threadMxBean == null) {
                synchronized (threadMxBeanLock) {
                    if (threadMxBean == null) {
                        threadMxBean = ManagementFactory.getThreadMXBean();
                    }
                }
            }
            ThreadInfo threadInfo =
                    threadMxBean.getThreadInfo(logRecordThreadId);
            if (threadInfo == null) {
                // Thread already terminated; fall back to the raw ID
                // (not cached, so a restarted lookup can still succeed).
                return Long.toString(logRecordThreadId);
            }
            result = threadInfo.getThreadName();
        }

        cache.put(Integer.valueOf(logRecordThreadId), result);

        return result;
    }
}
nkasvosve/beyondj
apache-jsp/src/main/java/org/apache/juli/OneLineFormatter.java
Java
apache-2.0
6,909
package edu.cs4730.opengl30cube; import android.content.Context; import android.opengl.GLSurfaceView; import android.view.MotionEvent; /* * simple extention of the GLsurfaceview. basically setup to use opengl 3.0 * and set some configs. This would be where the touch listener is setup to do something. * * It also declares and sets the render. */ public class myGlSurfaceView extends GLSurfaceView { myRenderer myRender; public myGlSurfaceView(Context context) { super(context); // Create an OpenGL ES 3.0 context. setEGLContextClientVersion(3); super.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Set the Renderer for drawing on the GLSurfaceView myRender = new myRenderer(context); setRenderer(myRender); // Render the view only when there is a change in the drawing data setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); } //private final float TOUCH_SCALE_FACTOR = 180.0f / 320; private static final float TOUCH_SCALE_FACTOR = 0.015f; private float mPreviousX; private float mPreviousY; @Override public boolean onTouchEvent(MotionEvent e) { // MotionEvent reports input details from the touch screen // and other input controls. In this case, you are only // interested in events where the touch position changed. float x = e.getX(); float y = e.getY(); switch (e.getAction()) { case MotionEvent.ACTION_MOVE: float dx = x - mPreviousX; //subtract, so the cube moves the same direction as your finger. //with plus it moves the opposite direction. myRender.setX(myRender.getX() - (dx * TOUCH_SCALE_FACTOR)); float dy = y - mPreviousY; myRender.setY(myRender.getY() - (dy * TOUCH_SCALE_FACTOR)); } mPreviousX = x; mPreviousY = y; return true; } }
JimSeker/opengl
OpenGL30Cube/app/src/main/java/edu/cs4730/opengl30cube/myGlSurfaceView.java
Java
apache-2.0
1,974
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Created on Feb 8, 2005 * */ package ghidra.app.plugin.processors.sleigh.expression; import ghidra.app.plugin.processors.sleigh.ParserWalker; import ghidra.app.plugin.processors.sleigh.SleighLanguage; import ghidra.program.model.address.Address; import ghidra.program.model.mem.MemoryAccessException; import ghidra.xml.XmlPullParser; /** * * * The integer offset of the address following the current instruction */ public class EndInstructionValue extends PatternValue { private static final int HASH = "[inst_next]".hashCode(); @Override public int hashCode() { return HASH; } @Override public boolean equals(Object obj) { return obj instanceof EndInstructionValue; } /* (non-Javadoc) * @see ghidra.app.plugin.processors.sleigh.expression.PatternValue#minValue() */ @Override public long minValue() { return 0; } /* (non-Javadoc) * @see ghidra.app.plugin.processors.sleigh.expression.PatternValue#maxValue() */ @Override public long maxValue() { return 0; } /* (non-Javadoc) * @see ghidra.app.plugin.processors.sleigh.expression.PatternExpression#getValue(ghidra.app.plugin.processors.sleigh.ParserWalker) */ @Override public long getValue(ParserWalker walker) throws MemoryAccessException { Address addr = walker.getNaddr(); return addr.getAddressableWordOffset(); } /* (non-Javadoc) * @see ghidra.app.plugin.processors.sleigh.PatternExpression#restoreXml(org.jdom.Element) */ @Override public void restoreXml(XmlPullParser 
parser, SleighLanguage lang) { parser.discardSubTree("end_exp"); // Nothing to do } @Override public String toString() { return "[inst_next]"; } }
NationalSecurityAgency/ghidra
Ghidra/Framework/SoftwareModeling/src/main/java/ghidra/app/plugin/processors/sleigh/expression/EndInstructionValue.java
Java
apache-2.0
2,240
/* * Copyright 1999-2018 Alibaba Group Holding Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.csp.sentinel.adapter.dubbo; import org.apache.dubbo.common.constants.CommonConstants; import org.apache.dubbo.common.extension.Activate; import org.apache.dubbo.rpc.Filter; import org.apache.dubbo.rpc.Invocation; import org.apache.dubbo.rpc.Invoker; import org.apache.dubbo.rpc.Result; import org.apache.dubbo.rpc.RpcContext; import org.apache.dubbo.rpc.RpcException; import static org.apache.dubbo.common.constants.CommonConstants.CONSUMER; /** * Puts current consumer's application name in the attachment of each invocation. * * @author Eric Zhao */ @Activate(group = CONSUMER) public class DubboAppContextFilter implements Filter { @Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { String application = invoker.getUrl().getParameter(CommonConstants.APPLICATION_KEY); if (application != null) { RpcContext.getContext().setAttachment(DubboUtils.SENTINEL_DUBBO_APPLICATION_KEY, application); } return invoker.invoke(invocation); } }
alibaba/Sentinel
sentinel-adapter/sentinel-apache-dubbo-adapter/src/main/java/com/alibaba/csp/sentinel/adapter/dubbo/DubboAppContextFilter.java
Java
apache-2.0
1,679
package com.pengrad.telegrambot.passport; import java.io.Serializable; import java.util.Arrays; /** * Stas Parshin * 31 July 2018 */ public class SecureValue implements Serializable { private final static long serialVersionUID = 0L; private DataCredentials data; private FileCredentials front_side; private FileCredentials reverse_side; private FileCredentials selfie; private FileCredentials[] translation; private FileCredentials[] files; public DataCredentials data() { return data; } public FileCredentials frontSide() { return front_side; } public FileCredentials reverseSide() { return reverse_side; } public FileCredentials selfie() { return selfie; } public FileCredentials[] translation() { return translation; } public FileCredentials[] files() { return files; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SecureValue that = (SecureValue) o; if (data != null ? !data.equals(that.data) : that.data != null) return false; if (front_side != null ? !front_side.equals(that.front_side) : that.front_side != null) return false; if (reverse_side != null ? !reverse_side.equals(that.reverse_side) : that.reverse_side != null) return false; if (selfie != null ? !selfie.equals(that.selfie) : that.selfie != null) return false; // Probably incorrect - comparing Object[] arrays with Arrays.equals if (!Arrays.equals(translation, that.translation)) return false; // Probably incorrect - comparing Object[] arrays with Arrays.equals return Arrays.equals(files, that.files); } @Override public int hashCode() { int result = data != null ? data.hashCode() : 0; result = 31 * result + (front_side != null ? front_side.hashCode() : 0); result = 31 * result + (reverse_side != null ? reverse_side.hashCode() : 0); result = 31 * result + (selfie != null ? 
selfie.hashCode() : 0); result = 31 * result + Arrays.hashCode(translation); result = 31 * result + Arrays.hashCode(files); return result; } @Override public String toString() { return "SecureValue{" + "data=" + data + ", front_side=" + front_side + ", reverse_side=" + reverse_side + ", selfie=" + selfie + ", translation=" + Arrays.toString(translation) + ", files=" + Arrays.toString(files) + '}'; } }
pengrad/java-telegram-bot-api
library/src/main/java/com/pengrad/telegrambot/passport/SecureValue.java
Java
apache-2.0
2,682
package com.amp.coclogger.gui.autonexter; import java.awt.event.WindowEvent; import java.awt.event.WindowListener; import javax.swing.JFrame; import org.apache.log4j.Logger; import com.amp.coclogger.prefs.PrefName; public class NexterWindowListener implements WindowListener { private static final Logger logger = Logger.getLogger(NexterWindowListener.class); private JFrame frame; public NexterWindowListener(JFrame frame){ this.frame = frame; } @Override public void windowOpened(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowClosing(WindowEvent e) { logger.debug(String.format("Saving location %d,%d %dx%d", frame.getX(), frame.getY(), frame.getWidth(), frame.getHeight())); PrefName.AUTO_X.putInt(frame.getX()); PrefName.AUTO_Y.putInt(frame.getY()); PrefName.AUTO_WIDTH.putInt(frame.getWidth()); PrefName.AUTO_HEIGHT.putInt(frame.getHeight()); } @Override public void windowClosed(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowIconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowDeiconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowActivated(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowDeactivated(WindowEvent e) { // TODO Auto-generated method stub } }
apritchard/coc-logger
coclogger/src/main/java/com/amp/coclogger/gui/autonexter/NexterWindowListener.java
Java
apache-2.0
1,422
/* * Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.commons.hierarchy; import java.util.Map; import java.util.Map.Entry; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; /** * @param <T> value type * */ public class EntryValuePredicate<T> implements Predicate<Map.Entry<T, T>> { private final T value; public EntryValuePredicate(T value) { Preconditions.checkNotNull(value, "Value must not be null."); this.value = value; } @Override public boolean apply(Entry<T, T> input) { return value.equals(input.getValue()); } }
IHTSDO/snow-owl
commons/com.b2international.commons.base/src/com/b2international/commons/hierarchy/EntryValuePredicate.java
Java
apache-2.0
1,182
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.nd4j.linalg.api.ops;

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.base.Preconditions;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.shape.LongShapeDescriptor;

import java.util.Collections;
import java.util.List;

/**
 * Base class for element-wise transform operations that produce a boolean
 * (BOOL) output. Concrete subclasses provide the actual transform; this
 * class fixes the op type to TRANSFORM_BOOL, the result datatype to BOOL,
 * and implements the shared datatype/shape bookkeeping.
 *
 * NOTE(review): the class implements {@code TransformSameOp} even though its
 * output type differs from its input type — confirm this is intentional.
 */
public abstract class BaseTransformBoolOp extends BaseTransformOp implements TransformSameOp {

    // --- Constructors: all simply delegate to BaseTransformOp. ---

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2) {
        super(sameDiff, i_v1, i_v2);
    }

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2, boolean inPlace) {
        super(sameDiff, i_v1, i_v2, inPlace);
    }

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) {
        super(sameDiff, i_v, inPlace);
    }

    public BaseTransformBoolOp(SameDiff sameDiff) {
        super(sameDiff);
    }

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2, Object[] extraArgs) {
        super(sameDiff, i_v1, i_v2, extraArgs);
    }

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, Object[] extraArgs) {
        super(sameDiff, i_v, shape, inPlace, extraArgs);
    }

    public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v, Object[] extraArgs) {
        super(sameDiff, i_v, extraArgs);
    }

    public BaseTransformBoolOp(INDArray x, INDArray z) {
        super(x, z);
    }

    public BaseTransformBoolOp(INDArray x, INDArray y, INDArray z) {
        super(x, y, z);
    }

    public BaseTransformBoolOp() {
        super();
    }

    public BaseTransformBoolOp(INDArray x) {
        super(x);
    }

    @Override
    public Type getOpType() {
        return Type.TRANSFORM_BOOL;
    }

    @Override
    public Type opType() {
        return Type.TRANSFORM_BOOL;
    }

    /** Boolean transforms always yield BOOL, regardless of input datatypes. */
    @Override
    public DataType resultType() {
        return DataType.BOOL;
    }

    /**
     * Validates that X and Y (when present) share a datatype, and that Z
     * (when present) is of boolean type.
     *
     * @param experimentalMode unused here; part of the validation contract
     * @return always {@code true} (failures throw via Preconditions)
     */
    @Override
    public boolean validateDataTypes(boolean experimentalMode) {
        if (y() != null)
            // NOTE(review): message args use the field `y` directly
            // (`y.dataType()`) while the check uses the accessor `y()` —
            // presumably equivalent, but confirm and unify.
            Preconditions.checkArgument(x().dataType() == y().dataType(), "Op.X must be the same type as Op.Y: " +
                    "x.datatype=%s, y.datatype=%s", x().dataType(), y.dataType());


        if (z() != null)
            Preconditions.checkArgument(z().isB(),"Op.Z type must be bool: z.datatype=%s for op %s", z().dataType(), getClass());

        return true;
    }

    /**
     * Output has the same shape as X but with BOOL datatype; empty when X
     * has not been set yet.
     */
    @Override
    public List<LongShapeDescriptor> calculateOutputShape() {
        if(x == null)
            return Collections.emptyList();

        return Collections.singletonList(LongShapeDescriptor.fromShape(x.shape(), DataType.BOOL));
    }

    @Override
    public List<org.nd4j.linalg.api.buffer.DataType> calculateOutputDataTypes(List<org.nd4j.linalg.api.buffer.DataType> dataTypes){
        //All bool transform ops: always bool output type
        SDVariable[] args = args();
        Preconditions.checkState(dataTypes != null && dataTypes.size() == args.length, "Expected exactly %s input datatype(s) for %s, got input %s", args.length, getClass(), dataTypes);
        return Collections.singletonList(DataType.BOOL);
    }
}
RobAltena/deeplearning4j
nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformBoolOp.java
Java
apache-2.0
3,995
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ /* * WormsLike_Main.java * * Created on 26 juin 2011, 12:03:13 */ package wormslike; import java.io.File; import java.util.HashMap; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.UIManager; import javax.swing.UIManager.LookAndFeelInfo; import wormslike_game.Game_Frame; import wormslike_mapeditor.MapEditor_Main; import wormslike_menu.Menu_Main; import wormslike_menu.VideoSettings_Menu; import wormslike_sound.Game_Sound; /** * CLASSE PRINCIPALE * @author wallouf */ public class WormsLike_Main extends javax.swing.JFrame { public static int THEME = 0; public static Game_Sound sound; /** Creates new form WormsLike_Main */ //CREE LE NOM, LA TAILLE, LES OPTIONS, ET LE STYLE DE LA FENETRE public WormsLike_Main() { try { boolean find = false; for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) { if ("Metal".equals(info.getName())) { UIManager.setLookAndFeel(info.getClassName()); find = true; break; } } if (!find) { UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel"); } } catch (Exception e) { } initComponents(); this.setResizable(false); this.setTitle("World Like v1.0 (0.9.4)"); this.setSize(1000, 650); this.setLocationRelativeTo(null); this.setContentPane(mM); sound = new Game_Sound(); Game_Sound.playMenu(); THEME = 0; } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 959, Short.MAX_VALUE) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 642, Short.MAX_VALUE) ); pack(); }// </editor-fold>//GEN-END:initComponents /** * @param args the command line arguments */ public static void main(String args[]) { java.awt.EventQueue.invokeLater(new Runnable() { @Override public void run() { new WormsLike_Main().setVisible(true); } }); } //PERMET DE GERER LE MENU ET LES DIFFERENTS SOUS MENUS public void changeView(String name) { //on reviens sur le menu if (name.equalsIgnoreCase("retour_video")) { mM.setSelection(0); this.setContentPane(mM); //on lance une partie deux joueurs humains et on cache le menu (plus de ressources) } else if (name.equalsIgnoreCase("two")) { int option = JOptionPane.showConfirmDialog(null, "Please select a map with .xml.gz extension", "Select your map", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE); if (option == JOptionPane.OK_OPTION) { jfc.setCurrentDirectory(new File("./bin/maps/")); if (jfc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION)//retourne une valeur si yes ou ok est choisit { HashMap<Integer, Integer> mapPos; if (jfc.getSelectedFile().getName().endsWith(".xml.gz")) { mapPos = (HashMap<Integer, Integer>) MapEditor_Main.decodeFromFileGz(jfc.getSelectedFile().getPath()); } else { System.out.println("Please select a good path."); return; } if (gF != null && gF.isVisible()) { gF.dispose(); } Game_Sound.stopMenu(); gF = new Game_Frame(this, mapPos, false, false); this.setVisible(false); } } //on 
lance une partie avec un joueur humain et une IA et on cache le menu } else if (name.equalsIgnoreCase("one")) { int option = JOptionPane.showConfirmDialog(null, "Please select a map with .xml.gz extension", "Select your map", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE); if (option == JOptionPane.OK_OPTION) { jfc.setCurrentDirectory(new File("./bin/maps/")); if (jfc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION)//retourne une valeur si yes ou ok est choisit { HashMap<Integer, Integer> mapPos; if (jfc.getSelectedFile().getName().endsWith(".xml.gz")) { mapPos = (HashMap<Integer, Integer>) MapEditor_Main.decodeFromFileGz(jfc.getSelectedFile().getPath()); } else { System.out.println("Please select a good path."); return; } if (gF != null && gF.isVisible()) { gF.dispose(); } Game_Sound.stopMenu(); gF = new Game_Frame(this, mapPos, false, true); this.setVisible(false); } } //on affiche le menu video } else if (name.equalsIgnoreCase("video settings")) { this.setContentPane(vSM); //on affiche le createur de map } else if (name.equalsIgnoreCase("map editor")) { if (mapEditor != null && mapEditor.isVisible()) { mapEditor.dispose(); } mapEditor = new MapEditor_Main(); mapEditor.setVisible(true); //on quitte le logiciel } else if (name.equalsIgnoreCase("exit")) { Game_Sound.stopMenu(); System.exit(0); } this.validate(); this.repaint(); } // Variables declaration - do not modify//GEN-BEGIN:variables // End of variables declaration//GEN-END:variables Menu_Main mM = new Menu_Main(this); VideoSettings_Menu vSM = new VideoSettings_Menu(this, THEME); Game_Frame gF; MapEditor_Main mapEditor; private JFileChooser jfc = new JFileChooser(""); }
wallouf/wormsLike
src/wormslike/WormsLike_Main.java
Java
apache-2.0
6,869
//Copyright 2016 Kelly Honsinger
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.

import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import com.treestar.flowjo.engine.FEML;
import com.treestar.flowjo.engine.Query;
import com.treestar.flowjo.engine.SimpleQueryCallback;
import com.treestar.flowjo.engine.utility.EPluginHelper;
import com.treestar.lib.FJPluginHelper;
import com.treestar.lib.PluginHelper;
import com.treestar.lib.fcs.ParameterUtil;
import com.treestar.lib.fjml.FJML;
import com.treestar.lib.fjml.types.DisplayType;
import com.treestar.lib.xml.GatingML;
import com.treestar.lib.xml.SElement;

/**
 * Gating Plugin: Creates contour gates based on user specified percentage.
 * Workflow: {@link #getContourPolygons} asks the FlowJo engine for contour
 * polygons, {@link #Reorder} sorts them by size, and {@link #SetUp} builds a
 * Gating-ML polygon gate per contour, keeping the one whose population
 * percentage is closest to the user-requested percentage.
 *
 * @author kelly
 * 7/13/2016
 */
public class GateProcessor {

	/**
	 * Function takes List of lists of points and reorders list from lowest
	 * list size to highest list size (selection sort by polygon size).
	 *
	 * NOTE(review): this MUTATES the argument — every element is removed
	 * from {@code vals}, which is empty on return; callers appear to rely
	 * on the returned list only, but confirm before changing.
	 *
	 * @param vals polygons to sort; emptied as a side effect
	 * @return new list with the same polygons in ascending-size order
	 */
	public List<List<Point>> Reorder( List<List<Point>> vals)
	{
		ArrayList<List<Point>> newList= new ArrayList<List<Point>>(vals.size());
		int temp=0;
		while (!vals.isEmpty())
		{
			temp = 0;
			// locate the index of the smallest remaining polygon
			for (int j = 0; j < vals.size(); j++)
			{
				if ( vals.get(temp).size() > vals.get(j).size())
				{
					temp = j;
				}
			}
			newList.add(vals.remove(temp));
		}
		return newList;
	}

	/*
	 */
	/**
	 * Function takes gate-set points, parent element, and parameters.
	 * It then constructs a Gating-ML element based on these points
	 * and passes this object into the compute function. Compute function computes
	 * population count inbetween gate set defined in XML and returns it as an integer.
	 * This integer is then divided by the parent population's count producing a percentage.
	 * The percentage is compared to the parent's and kept if it is reasonably close to it.
	 * The function iterates through all gate sets and returns the best fitting one.
	 *
	 * Early exit: a gate within 0.5 percentage points of the requested value
	 * is returned immediately; otherwise the closest gate seen wins.
	 *
	 * @param vals candidate contour polygons (one gate is tried per polygon)
	 * @param fcmlElem parent population element used for sample URI and event count
	 * @param fXParm x-axis parameter name, possibly with a " : stain" suffix
	 * @param fYParm y-axis parameter name, possibly with a " : stain" suffix
	 * @return the Gating-ML element whose population percentage best matches the target
	 */
	public SElement SetUp(List<List<Point>> vals, SElement fcmlElem, String fXParm, String fYParm)
	{
		SElement gate = new SElement(GatingML.gatingML2);
		int retrievalIndex =0;
		SElement currentBest = new SElement(gate);
		double lowestDifference= 100;
		for(retrievalIndex =0; retrievalIndex <vals.size(); retrievalIndex++)
		{
			if (!vals.isEmpty())
			{
				String sampleURI = EPluginHelper.getSampleURI(fcmlElem);
				SElement polyGate = new SElement(GatingML.PolygonGate);
				gate.addContent(polyGate);
				polyGate.setString(GatingML.id, GuiFrontEnd.getCellSampleName());
				// x dimension of the polygon gate
				SElement dElem = new SElement(GatingML.dimension);
				polyGate.addContent(dElem);
				SElement tElem = new SElement(GatingML.FCS_DIMENSION);
				dElem.addContent(tElem);
				try{
					// strip the " : stain" suffix from the parameter name if present
					tElem.setString(GatingML.NAME, fXParm.substring(0, fXParm.indexOf(" :")));
				}
				catch(StringIndexOutOfBoundsException e)
				{
					// no " :" suffix — use the name as-is
					tElem.setString(GatingML.NAME, fXParm);
				}
				// y dimension of the polygon gate
				dElem = new SElement(GatingML.dimension);
				polyGate.addContent(dElem);
				tElem = new SElement(GatingML.FCS_DIMENSION);
				dElem.addContent(tElem);
				try{
					tElem.setString(GatingML.NAME, fYParm.substring(0, fYParm.indexOf(" :")));
				}
				catch(StringIndexOutOfBoundsException e)
				{
					tElem.setString(GatingML.NAME, fYParm);
				}
				gate = Compute(gate, polyGate, vals, retrievalIndex, sampleURI);
				int populationCOunt = CompareResult(fcmlElem, gate);
				double popPercentage = (double)populationCOunt/PluginHelper.getNumExportedEvents(fcmlElem);
				// absolute distance (in percentage points) from the user-requested percentage
				popPercentage = Math.abs((popPercentage*100)-(double)GuiFrontEnd.getPercentageVal());
				if (popPercentage <.5 )
				{
					// close enough — accept this gate immediately
					return gate;
				}
				if (popPercentage < lowestDifference)
				{
					lowestDifference = popPercentage;
					currentBest= new SElement(gate);
				}
				// clear the gate element so the next iteration starts fresh
				gate.removeContent();
			}
		}
		return currentBest;
	}

	/**
	 * Function constructs a Gating-ML SElement which contains points of all
	 * vertices on the perimeter. Pixel coordinates (256x256 plot, y inverted)
	 * are converted to scale values via the FlowJo plugin helper.
	 *
	 * @param gate root Gating-ML element (returned unchanged; vertices go into polyGate)
	 * @param polyGate polygon gate element receiving one vertex per point
	 * @param vals all candidate polygons
	 * @param retrievalIndex which polygon of {@code vals} to convert
	 * @param sampleURI sample the channel-to-scale conversion is performed against
	 * @return the {@code gate} element passed in
	 */
	public SElement Compute(SElement gate, SElement polyGate, List<List<Point>> vals, int retrievalIndex, String sampleURI){
		for (Point pt : vals.get(retrievalIndex))
		{
			double x=0;
			double y=0;
			try{
				// strip the " : stain" suffix from the selected x parameter if present
				x= FJPluginHelper.channelToScale(sampleURI, GuiFrontEnd.getSelectX().substring(0, GuiFrontEnd.selectX.indexOf(" :")), pt.x, 256);
			}
			catch(StringIndexOutOfBoundsException e)
			{
				x = FJPluginHelper.channelToScale(sampleURI, GuiFrontEnd.getSelectX(), pt.x, 256);
			}
			try{
				// y axis is flipped in the 256-pixel plot, hence 256-pt.y
				y= FJPluginHelper.channelToScale(sampleURI, GuiFrontEnd.getSelectY().substring(0, GuiFrontEnd.selectY.indexOf(" :")), 256-pt.y, 256);
			}
			catch(StringIndexOutOfBoundsException e)
			{
				y = FJPluginHelper.channelToScale(sampleURI, GuiFrontEnd.getSelectY(), 256-pt.y, 256);
			}
			SElement gv = new SElement(GatingML.vertex);
			polyGate.addContent(gv);
			SElement gc = new SElement(GatingML.coordinate);
			gv.addContent(gc);
			gc.setDouble(GatingML.value, x);
			gc = new SElement(GatingML.coordinate);
			gv.addContent(gc);
			gc.setDouble(GatingML.value, y);
		}
		return gate;
	}

	/**
	 * Function constructs a XML SElement that is recognized by the flowjo
	 * engine as a request for a statistic. This statistic is specifically the
	 * population gated around. This is returned in a call back type and the
	 * value is extracted from it as an integer.
	 *
	 * NOTE(review): the count is extracted by slicing the result XML's string
	 * form between "value" and "gateId" and deleting non-digits — fragile if
	 * the engine's serialization changes; any failure returns 0.
	 *
	 * @param parentPop parent population element (cloned; not modified)
	 * @param gate Gating-ML element containing the polygon gate to count inside
	 * @return event count inside the gate, or 0 on any failure
	 */
	public static int CompareResult(SElement parentPop, SElement gate)
	{
		try{
			SElement polyCopy = new SElement(gate.getChild(GatingML.PolygonGate));
			SElement gateCopy = new SElement(FJML.Gate);
			gateCopy.addContent(polyCopy);
			SElement parentCopy = new SElement(parentPop.getChild(FEML.FcmlQuery));
			parentCopy.removeChild(FJML.ExternalPopNode);
			SElement stat = new SElement(FJML.Statistic);
			stat.setString(FJML.name, "Count");
			parentCopy.addContent(gateCopy);
			parentCopy.addContent(stat);
			SElement tempCopy = new SElement(parentPop);
			tempCopy.removeChild(FEML.FcmlQuery);
			tempCopy.removeChild(FJML.ExternalPopNode);
			tempCopy.addContent(parentCopy);
			// synchronous query against the FlowJo engine
			SimpleQueryCallback callback = new SimpleQueryCallback();
			Query query = new Query(tempCopy, callback);
			query.executeQuery();
			SElement data= callback.getResultElement();
			data = data.getChild(FEML.FcmlQuery);
			String strVersion = data.toString();
			// extract the digits of the "value" attribute preceding "gateId"
			String values = strVersion.substring(strVersion.indexOf("value"), strVersion.indexOf("gateId"));
			values = values.replaceAll("\\D+","");
			return Integer.parseInt(values);
		}
		catch(Exception e)
		{
			// best-effort: treat any parsing/engine failure as an empty gate
			return 0;
		}
	}

	/**
	 * This function creates an XML element that is passed as a request to the
	 * flowjo engine. Results requested is a list of list of points that
	 * represents all the coordinates for each cluster on a contour map.
	 * The returned SVG path data is parsed: only "M" (move-to) and "L"
	 * (line-to) commands contribute vertices.
	 *
	 * @param fcmlElem population element to query (cloned; not modified)
	 * @param paramXName x-axis parameter name (stain suffix is stripped)
	 * @param paramYName y-axis parameter name (stain suffix is stripped)
	 * @param level contour level setting passed to the graph request
	 * @return one list of points per contour polygon, or null if the engine
	 *         response is missing any expected element
	 */
	public List<List<Point>> getContourPolygons(SElement fcmlElem, String paramXName, String paramYName, String level)
	{
		paramXName = ParameterUtil.stripStainName(paramXName);
		paramYName = ParameterUtil.stripStainName(paramYName);
		SElement queryElem = new SElement(fcmlElem);
		SElement fcmlQueryElem = queryElem.getChild(FEML.FcmlQuery);
		if (fcmlQueryElem == null)
			return null;
		fcmlQueryElem.removeChild(FJML.ExternalPopNode);
		// build a smoothed contour-graph request on the two chosen axes
		SElement graphElem = new SElement(FJML.Graph);
		fcmlQueryElem.addContent(graphElem);
		graphElem.setBool(FJML.smoothing, true);
		graphElem.setBool("fast", true);
		graphElem.setString(FJML.type, DisplayType.Contour.toString());
		SElement axis = new SElement(FJML.Axis);
		graphElem.addContent(axis);
		axis.setString(FJML.dimension, FJML.x);
		axis.setString(FJML.name, paramXName);
		axis = new SElement(FJML.Axis);
		graphElem.addContent(axis);
		axis.setString(FJML.dimension, FJML.y);
		axis.setString(FJML.name, paramYName);
		SElement settings = new SElement(FJML.GraphSettings);
		graphElem.addContent(settings);
		settings.setString(FJML.level, level);
		SimpleQueryCallback callback = new SimpleQueryCallback();
		Query query = new Query(queryElem, callback);
		query.executeQuery();
		SElement queryResult = callback.getResultElement();
		fcmlQueryElem = queryResult.getChild(FEML.FcmlQuery);
		if (fcmlQueryElem == null)
			return null;
		graphElem = fcmlQueryElem.getChild(FJML.Graph);
		if (graphElem == null)
			return null;
		graphElem = graphElem.getChild(FJML.svg);
		if (graphElem == null)
			return null;
		List<List<Point>> result = new ArrayList<List<Point>>();
		for (SElement pathElem : graphElem.getChildren(FJML.path))
		{
			String pts = pathElem.getString(FJML.d);
			if (pts.isEmpty())
				continue;
			List<Point> polyPts = new ArrayList<Point>();
			result.add(polyPts);
			StringTokenizer tokenizer = new StringTokenizer(pts);
			while (tokenizer.hasMoreTokens())
			{
				String token = tokenizer.nextToken();
				// only move-to ("M") and line-to ("L") commands carry vertices
				if ("M".equals(token) || ("L".equals(token)))
				{
					String xVal = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "";
					String yVal = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "";
					if (xVal.isEmpty() || yVal.isEmpty())
						continue;
					polyPts.add(new Point(Integer.parseInt(xVal), Integer.parseInt(yVal)));
				}
			}
		}
		return result;
	}
}
FlowJo-LLC/AutoGating-Plugin
TestFlowjoPlugin/src/GateProcessor.java
Java
apache-2.0
11,568
package org.deeplearning4j.spark.models.embeddings.word2vec; import org.apache.commons.lang3.tuple.Pair; import org.apache.spark.api.java.function.FlatMapFunction; import org.apache.spark.broadcast.Broadcast; import org.deeplearning4j.models.word2vec.VocabWord; import org.deeplearning4j.models.word2vec.wordstore.VocabCache; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import scala.Tuple2; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicLong; /** * @author jeffreytang * @author raver119@gmail.com */ public class SecondIterationFunction implements FlatMapFunction< Iterator<Tuple2<List<VocabWord>, Long>>, Entry<VocabWord, INDArray> > { private int ithIteration = 1; private int vectorLength; private boolean useAdaGrad; private int batchSize = 0; private double negative; private int window; private double alpha; private double minAlpha; private long totalWordCount; private long seed; private int maxExp; private double[] expTable; private int iterations; private AtomicLong nextRandom = new AtomicLong(5); private volatile VocabCache<VocabWord> vocab; private transient volatile NegativeHolder negativeHolder; private transient volatile VocabHolder vocabHolder; private AtomicLong cid = new AtomicLong(0); private AtomicLong aff = new AtomicLong(0); public SecondIterationFunction(Broadcast<Map<String, Object>> word2vecVarMapBroadcast, Broadcast<double[]> expTableBroadcast, Broadcast<VocabCache<VocabWord>> vocabCacheBroadcast) { Map<String, Object> word2vecVarMap = word2vecVarMapBroadcast.getValue(); this.expTable = expTableBroadcast.getValue(); this.vectorLength = (int) word2vecVarMap.get("vectorLength"); this.useAdaGrad = (boolean) word2vecVarMap.get("useAdaGrad"); this.negative = (double) word2vecVarMap.get("negative"); this.window = (int) word2vecVarMap.get("window"); this.alpha = (double) word2vecVarMap.get("alpha"); 
this.minAlpha = (double) word2vecVarMap.get("minAlpha"); this.totalWordCount = (long) word2vecVarMap.get("totalWordCount"); this.seed = (long) word2vecVarMap.get("seed"); this.maxExp = (int) word2vecVarMap.get("maxExp"); this.iterations = (int) word2vecVarMap.get("iterations"); this.batchSize = (int) word2vecVarMap.get("batchSize"); // this.indexSyn0VecMap = new HashMap<>(); // this.pointSyn1VecMap = new HashMap<>(); this.vocab = vocabCacheBroadcast.getValue(); if (this.vocab == null) throw new RuntimeException("VocabCache is null"); } @Override public Iterable<Entry<VocabWord, INDArray>> call(Iterator<Tuple2<List<VocabWord>, Long>> pairIter) { this.vocabHolder = VocabHolder.getInstance(); this.vocabHolder.setSeed(seed, vectorLength); if (negative > 0) { negativeHolder = NegativeHolder.getInstance(); negativeHolder.initHolder(vocab, expTable, this.vectorLength); } while (pairIter.hasNext()) { List<Pair<List<VocabWord>, Long>> batch = new ArrayList<>(); while (pairIter.hasNext() && batch.size() < batchSize) { Tuple2<List<VocabWord>, Long> pair = pairIter.next(); List<VocabWord> vocabWordsList = pair._1(); Long sentenceCumSumCount = pair._2(); batch.add(Pair.of(vocabWordsList, sentenceCumSumCount)); } for (int i = 0; i < iterations; i++) { //System.out.println("Training sentence: " + vocabWordsList); for (Pair<List<VocabWord>, Long> pair: batch) { List<VocabWord> vocabWordsList = pair.getKey(); Long sentenceCumSumCount = pair.getValue(); double currentSentenceAlpha = Math.max(minAlpha, alpha - (alpha - minAlpha) * (sentenceCumSumCount / (double) totalWordCount)); trainSentence(vocabWordsList, currentSentenceAlpha); } } } return vocabHolder.getSplit(vocab); } public void trainSentence(List<VocabWord> vocabWordsList, double currentSentenceAlpha) { if (vocabWordsList != null && !vocabWordsList.isEmpty()) { for (int ithWordInSentence = 0; ithWordInSentence < vocabWordsList.size(); ithWordInSentence++) { // Random value ranging from 0 to window size 
nextRandom.set(Math.abs(nextRandom.get() * 25214903917L + 11)); int b = (int) (long) this.nextRandom.get() % window; VocabWord currentWord = vocabWordsList.get(ithWordInSentence); if (currentWord != null) { skipGram(ithWordInSentence, vocabWordsList, b, currentSentenceAlpha); } } } } public void skipGram(int ithWordInSentence, List<VocabWord> vocabWordsList, int b, double currentSentenceAlpha) { VocabWord currentWord = vocabWordsList.get(ithWordInSentence); if (currentWord != null && !vocabWordsList.isEmpty()) { int end = window * 2 + 1 - b; for (int a = b; a < end; a++) { if (a != window) { int c = ithWordInSentence - window + a; if (c >= 0 && c < vocabWordsList.size()) { VocabWord lastWord = vocabWordsList.get(c); iterateSample(currentWord, lastWord, currentSentenceAlpha); } } } } } public void iterateSample(VocabWord w1, VocabWord w2, double currentSentenceAlpha) { if (w1 == null || w2 == null || w2.getIndex() < 0 || w2.getIndex() == w1.getIndex()) return; final int currentWordIndex = w2.getIndex(); // error for current word and context INDArray neu1e = Nd4j.create(vectorLength); // First iteration Syn0 is random numbers INDArray l1 = vocabHolder.getSyn0Vector(currentWordIndex, vocab); // for (int i = 0; i < w1.getCodeLength(); i++) { int code = w1.getCodes().get(i); int point = w1.getPoints().get(i); if(point < 0) throw new IllegalStateException("Illegal point " + point); // Point to INDArray syn1 = vocabHolder.getSyn1Vector(point); /* if (pointSyn1VecMap.containsKey(point)) { syn1 = pointSyn1VecMap.get(point); } else { syn1 = Nd4j.zeros(1, vectorLength); // 1 row of vector length of zeros pointSyn1VecMap.put(point, syn1); } */ // Dot product of Syn0 and Syn1 vecs double dot = Nd4j.getBlasWrapper().level1().dot(vectorLength, 1.0, l1, syn1); if (dot < -maxExp || dot >= maxExp) continue; int idx = (int) ((dot + maxExp) * ((double) expTable.length / maxExp / 2.0)); if (idx > expTable.length) continue; //score double f = expTable[idx]; //gradient double g = (1 - 
code - f) * (useAdaGrad ? w1.getGradient(i, currentSentenceAlpha, currentSentenceAlpha) : currentSentenceAlpha); Nd4j.getBlasWrapper().level1().axpy(vectorLength, g, syn1, neu1e); Nd4j.getBlasWrapper().level1().axpy(vectorLength, g, l1, syn1); } int target = w1.getIndex(); int label; //negative sampling if(negative > 0) for (int d = 0; d < negative + 1; d++) { if (d == 0) label = 1; else { nextRandom.set(Math.abs(nextRandom.get() * 25214903917L + 11)); int idx = Math.abs((int) (nextRandom.get() >> 16) % negativeHolder.getTable().length()); target = negativeHolder.getTable().getInt(idx); if (target <= 0) target = (int) nextRandom.get() % (vocab.numWords() - 1) + 1; if (target == w1.getIndex()) continue; label = 0; } if(target >= negativeHolder.getSyn1Neg().rows() || target < 0) continue; double f = Nd4j.getBlasWrapper().dot(l1,negativeHolder.getSyn1Neg().slice(target)); double g; if (f > maxExp) g = useAdaGrad ? w1.getGradient(target, (label - 1), alpha) : (label - 1) * alpha; else if (f < -maxExp) g = label * (useAdaGrad ? w1.getGradient(target, alpha, alpha) : alpha); else { int idx = (int) ((f + maxExp) * (expTable.length / maxExp / 2)); if (idx >= expTable.length) continue; g = useAdaGrad ? w1.getGradient(target, label - expTable[idx], alpha) : (label - expTable[idx]) * alpha; } Nd4j.getBlasWrapper().level1().axpy(vectorLength, g, negativeHolder.getSyn1Neg().slice(target),neu1e); Nd4j.getBlasWrapper().level1().axpy(vectorLength, g, l1,negativeHolder.getSyn1Neg().slice(target)); } // Updated the Syn0 vector based on gradient. Syn0 is not random anymore. 
Nd4j.getBlasWrapper().level1().axpy(vectorLength, 1.0f, neu1e, l1); //VocabWord word = vocab.elementAtIndex(currentWordIndex); //indexSyn0VecMap.put(word, l1); } private INDArray getRandomSyn0Vec(int vectorLength, long lseed) { /* we use wordIndex as part of seed here, to guarantee that during word syn0 initialization on dwo distinct nodes, initial weights will be the same for the same word */ return Nd4j.rand(lseed * seed, new int[]{1 ,vectorLength}).subi(0.5).divi(vectorLength); } }
xuzhongxing/deeplearning4j
deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java
Java
apache-2.0
10,201
/* $Id$ * $URL: https://dev.almende.com/svn/abms/coala-common/src/main/java/com/almende/coala/process/BasicProcessorStatus.java $ * * Part of the EU project Adapt4EE, see http://www.adapt4ee.eu/ * * @license * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * Copyright (c) 2010-2013 Almende B.V. */ package io.coala.process; import io.coala.lifecycle.MachineStatus; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; public enum BasicProcessorStatus implements ProcessorStatus<BasicProcessorStatus> { /** */ CREATED, /** */ IDLE, /** */ BUSY, ; private final Collection<BasicProcessorStatus> permittedNext; private BasicProcessorStatus(BasicProcessorStatus... 
premittedNextStatus) { if (premittedNextStatus == null || premittedNextStatus.length == 0) this.permittedNext = Collections.emptySet(); else this.permittedNext = Collections.unmodifiableSet(EnumSet .copyOf(Arrays.asList(premittedNextStatus))); } /** @see MachineStatus#getPermittedTransitions() */ //@Override public Collection<BasicProcessorStatus> getPermittedTransitions() { return this.permittedNext; } /** @see MachineStatus#permitsTransitionFrom(MachineStatus) */ @Override public boolean permitsTransitionFrom(final BasicProcessorStatus status) { return status.getPermittedTransitions().contains(this); } /** @see MachineStatus#permitsTransitionTo(MachineStatus) */ @Override public boolean permitsTransitionTo(final BasicProcessorStatus status) { return getPermittedTransitions().contains(status); } }
krevelen/coala
coala-core/src/main/java/io/coala/process/BasicProcessorStatus.java
Java
apache-2.0
2,105
package org.support.project.web.util; import java.util.Locale; import org.support.project.common.config.Resources; import org.support.project.common.log.Log; import org.support.project.common.log.LogFactory; import org.support.project.di.Container; import org.support.project.di.DI; import org.support.project.di.Instance; import org.support.project.web.bean.LoginedUser; import org.support.project.web.config.CommonWebParameter; /** * Thread = リクエスト 単位でリソースを切り替える必要があるので、それの制御を楽にするためのクラス * * @author Koda * */ @DI(instance = Instance.Singleton) public class ThreadResources { private static final Log LOG = LogFactory.getLog(ThreadResources.class); public static ThreadResources get() { return Container.getComp(ThreadResources.class); } public Resources getResources() { LoginedUser loginedUser = (LoginedUser) ThredUserPool.get().getInfo(CommonWebParameter.LOGIN_USER_INFO_SESSION_KEY); if (loginedUser != null && loginedUser.getLocale() != null) { return Resources.getInstance(loginedUser.getLocale()); } return Resources.getInstance(Locale.getDefault()); } }
support-project/knowledge
src/main/java/org/support/project/web/util/ThreadResources.java
Java
apache-2.0
1,232
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2010 psiinon@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.view;

import java.awt.CardLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;

import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPasswordField;

import org.parosproxy.paros.Constant;
import org.parosproxy.paros.model.OptionsParam;
import org.parosproxy.paros.network.ConnectionParam;
import org.parosproxy.paros.view.AbstractParamPanel;
import org.zaproxy.zap.utils.FontUtils;
import org.zaproxy.zap.utils.ZapTextField;

/**
 * Options panel for the outgoing-proxy ("proxy chain") authentication
 * credentials: realm, user name and password, plus a "show password" toggle.
 * Loads its values from {@link ConnectionParam} in {@link #initParam(Object)}
 * and writes them back in {@link #saveParam(Object)}.
 */
public class OptionsConnectionPanel extends AbstractParamPanel {

    private static final long serialVersionUID = 1L;

    // Lazily-initialized Swing components (created on first getter call).
    private JPanel panelProxyAuth = null;
    private JPanel panelProxyChain = null;
    private ZapTextField txtProxyChainRealm = null;
    private ZapTextField txtProxyChainUserName = null;
    private JPasswordField txtProxyChainPassword = null;
    private JCheckBox chkShowPassword = null;
    // Owning dialog; used to pack() after load and to save-and-close on Enter
    // in the password field. NOTE(review): assumed to be set via
    // setProxyDialog() before initParam() is called — otherwise NPE.
    private ProxyDialog proxyDialog = null;
    // When true the password label reads "passprompt" instead of "password".
    private boolean prompting = false;

    public void setProxyDialog(ProxyDialog proxyDialog) {
        this.proxyDialog = proxyDialog;
    }

    /**
     * @param prompting whether the panel is shown as a prompt for the
     *            password (changes only the password label text)
     */
    public OptionsConnectionPanel(boolean prompting) {
        super();
        this.prompting = prompting;
        initialize();
    }

    public OptionsConnectionPanel() {
        super();
        initialize();
    }

    /**
     * This method initializes panelProxyAuth: a titled GridBag panel with
     * three label/field rows (realm, user name, password) and the
     * "show password" checkbox on a fourth row.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getPanelProxyAuth() {
        if (panelProxyAuth == null) {
            // One GridBagConstraints object per placed component; suffix
            // digits are legacy GUI-builder names, kept as-is.
            java.awt.GridBagConstraints gridBagConstraints82 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints72 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints62 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints52 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints42 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints31 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints21 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints16 = new GridBagConstraints();
            javax.swing.JLabel jLabel11 = new JLabel();
            javax.swing.JLabel jLabel10 = new JLabel();
            javax.swing.JLabel jLabel9 = new JLabel();

            panelProxyAuth = new JPanel();
            panelProxyAuth.setLayout(new GridBagLayout());
            jLabel9.setText(Constant.messages.getString("conn.options.proxy.auth.realm"));
            jLabel10.setText(Constant.messages.getString("conn.options.proxy.auth.username"));
            if (prompting) {
                jLabel11.setText(Constant.messages.getString("conn.options.proxy.auth.passprompt"));
            } else {
                jLabel11.setText(Constant.messages.getString("conn.options.proxy.auth.password"));
            }
            panelProxyAuth.setBorder(javax.swing.BorderFactory.createTitledBorder(null,
                    Constant.messages.getString("conn.options.proxy.auth.auth"),
                    javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                    javax.swing.border.TitledBorder.DEFAULT_POSITION,
                    FontUtils.getFont(FontUtils.Size.standard),
                    java.awt.Color.black));
            // NOTE(review): gridBagConstraints16 is configured but never used
            // in an add() call below — appears to be leftover builder output.
            gridBagConstraints16.gridx = 0;
            gridBagConstraints16.gridy = 0;
            gridBagConstraints16.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints16.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints16.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints16.gridwidth = 2;
            gridBagConstraints16.weightx = 1.0D;
            // Row 1: realm label (col 0) and realm text field (col 1).
            gridBagConstraints21.gridx = 0;
            gridBagConstraints21.gridy = 1;
            gridBagConstraints21.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints21.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints21.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints21.weightx = 0.5D;
            gridBagConstraints31.gridx = 1;
            gridBagConstraints31.gridy = 1;
            gridBagConstraints31.weightx = 0.5D;
            gridBagConstraints31.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints31.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints31.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints31.ipadx = 50;
            // Row 2: user-name label and field.
            gridBagConstraints42.gridx = 0;
            gridBagConstraints42.gridy = 2;
            gridBagConstraints42.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints42.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints42.weightx = 0.5D;
            gridBagConstraints42.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints52.gridx = 1;
            gridBagConstraints52.gridy = 2;
            gridBagConstraints52.weightx = 0.5D;
            gridBagConstraints52.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints52.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints52.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints52.ipadx = 50;
            // Row 3: password label and field.
            gridBagConstraints62.gridx = 0;
            gridBagConstraints62.gridy = 3;
            gridBagConstraints62.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints62.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints62.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints62.weightx = 0.5D;
            gridBagConstraints72.gridx = 1;
            gridBagConstraints72.gridy = 3;
            gridBagConstraints72.weightx = 0.5D;
            gridBagConstraints72.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints72.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints72.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints72.ipadx = 50;
            // Row 4: "show password" checkbox (col 1 only).
            gridBagConstraints82.gridx = 1;
            gridBagConstraints82.gridy = 4;
            gridBagConstraints82.weightx = 0.5D;
            gridBagConstraints82.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints82.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints82.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints82.ipadx = 50;

            panelProxyAuth.add(jLabel9, gridBagConstraints21);
            panelProxyAuth.add(getTxtProxyChainRealm(), gridBagConstraints31);
            panelProxyAuth.add(jLabel10, gridBagConstraints42);
            panelProxyAuth.add(getTxtProxyChainUserName(), gridBagConstraints52);
            panelProxyAuth.add(jLabel11, gridBagConstraints62);
            panelProxyAuth.add(getTxtProxyChainPassword(), gridBagConstraints72);
            panelProxyAuth.add(getChkShowPassword(), gridBagConstraints82);
        }
        return panelProxyAuth;
    }

    /**
     * This method initializes panelProxyChain: wraps the auth panel and a
     * greedy filler label so the auth panel stays pinned to the top.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getPanelProxyChain() {
        if (panelProxyChain == null) {
            panelProxyChain = new JPanel();
            java.awt.GridBagConstraints gridBagConstraints92 = new GridBagConstraints();
            javax.swing.JLabel jLabel8 = new JLabel();
            java.awt.GridBagConstraints gridBagConstraints102 = new GridBagConstraints();

            panelProxyChain.setLayout(new GridBagLayout());
            gridBagConstraints92.gridx = 0;
            gridBagConstraints92.gridy = 0;
            gridBagConstraints92.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints92.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints92.fill = java.awt.GridBagConstraints.HORIZONTAL;
            // Name is also used as the CardLayout key in initialize().
            panelProxyChain.setName("Proxy Chain");
            // Empty label that absorbs all remaining vertical/horizontal space.
            jLabel8.setText("");
            gridBagConstraints102.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints102.fill = java.awt.GridBagConstraints.BOTH;
            gridBagConstraints102.gridx = 0;
            gridBagConstraints102.gridy = 1;
            gridBagConstraints102.weightx = 1.0D;
            gridBagConstraints102.weighty = 1.0D;
            panelProxyChain.add(getPanelProxyAuth(), gridBagConstraints92);
            panelProxyChain.add(jLabel8, gridBagConstraints102);
        }
        return panelProxyChain;
    }

    /**
     * This method initializes this panel: a CardLayout holding the single
     * proxy-chain card, keyed by that panel's component name.
     */
    private void initialize() {
        this.setLayout(new CardLayout());
        this.setName(Constant.messages.getString("conn.options.title"));
        this.add(getPanelProxyChain(), getPanelProxyChain().getName());
    }

    /**
     * Loads realm and user name from the connection options into the fields
     * and resets the password visibility toggle. The password field itself is
     * intentionally not populated here.
     */
    @Override
    public void initParam(Object obj) {
        OptionsParam optionsParam = (OptionsParam) obj;
        ConnectionParam connectionParam = optionsParam.getConnectionParam();

        // set Proxy Chain parameters
        txtProxyChainRealm.setText(connectionParam.getProxyChainRealm());
        txtProxyChainRealm.discardAllEdits();
        txtProxyChainUserName.setText(connectionParam.getProxyChainUserName());
        txtProxyChainUserName.discardAllEdits();
        chkShowPassword.setSelected(false);//Default don't show (everytime)
        txtProxyChainPassword.setEchoChar('*');//Default mask (everytime)
        this.proxyDialog.pack();
    }

    /**
     * Writes realm, user name and password back to the connection options.
     * The password is explicitly not persisted to the config file (second
     * argument {@code false}).
     */
    @Override
    public void saveParam(Object obj) throws Exception {
        OptionsParam optionsParam = (OptionsParam) obj;
        ConnectionParam connectionParam = optionsParam.getConnectionParam();

        connectionParam.setProxyChainRealm(txtProxyChainRealm.getText());
        connectionParam.setProxyChainUserName(txtProxyChainUserName.getText());
        // Make sure this isn't saved in the config file
        connectionParam.setProxyChainPassword(
                new String(txtProxyChainPassword.getPassword()), false);
    }

    private ZapTextField getTxtProxyChainRealm() {
        if (txtProxyChainRealm == null) {
            txtProxyChainRealm = new ZapTextField();
        }
        return txtProxyChainRealm;
    }

    private ZapTextField getTxtProxyChainUserName() {
        if (txtProxyChainUserName == null) {
            txtProxyChainUserName = new ZapTextField();
        }
        return txtProxyChainUserName;
    }

    /**
     * Lazily creates the password field; pressing Enter in it saves and
     * closes the owning dialog.
     */
    private JPasswordField getTxtProxyChainPassword() {
        if (txtProxyChainPassword == null) {
            txtProxyChainPassword = new JPasswordField();
            txtProxyChainPassword.addActionListener(new java.awt.event.ActionListener() {

                @Override
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    proxyDialog.saveAndClose();
                }
            });
        }
        return txtProxyChainPassword;
    }

    /**
     * This method initializes chkShowPassword; toggling it switches the
     * password field between masked ('*') and plain text (echo char 0).
     *
     * @return javax.swing.JCheckBox
     */
    private JCheckBox getChkShowPassword() {
        if (chkShowPassword == null) {
            chkShowPassword = new JCheckBox();
            chkShowPassword.setText(Constant.messages.getString("conn.options.proxy.auth.showpass"));
            chkShowPassword.addActionListener(new java.awt.event.ActionListener() {

                @Override
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    if (chkShowPassword.isSelected()) {
                        txtProxyChainPassword.setEchoChar((char) 0);
                    } else {
                        txtProxyChainPassword.setEchoChar('*');
                    }
                }
            });
        }
        return chkShowPassword;
    }

    /** Moves keyboard focus to the password field. */
    public void passwordFocus() {
        this.getTxtProxyChainPassword().requestFocus();
    }

    @Override
    public String getHelpIndex() {
        return "ui.dialogs.options.connection";
    }
}
zapbot/zaproxy
src/org/zaproxy/zap/view/OptionsConnectionPanel.java
Java
apache-2.0
11,519
/********************************************************************************
 * Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * SPDX-License-Identifier: Apache-2.0
 ********************************************************************************/
package org.eclipse.ceylon.model.typechecker.model;

import static java.util.Collections.emptyList;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.isNameMatching;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.isOverloadedVersion;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.isResolvable;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.lookupMember;
import static org.eclipse.ceylon.model.typechecker.model.ModelUtil.lookupMemberForBackend;
import static org.eclipse.ceylon.model.typechecker.model.Unit.isToplevelImport;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.eclipse.ceylon.common.Backends;

/**
 * Any program element of relevance to the model.
 *
 * Base class for model elements that live in a scope: tracks the owning
 * {@link Unit}, the containing {@link Scope} (both the "real" container and
 * the condition-aware scope), and per-element imports, and implements the
 * member/parameter lookup chain (direct members, imports, then enclosing
 * scopes).
 *
 * @author Gavin King
 *
 */
public abstract class Element implements Scoped, ImportScope {

    // Package-private: only model classes in this package may subclass.
    Element() {}

    // "Real" containing scope, ignoring condition scopes; see getContainer().
    private Scope container;
    // Scope including condition ("fake") scopes; see getScope().
    private Scope scope;
    protected Unit unit;
    // Lazily created on first addImport(); null means "no imports".
    private List<Import> imports = null;

    @Override
    public List<Import> getImports() {
        return imports;
    }

    @Override
    public void addImport(Import imp) {
        if (imports==null) {
            // Small initial capacity: most elements have few imports.
            imports = new ArrayList<Import>(3);
        }
        imports.add(imp);
    }

    @Override
    public void removeImport(Import imp) {
        if (imports!=null) {
            imports.remove(imp);
        }
    }

    /**
     * Look up an unambiguous, non-type import of this element by alias.
     *
     * @return the matching {@link Import}, or null if none
     */
    public Import getImport(String name) {
        List<Import> imports = getImports();
        if (imports!=null) {
            for (Import i: imports) {
                if (!i.isAmbiguous() &&
                        i.getTypeDeclaration()==null &&
                        i.getAlias().equals(name)) {
                    return i;
                }
            }
        }
        return null;
    }

    @Override
    public List<Declaration> getMembers() {
        // Leaf default: subclasses with members override this.
        return emptyList();
    }

    @Override
    public Unit getUnit() {
        return unit;
    }

    public void setUnit(Unit compilationUnit) {
        this.unit = compilationUnit;
    }

    /**
     * The "real" scope of the element, ignoring that
     * conditions (in an assert, if, or while) each have
     * their own "fake" scope that does not apply to regular
     * declarations that occur within the fake scope.
     *
     * @see ConditionScope
     */
    @Override
    public Scope getContainer() {
        return container;
    }

    public void setContainer(Scope scope) {
        this.container = scope;
    }

    /**
     * The scope of the element, taking into account that
     * conditions (in an assert, if, or while) each have
     * their own "fake" scope.
     *
     * @see ConditionScope
     */
    @Override
    public Scope getScope() {
        return scope;
    }

    public void setScope(Scope scope) {
        this.scope = scope;
    }

    @Override
    public String getQualifiedNameString() {
        // An Element contributes no name segment of its own.
        return getContainer().getQualifiedNameString();
    }

    /**
     * Search only directly inside this scope.
     */
    @Override
    public Declaration getDirectMember(String name,
            List<Type> signature, boolean variadic) {
        return getDirectMember(name, signature, variadic, false);
    }

    /**
     * Search only directly inside this scope.
     */
    public Declaration getDirectMember(String name,
            List<Type> signature, boolean variadic,
            boolean onlyExactMatches) {
        return lookupMember(getMembers(), name,
                signature, variadic, onlyExactMatches);
    }

    /**
     * Search only directly inside this scope for a member
     * with the given name and any of the given backends
     */
    @Override
    public Declaration getDirectMemberForBackend(String name,
            Backends backends) {
        return lookupMemberForBackend(getMembers(), name,
                backends);
    }

    /**
     * Search only this scope, including members inherited
     * by the scope, without considering containing scopes
     * or imports. We're not looking for un-shared direct
     * members, but return them anyway, to let the caller
     * produce a nicer error.
     */
    public Declaration getMember(String name,
            List<Type> signature, boolean variadic,
            boolean onlyExactMatches) {
        return getDirectMember(name, signature, variadic,
                onlyExactMatches);
    }

    /**
     * Search only this scope, including members inherited
     * by the scope, without considering containing scopes
     * or imports. We're not looking for un-shared direct
     * members, but return them anyway, to let the caller
     * produce a nicer error.
     */
    @Override
    public Declaration getMember(String name,
            List<Type> signature, boolean variadic) {
        return getMember(name, signature, variadic, false);
    }

    /**
     * Search in this scope, taking into account containing
     * scopes, imports, and members inherited by this scope
     * and containing scopes, returning even un-shared
     * declarations of this scope and containing scopes.
     */
    @Override
    public Declaration getMemberOrParameter(Unit unit,
            String name, List<Type> signature,
            boolean variadic) {
        return getMemberOrParameter(unit, name, signature,
                variadic, false);
    }

    //TODO: copy/pasted from Unit
    private Declaration getImportedDeclaration(String name,
            List<Type> signature, boolean ellipsis) {
        List<Import> imports = getImports();
        if (imports!=null) {
            for (Import i: imports) {
                if (!i.isAmbiguous() &&
                        i.getAlias().equals(name)) {
                    //in case of an overloaded member, this will
                    //be the "abstraction", so search for the
                    //correct overloaded version
                    Declaration d = i.getDeclaration();
                    if (isToplevelImport(i, d)) {
                        return d.getContainer()
                                .getMember(d.getName(),
                                        signature, ellipsis);
                    }
                }
            }
        }
        return null;
    }

    //TODO: copy/pasted from Unit
    public Declaration getImportedDeclaration(
            TypeDeclaration td, String name,
            List<Type> signature, boolean ellipsis) {
        List<Import> imports = getImports();
        if (imports!=null) {
            for (Import i: imports) {
                TypeDeclaration itd = i.getTypeDeclaration();
                if (itd!=null && td.inherits(itd) &&
                        !i.isAmbiguous() &&
                        i.getAlias().equals(name)) {
                    //in case of an overloaded member, this will
                    //be the "abstraction", so search for the
                    //correct overloaded version
                    Declaration d = i.getDeclaration();
                    return d.getContainer()
                            .getMember(d.getName(),
                                    signature, ellipsis);
                }
            }
        }
        return null;
    }

    /**
     * Search in this scope, taking into account containing
     * scopes, imports, and members inherited by this scope
     * and containing scopes, returning even un-shared
     * declarations of this scope and containing scopes.
     *
     * Lookup order: element imports first, then this scope's
     * own members/parameters, then the enclosing scope chain.
     */
    public Declaration getMemberOrParameter(Unit unit,
            String name, List<Type> signature,
            boolean variadic, boolean onlyExactMatches) {
        Declaration d =
                getImportedDeclaration(name, signature,
                        variadic);
        if (d!=null) {
            return d;
        }
        d = getMemberOrParameter(name, signature, variadic);
        if (d!=null) {
            return d;
        }
        else if (getScope()!=null) {
            return getScope()
                    .getMemberOrParameter(unit, name,
                            signature, variadic);
        }
        else {
            //union type or bottom type
            return null;
        }
    }

    /**
     * Search only this scope, including members inherited
     * by this scope, without considering containing scopes
     * or imports. We are even interested in un-shared
     * direct members.
     */
    protected Declaration getMemberOrParameter(String name,
            List<Type> signature, boolean variadic) {
        return getMemberOrParameter(name, signature, variadic,
                false);
    }

    /**
     * Search only this scope, including members inherited
     * by this scope, without considering containing scopes
     * or imports. We are even interested in un-shared
     * direct members.
     */
    protected Declaration getMemberOrParameter(String name,
            List<Type> signature, boolean variadic,
            boolean onlyExactMatches) {
        return getDirectMember(name, signature, variadic,
                onlyExactMatches);
    }

    @Override
    public boolean isInherited(Declaration d) {
        if (d.getContainer()==this) {
            return false;
        }
        else if (getContainer()!=null) {
            // Delegate up the container chain.
            return getContainer().isInherited(d);
        }
        else {
            return false;
        }
    }

    @Override
    public TypeDeclaration getInheritingDeclaration(
            Declaration d) {
        if (d.getContainer()==this) {
            return null;
        }
        else if (getContainer()!=null) {
            return getContainer().getInheritingDeclaration(d);
        }
        else {
            return null;
        }
    }

    @Override
    public Type getDeclaringType(Declaration d) {
        if (d.isMember()) {
            return getContainer().getDeclaringType(d);
        }
        else {
            return null;
        }
    }

    /**
     * Collects completion proposals: declarations from enclosing scopes
     * (with increased proximity) merged with this element's own matching
     * members and their aliases. Cancellable via {@code canceller}.
     */
    @Override
    public Map<String, DeclarationWithProximity>
    getMatchingDeclarations(Unit unit, String startingWith,
            int proximity, Cancellable canceller) {
        Map<String, DeclarationWithProximity> result =
                getScope()
                    .getMatchingDeclarations(unit,
                            startingWith, proximity+1,
                            canceller);
        for (Declaration d: getMembers()) {
            if (canceller != null
                    && canceller.isCancelled()) {
                return Collections.emptyMap();
            }
            if (isResolvable(d) && !isOverloadedVersion(d)){
                if(isNameMatching(startingWith, d)) {
                    result.put(d.getName(unit),
                            new DeclarationWithProximity(d,
                                    proximity));
                }
                for(String alias : d.getAliases()){
                    if(isNameMatching(startingWith, alias)){
                        result.put(alias,
                                new DeclarationWithProximity(
                                        alias, d, proximity));
                    }
                }
            }
        }
        return result;
    }

    @Override
    public Backends getScopedBackends() {
        return getScope().getScopedBackends();
    }
}
ceylon/ceylon
model/src/org/eclipse/ceylon/model/typechecker/model/Element.java
Java
apache-2.0
11,913
package net.tatans.coeus.network.tools; import net.tatans.coeus.exception.ApplicationException; import android.app.Application; import android.content.Context; public class TatansApplication extends SpeakerApplication{ /** * Global application context. */ private static Context sContext; /** * Construct of LitePalApplication. Initialize application context. */ public TatansApplication() { sContext = this; } /** * Initialize to make ready to work. If you didn't configure LitePalApplication * in the AndroidManifest.xml, make sure you call this method as soon as possible. In * Application's onCreate() method will be fine. * * @param context * Application context. */ public static void initialize(Context context) { sContext = context; } /** * Get the global application context. * * @return Application context. * @throws ApplicationException */ public static Context getContext() { if (sContext == null) { throw new ApplicationException("AndroidManifest.xmlÖÐûÓÐ×¢²áTatansApplicationÀà"); } return sContext; } }
shengxiadeyu/searchOOM
coeus-network/src/main/java/net/tatans/coeus/network/tools/TatansApplication.java
Java
apache-2.0
1,109
package com.github.jnthnclt.os.lab.core.guts; import com.github.jnthnclt.os.lab.base.UIO; import com.github.jnthnclt.os.lab.core.io.PointerReadableByteBufferFile; import com.github.jnthnclt.os.lab.io.IAppendOnly; import com.google.common.io.Files; import java.io.File; import org.testng.Assert; import org.testng.annotations.Test; /** * * @author jonathan.colt */ public class FooterNGTest { @Test public void testToString() throws Exception { Footer write = new Footer(1, 2, 3, 4, UIO.longsBytes(new long[]{1, 2, 3, 4}), UIO.longsBytes(new long[]{4, 5}), 5, 6, 7, 8); File file = new File(Files.createTempDir(), "footer.bin"); AppendOnlyFile appendOnlyFile = new AppendOnlyFile(file); IAppendOnly appendOnly = appendOnlyFile.appender(); write.write(appendOnly); appendOnly.flush(true); appendOnly.close(); ReadOnlyFile indexFile = new ReadOnlyFile(file); PointerReadableByteBufferFile pointerReadable = indexFile.pointerReadable(-1); Footer read = Footer.read(pointerReadable, 0); System.out.println("write:" + write.toString()); System.out.println("read:" + read.toString()); Assert.assertEquals(write.toString(), read.toString()); } }
jnthnclt/lab
lab-core/src/test/java/com/github/jnthnclt/os/lab/core/guts/FooterNGTest.java
Java
apache-2.0
1,275
/* Copyright 2012 Marvin Pinto (me@marvinp.ca) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ca.marvinp.projecteuler.solutions; import ca.marvinp.projecteuler.misc.Timer; import ca.marvinp.projecteuler.misc.Constants; import java.math.BigInteger; public class P10 { private Timer timer; private String answer; public P10() { BigInteger res = BigInteger.valueOf(0); BigInteger bi = BigInteger.valueOf(1); int limit = 2000000; timer = new Timer(); timer.start(); BigInteger np = bi.nextProbablePrime(); while( np.compareTo(BigInteger.valueOf(limit)) < 0 ) { res = res.add(np); bi = np; np = bi.nextProbablePrime(); } timer.stop(); answer = res.toString(); } public String getAnswer() { StringBuilder sb = new StringBuilder(); sb.append("Answer: The sum of all primes is: "); sb.append(answer +""+ Constants.NEWLINE); sb.append(timer.getElapsedTime()); return sb.toString(); } public String getQuestion() { StringBuilder sb = new StringBuilder(); sb.append("URL: https://projecteuler.net/problem=10" + Constants.NEWLINE); sb.append("Find the sum of all the primes below two million."); return sb.toString(); } }
marvinpinto/project-euler
src/ca/marvinp/projecteuler/solutions/P10.java
Java
apache-2.0
1,746
package boj_7562;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.LinkedList;
import java.util.Queue;
import java.util.StringTokenizer;

/**
 * Solution for Baekjoon Online Judge problem 7562 (A Knight's Journey):
 * minimum number of knight moves from a start square to a target square
 * on an I x I board, found with breadth-first search.
 *
 * @see https://www.acmicpc.net/problem/7562
 * @author devetude
 */
public class Main {
	// Newline character constant
	private static final char NEW_LINE = '\n';

	// The eight (dy, dx) offsets a knight can move by
	private static final int[][] MOVES = { { 2, 1 }, { 1, 2 }, { -1, 2 }, { -2, 1 }, { -2, -1 }, { -1, -2 },
			{ 1, -2 }, { 2, -1 } };

	// Indexes of the column / row component within a MOVES entry
	private static final int X = 1;
	private static final int Y = 0;

	public static void main(String args[]) throws Exception {
		// Read the input through a buffered reader
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		int T = Integer.parseInt(br.readLine());

		// Accumulate all answers and print them in one go at the end
		StringBuilder sb = new StringBuilder();

		while (T-- > 0) {
			int I = Integer.parseInt(br.readLine());

			StringTokenizer st = new StringTokenizer(br.readLine(), " ");
			Point start = new Point(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));

			st = new StringTokenizer(br.readLine(), " ");
			Point end = new Point(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));

			// Visited array doubles as distance: 0 = unvisited,
			// otherwise (number of moves from start) + 1
			int[][] isVisited = new int[I][I];
			isVisited[start.y][start.x] = 1;

			// BFS queue, seeded with the start square
			Queue<Point> queue = new LinkedList<>();
			queue.offer(start);

			// Run BFS (the original comment said "dfs", but this is
			// breadth-first search, which is what gives shortest paths)
			while (!queue.isEmpty()) {
				Point current = queue.poll();

				if (current.x == end.x && current.y == end.y) {
					break;
				}

				for (final int[] MOVE : MOVES) {
					int nextY = current.y + MOVE[Y];
					int nextX = current.x + MOVE[X];

					// Stay on the board and only visit each square once
					if (0 <= nextY && nextY < I && 0 <= nextX && nextX < I) {
						if (isVisited[nextY][nextX] == 0) {
							isVisited[nextY][nextX] = isVisited[current.y][current.x] + 1;
							queue.offer(new Point(nextX, nextY));
						}
					}
				}
			}

			// Subtract 1 to undo the +1 encoding of the distance
			sb.append(isVisited[end.y][end.x] - 1).append(NEW_LINE);
		}

		br.close();

		// Print all results at once
		System.out.println(sb.toString());
	}

	/**
	 * Board-square (vertex) inner class
	 *
	 * @author devetude
	 */
	private static class Point {
		public int x;
		public int y;

		/**
		 * Constructor
		 *
		 * @param x column index
		 * @param y row index
		 */
		public Point(int x, int y) {
			this.x = x;
			this.y = y;
		}
	}
}
devetude/BOJ-PSJ
src/boj_7562/Main.java
Java
apache-2.0
2,556
package org.bouncycastle.crypto.engines;

import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.OutputLengthException;
import org.bouncycastle.crypto.StreamCipher;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.crypto.params.ParametersWithIV;

/**
 * Implementation of Martin Hell's, Thomas Johansson's and Willi Meier's stream
 * cipher, Grain v1.
 *
 * <p>The cipher state is an 80-bit LFSR and an 80-bit NFSR, each held here as
 * five 16-bit words packed in an {@code int[5]} (only the low 16 bits of each
 * element are significant). Key stream is produced 16 bits per round.</p>
 */
public class Grainv1Engine
    implements StreamCipher
{

    /**
     * Constants
     */
    // Words per shift register: 5 x 16 bits = 80 bits of state each.
    private static final int STATE_SIZE = 5;

    /**
     * Variables to hold the state of the engine during encryption and
     * decryption
     */
    private byte[] workingKey;          // copy of the key bytes (sized to the supplied key length)
    private byte[] workingIV;           // IV buffer sized to the key length; bytes 8-9 forced to 0xFF in setKey()
    private byte[] out;                 // 2-byte buffer holding the current 16-bit round of key stream
    private int[] lfsr;                 // linear feedback shift register, 5 x 16-bit words
    private int[] nfsr;                 // non-linear feedback shift register, 5 x 16-bit words
    private int output;                 // last 16-bit output of the filter function h(x)
    private int index = 2;              // next byte to read from 'out'; 2 means "empty, clock a new round"
    private boolean initialised = false;

    public String getAlgorithmName()
    {
        return "Grain v1";
    }

    /**
     * Initialize a Grain v1 cipher.
     *
     * @param forEncryption Whether or not we are for encryption.
     * @param params The parameters required to set up the cipher.
     * @throws IllegalArgumentException If the params argument is inappropriate.
     */
    public void init(boolean forEncryption, CipherParameters params)
        throws IllegalArgumentException
    {
        /**
         * Grain encryption and decryption is completely symmetrical, so the
         * 'forEncryption' is irrelevant.
         */
        if (!(params instanceof ParametersWithIV))
        {
            throw new IllegalArgumentException(
                "Grain v1 Init parameters must include an IV");
        }

        ParametersWithIV ivParams = (ParametersWithIV)params;

        byte[] iv = ivParams.getIV();

        // Grain v1 uses a fixed 64-bit (8-byte) IV.
        if (iv == null || iv.length != 8)
        {
            throw new IllegalArgumentException(
                "Grain v1 requires exactly 8 bytes of IV");
        }

        if (!(ivParams.getParameters() instanceof KeyParameter))
        {
            throw new IllegalArgumentException(
                "Grain v1 Init parameters must include a key");
        }

        KeyParameter key = (KeyParameter)ivParams.getParameters();

        /**
         * Initialize variables.
         */
        // workingIV is deliberately allocated at key length (not IV length) so
        // that setKey() can pad positions 8-9; the 8 IV bytes are copied below.
        workingIV = new byte[key.getKey().length];
        workingKey = new byte[key.getKey().length];
        lfsr = new int[STATE_SIZE];
        nfsr = new int[STATE_SIZE];
        out = new byte[2];

        System.arraycopy(iv, 0, workingIV, 0, iv.length);
        System.arraycopy(key.getKey(), 0, workingKey, 0, key.getKey().length);

        reset();
    }

    /**
     * 160 clocks initialization phase.
     */
    private void initGrain()
    {
        // 10 rounds x 16 bits per round = 160 clocks. During initialization
        // the output is fed back into both registers instead of being emitted.
        for (int i = 0; i < 10; i++)
        {
            output = getOutput();
            nfsr = shift(nfsr, getOutputNFSR() ^ lfsr[0] ^ output);
            lfsr = shift(lfsr, getOutputLFSR() ^ output);
        }
        initialised = true;
    }

    /**
     * Get output from non-linear function g(x).
     *
     * @return Output from NFSR.
     */
    private int getOutputNFSR()
    {
        // Each bN below reconstructs the 16-bit window of the NFSR starting at
        // bit N by combining two adjacent packed words.
        int b0 = nfsr[0];
        int b9 = nfsr[0] >>> 9 | nfsr[1] << 7;
        int b14 = nfsr[0] >>> 14 | nfsr[1] << 2;
        int b15 = nfsr[0] >>> 15 | nfsr[1] << 1;
        int b21 = nfsr[1] >>> 5 | nfsr[2] << 11;
        int b28 = nfsr[1] >>> 12 | nfsr[2] << 4;
        int b33 = nfsr[2] >>> 1 | nfsr[3] << 15;
        int b37 = nfsr[2] >>> 5 | nfsr[3] << 11;
        int b45 = nfsr[2] >>> 13 | nfsr[3] << 3;
        int b52 = nfsr[3] >>> 4 | nfsr[4] << 12;
        int b60 = nfsr[3] >>> 12 | nfsr[4] << 4;
        int b62 = nfsr[3] >>> 14 | nfsr[4] << 2;
        int b63 = nfsr[3] >>> 15 | nfsr[4] << 1;

        // g(x) feedback polynomial, evaluated 16 bits at a time; the final
        // mask keeps only the low 16 significant bits of the word.
        return (b62 ^ b60 ^ b52 ^ b45 ^ b37 ^ b33 ^ b28 ^ b21 ^ b14
            ^ b9 ^ b0 ^ b63 & b60 ^ b37 & b33 ^ b15 & b9 ^ b60 & b52 & b45
            ^ b33 & b28 & b21 ^ b63 & b45 & b28 & b9 ^ b60 & b52 & b37
            & b33 ^ b63 & b60 & b21 & b15 ^ b63 & b60 & b52 & b45 & b37
            ^ b33 & b28 & b21 & b15 & b9 ^ b52 & b45 & b37 & b33 & b28
            & b21) & 0x0000FFFF;
    }

    /**
     * Get output from linear function f(x).
     *
     * @return Output from LFSR.
     */
    private int getOutputLFSR()
    {
        // Taps of the linear feedback polynomial f(x), as 16-bit windows.
        int s0 = lfsr[0];
        int s13 = lfsr[0] >>> 13 | lfsr[1] << 3;
        int s23 = lfsr[1] >>> 7 | lfsr[2] << 9;
        int s38 = lfsr[2] >>> 6 | lfsr[3] << 10;
        int s51 = lfsr[3] >>> 3 | lfsr[4] << 13;
        int s62 = lfsr[3] >>> 14 | lfsr[4] << 2;

        return (s0 ^ s13 ^ s23 ^ s38 ^ s51 ^ s62) & 0x0000FFFF;
    }

    /**
     * Get output from output function h(x).
     *
     * @return Output from h(x).
     */
    private int getOutput()
    {
        // NFSR and LFSR windows feeding the output filter h(x).
        int b1 = nfsr[0] >>> 1 | nfsr[1] << 15;
        int b2 = nfsr[0] >>> 2 | nfsr[1] << 14;
        int b4 = nfsr[0] >>> 4 | nfsr[1] << 12;
        int b10 = nfsr[0] >>> 10 | nfsr[1] << 6;
        int b31 = nfsr[1] >>> 15 | nfsr[2] << 1;
        int b43 = nfsr[2] >>> 11 | nfsr[3] << 5;
        int b56 = nfsr[3] >>> 8 | nfsr[4] << 8;
        int b63 = nfsr[3] >>> 15 | nfsr[4] << 1;
        int s3 = lfsr[0] >>> 3 | lfsr[1] << 13;
        int s25 = lfsr[1] >>> 9 | lfsr[2] << 7;
        int s46 = lfsr[2] >>> 14 | lfsr[3] << 2;
        int s64 = lfsr[4];

        return (s25 ^ b63 ^ s3 & s64 ^ s46 & s64 ^ s64 & b63 ^ s3 & s25
            & s46 ^ s3 & s46 & s64 ^ s3 & s46 & b63 ^ s25 & s46 & b63 ^ s46
            & s64 & b63 ^ b1 ^ b2 ^ b4 ^ b10 ^ b31 ^ b43 ^ b56) & 0x0000FFFF;
    }

    /**
     * Shift array 16 bits and add val to index.length - 1.
     *
     * @param array The array to shift.
     * @param val The value to shift in.
     * @return The shifted array with val added to index.length - 1.
     */
    private int[] shift(int[] array, int val)
    {
        // Shifts the register left by one 16-bit word, in place; the same
        // array reference is returned for caller convenience.
        array[0] = array[1];
        array[1] = array[2];
        array[2] = array[3];
        array[3] = array[4];
        array[4] = val;

        return array;
    }

    /**
     * Set keys, reset cipher.
     *
     * @param keyBytes The key.
     * @param ivBytes The IV.
     */
    private void setKey(byte[] keyBytes, byte[] ivBytes)
    {
        // Pad the 8-byte IV with one-bits in positions 8-9 so the full 80-bit
        // LFSR load below is defined (ivBytes was sized to key length in init()).
        ivBytes[8] = (byte)0xFF;
        ivBytes[9] = (byte)0xFF;
        // NOTE: when called from reset(), these assignments are self-assignments.
        workingKey = keyBytes;
        workingIV = ivBytes;

        /**
         * Load NFSR and LFSR
         */
        // Each register word is a little-endian 16-bit value taken from two
        // consecutive bytes: NFSR from the key, LFSR from the padded IV.
        int j = 0;
        for (int i = 0; i < nfsr.length; i++)
        {
            nfsr[i] = (workingKey[j + 1] << 8 | workingKey[j] & 0xFF) & 0x0000FFFF;
            lfsr[i] = (workingIV[j + 1] << 8 | workingIV[j] & 0xFF) & 0x0000FFFF;
            j += 2;
        }
    }

    // XORs len bytes of key stream into out (the parameter shadows the field
    // of the same name); returns the number of bytes processed.
    public int processBytes(byte[] in, int inOff, int len, byte[] out,
                            int outOff)
        throws DataLengthException
    {
        if (!initialised)
        {
            throw new IllegalStateException(getAlgorithmName()
                + " not initialised");
        }

        if ((inOff + len) > in.length)
        {
            throw new DataLengthException("input buffer too short");
        }

        if ((outOff + len) > out.length)
        {
            throw new OutputLengthException("output buffer too short");
        }

        for (int i = 0; i < len; i++)
        {
            out[outOff + i] = (byte)(in[inOff + i] ^ getKeyStream());
        }

        return len;
    }

    // Restores the state derived from the original key/IV and reruns the
    // 160-clock initialization.
    public void reset()
    {
        index = 2;
        setKey(workingKey, workingIV);
        initGrain();
    }

    /**
     * Run Grain one round(i.e. 16 bits).
     */
    private void oneRound()
    {
        output = getOutput();
        // Buffer the 16-bit round output little-endian in 'out'.
        out[0] = (byte)output;
        out[1] = (byte)(output >> 8);

        nfsr = shift(nfsr, getOutputNFSR() ^ lfsr[0]);
        lfsr = shift(lfsr, getOutputLFSR());
    }

    public byte returnByte(byte in)
    {
        if (!initialised)
        {
            throw new IllegalStateException(getAlgorithmName()
                + " not initialised");
        }
        return (byte)(in ^ getKeyStream());
    }

    // Returns the next key-stream byte, clocking a fresh round whenever the
    // 2-byte buffer has been consumed (index > 1).
    private byte getKeyStream()
    {
        if (index > 1)
        {
            oneRound();
            index = 0;
        }

        return out[index++];
    }
}
ttt43ttt/gwt-crypto
src/main/java/org/bouncycastle/crypto/engines/Grainv1Engine.java
Java
apache-2.0
8,234
/*
 *      Copyright (C) 2015 The Casser Authors
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 */
package com.noorq.casser.mapping.validator;

import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Map;

/**
 * Mixin for validators that need a notion of "size" for an arbitrary value.
 */
public interface SizeConstraint {

    /** Sentinel returned for values that have no measurable size. */
    static final int[] EMPTY = new int[0];

    /**
     * Measures the given value.
     *
     * @param value the value to measure; may be {@code null}
     * @return {@code null} for a {@code null} value, a one-element array with
     *         the measured size for arrays, character sequences, byte buffers,
     *         collections and maps, and {@link #EMPTY} for anything else
     */
    default int[] getSize(Object value) {

        if (value == null) {
            return null;
        }

        final int length;
        if (value.getClass().isArray()) {
            length = Array.getLength(value);
        }
        else if (value instanceof CharSequence) {
            length = ((CharSequence) value).length();
        }
        else if (value instanceof ByteBuffer) {
            // NOTE(review): measures position(), i.e. bytes written so far,
            // not remaining() or limit() - confirm this is the intended metric.
            length = ((ByteBuffer) value).position();
        }
        else if (value instanceof Collection) {
            length = ((Collection<?>) value).size();
        }
        else if (value instanceof Map) {
            length = ((Map<?, ?>) value).size();
        }
        else {
            return EMPTY;
        }

        return new int[] { length };
    }
}
noorq/casser
src/main/java/com/noorq/casser/mapping/validator/SizeConstraint.java
Java
apache-2.0
1,548
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.shiena.seasar;

import org.seasar.framework.unit.impl.SimpleInternalTestContext;
import org.spockframework.runtime.model.IterationInfo;

/**
 * Seasar test context that exposes Spock iteration metadata: bridges a Spock
 * {@link IterationInfo} into Seasar's {@code SimpleInternalTestContext} so
 * that iteration and feature names are available to Seasar-driven tests.
 *
 * @author Mitsuhiro Koga
 */
public class S2SpockSimpleInternalTestContext extends SimpleInternalTestContext implements S2SpockInternalTestContext {

    /** Information about the current iteration of the (possibly unrolled) test method. */
    protected IterationInfo iterationInfo;

    /**
     * {@inheritDoc}
     */
    public void setIterationInfo(IterationInfo iterationInfo) {
        this.iterationInfo = iterationInfo;
    }

    /**
     * {@inheritDoc}
     */
    public String getIterationName() {
        return iterationInfo.getName();
    }

    /**
     * {@inheritDoc}
     */
    public String getFeatureName() {
        // The iteration's parent is the feature (test method) it belongs to.
        return iterationInfo.getParent().getName();
    }
}
shiena/spock-seasar
src/main/java/org/shiena/seasar/S2SpockSimpleInternalTestContext.java
Java
apache-2.0
1,430
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespaxmlparser;

import com.yahoo.document.DocumentTypeManager;
import com.yahoo.document.serialization.DeserializationException;

import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.FileInputStream;
import java.io.InputStream;

/**
 * Base class for StAX-based readers of Vespa XML document feeds. Holds the
 * {@link XMLStreamReader} and {@link DocumentTypeManager} and provides shared
 * helpers for error reporting, element skipping, and base64 detection.
 *
 * @author thomasg
 */
public class VespaXMLReader {
    DocumentTypeManager docTypeManager;
    XMLStreamReader reader;

    /** Opens the named file and parses from it. */
    public VespaXMLReader(String fileName, DocumentTypeManager docTypeManager) throws Exception {
        this(new FileInputStream(fileName), docTypeManager);
    }

    public VespaXMLReader(InputStream stream, DocumentTypeManager docTypeManager) throws Exception {
        this.docTypeManager = docTypeManager;
        XMLInputFactory xmlInputFactory = XMLInputFactory.newInstance();
        // XXE mitigation: never resolve external entities from the feed.
        // NOTE(review): DTD support itself (javax.xml.stream.supportDTD) is
        // not disabled here - confirm whether DTDs in feeds are intentional.
        xmlInputFactory.setProperty("javax.xml.stream.isSupportingExternalEntities", Boolean.FALSE);
        reader = xmlInputFactory.createXMLStreamReader(stream);
    }

    /** Wraps an already-created stream reader (no parser configuration is applied). */
    public VespaXMLReader(XMLStreamReader reader, DocumentTypeManager docTypeManager) {
        this.docTypeManager = docTypeManager;
        this.reader = reader;
    }

    /** Builds a DeserializationException carrying the reader's current line/column. */
    protected RuntimeException newDeserializeException(String message) {
        return new DeserializationException(message + " (at line " + reader.getLocation().getLineNumber() + ", column " + reader.getLocation().getColumnNumber() + ")");
    }

    /** Wraps another exception, preserving it as the cause and adding line/column context. */
    protected RuntimeException newException(Exception e) {
        return new DeserializationException(e.getMessage() + " (at line " + reader.getLocation().getLineNumber() + ", column " + reader.getLocation().getColumnNumber() + ")", e);
    }

    /**
     * Advances the reader until the END_ELEMENT matching {@code tagName} is
     * the current event, consuming (skipping) everything in between.
     *
     * @throws DeserializationException if the document ends before the end tag is seen
     */
    protected void skipToEnd(String tagName) throws XMLStreamException {
        while (reader.hasNext()) {
            if (reader.getEventType() == XMLStreamReader.END_ELEMENT && tagName.equals(reader.getName().toString())) {
                return;
            }
            reader.next();
        }
        throw new DeserializationException("Missing end tag for element '" + tagName + "'" + reader.getLocation());
    }

    /** True if the attribute pair declares base64 content (binaryencoding="base64", value case-insensitive). */
    public static boolean isBase64EncodingAttribute(String attributeName, String attributeValue) {
        return "binaryencoding".equals(attributeName) &&
               "base64".equalsIgnoreCase(attributeValue);
    }

    /** True if the element currently at the reader's cursor carries a base64 encoding attribute. */
    public static boolean isBase64EncodedElement(XMLStreamReader reader) {
        for (int i = 0; i < reader.getAttributeCount(); i++) {
            if (isBase64EncodingAttribute(reader.getAttributeName(i).toString(), reader.getAttributeValue(i))) {
                return true;
            }
        }
        return false;
    }
}
vespa-engine/vespa
document/src/main/java/com/yahoo/vespaxmlparser/VespaXMLReader.java
Java
apache-2.0
2,824
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.rekognition.model;

import java.io.Serializable;
import javax.annotation.Generated;

/*
 * NOTE: generated by the AWS Java SDK code generator (see @Generated below).
 * Do not hand-edit behavior here; regenerate from the service model instead.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DetectModerationLabelsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     * detected.
     * </p>
     */
    private java.util.List<ModerationLabel> moderationLabels;
    /**
     * <p>
     * Version number of the moderation detection model that was used to detect unsafe content.
     * </p>
     */
    private String moderationModelVersion;
    /**
     * <p>
     * Shows the results of the human in the loop evaluation.
     * </p>
     */
    private HumanLoopActivationOutput humanLoopActivationOutput;

    /**
     * <p>
     * Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     * detected.
     * </p>
     *
     * @return Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     *         detected.
     */
    public java.util.List<ModerationLabel> getModerationLabels() {
        return moderationLabels;
    }

    /**
     * <p>
     * Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     * detected.
     * </p>
     *
     * @param moderationLabels
     *        Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     *        detected.
     */
    public void setModerationLabels(java.util.Collection<ModerationLabel> moderationLabels) {
        if (moderationLabels == null) {
            this.moderationLabels = null;
            return;
        }

        // Defensive copy: the stored list is independent of the caller's collection.
        this.moderationLabels = new java.util.ArrayList<ModerationLabel>(moderationLabels);
    }

    /**
     * <p>
     * Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     * detected.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setModerationLabels(java.util.Collection)} or {@link #withModerationLabels(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param moderationLabels
     *        Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     *        detected.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DetectModerationLabelsResult withModerationLabels(ModerationLabel... moderationLabels) {
        if (this.moderationLabels == null) {
            setModerationLabels(new java.util.ArrayList<ModerationLabel>(moderationLabels.length));
        }
        for (ModerationLabel ele : moderationLabels) {
            this.moderationLabels.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     * detected.
     * </p>
     *
     * @param moderationLabels
     *        Array of detected Moderation labels and the time, in milliseconds from the start of the video, they were
     *        detected.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DetectModerationLabelsResult withModerationLabels(java.util.Collection<ModerationLabel> moderationLabels) {
        setModerationLabels(moderationLabels);
        return this;
    }

    /**
     * <p>
     * Version number of the moderation detection model that was used to detect unsafe content.
     * </p>
     *
     * @param moderationModelVersion
     *        Version number of the moderation detection model that was used to detect unsafe content.
     */
    public void setModerationModelVersion(String moderationModelVersion) {
        this.moderationModelVersion = moderationModelVersion;
    }

    /**
     * <p>
     * Version number of the moderation detection model that was used to detect unsafe content.
     * </p>
     *
     * @return Version number of the moderation detection model that was used to detect unsafe content.
     */
    public String getModerationModelVersion() {
        return this.moderationModelVersion;
    }

    /**
     * <p>
     * Version number of the moderation detection model that was used to detect unsafe content.
     * </p>
     *
     * @param moderationModelVersion
     *        Version number of the moderation detection model that was used to detect unsafe content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DetectModerationLabelsResult withModerationModelVersion(String moderationModelVersion) {
        setModerationModelVersion(moderationModelVersion);
        return this;
    }

    /**
     * <p>
     * Shows the results of the human in the loop evaluation.
     * </p>
     *
     * @param humanLoopActivationOutput
     *        Shows the results of the human in the loop evaluation.
     */
    public void setHumanLoopActivationOutput(HumanLoopActivationOutput humanLoopActivationOutput) {
        this.humanLoopActivationOutput = humanLoopActivationOutput;
    }

    /**
     * <p>
     * Shows the results of the human in the loop evaluation.
     * </p>
     *
     * @return Shows the results of the human in the loop evaluation.
     */
    public HumanLoopActivationOutput getHumanLoopActivationOutput() {
        return this.humanLoopActivationOutput;
    }

    /**
     * <p>
     * Shows the results of the human in the loop evaluation.
     * </p>
     *
     * @param humanLoopActivationOutput
     *        Shows the results of the human in the loop evaluation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DetectModerationLabelsResult withHumanLoopActivationOutput(HumanLoopActivationOutput humanLoopActivationOutput) {
        setHumanLoopActivationOutput(humanLoopActivationOutput);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getModerationLabels() != null)
            sb.append("ModerationLabels: ").append(getModerationLabels()).append(",");
        if (getModerationModelVersion() != null)
            sb.append("ModerationModelVersion: ").append(getModerationModelVersion()).append(",");
        if (getHumanLoopActivationOutput() != null)
            sb.append("HumanLoopActivationOutput: ").append(getHumanLoopActivationOutput());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DetectModerationLabelsResult == false)
            return false;
        DetectModerationLabelsResult other = (DetectModerationLabelsResult) obj;
        // Generated idiom: XOR catches "exactly one side null"; the follow-up
        // compares values when both sides are non-null.
        if (other.getModerationLabels() == null ^ this.getModerationLabels() == null)
            return false;
        if (other.getModerationLabels() != null && other.getModerationLabels().equals(this.getModerationLabels()) == false)
            return false;
        if (other.getModerationModelVersion() == null ^ this.getModerationModelVersion() == null)
            return false;
        if (other.getModerationModelVersion() != null && other.getModerationModelVersion().equals(this.getModerationModelVersion()) == false)
            return false;
        if (other.getHumanLoopActivationOutput() == null ^ this.getHumanLoopActivationOutput() == null)
            return false;
        if (other.getHumanLoopActivationOutput() != null && other.getHumanLoopActivationOutput().equals(this.getHumanLoopActivationOutput()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-multiplier combination over the same fields equals() compares.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getModerationLabels() == null) ? 0 : getModerationLabels().hashCode());
        hashCode = prime * hashCode + ((getModerationModelVersion() == null) ? 0 : getModerationModelVersion().hashCode());
        hashCode = prime * hashCode + ((getHumanLoopActivationOutput() == null) ? 0 : getHumanLoopActivationOutput().hashCode());
        return hashCode;
    }

    @Override
    public DetectModerationLabelsResult clone() {
        try {
            // Shallow copy, as produced by the SDK generator.
            return (DetectModerationLabelsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
aws/aws-sdk-java
aws-java-sdk-rekognition/src/main/java/com/amazonaws/services/rekognition/model/DetectModerationLabelsResult.java
Java
apache-2.0
9,868
/*
 * Copyright 2022 Apollo Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.ctrip.framework.apollo.config.data.extension.initialize;

import com.ctrip.framework.apollo.config.data.extension.properties.ApolloClientProperties;
import org.springframework.boot.context.properties.bind.BindHandler;
import org.springframework.boot.context.properties.bind.Binder;

/**
 * Extension point invoked while Spring Boot config-data processing sets up the
 * Apollo client, letting an implementation initialize an optional client
 * feature from the bound extension properties.
 *
 * @author vdisk <vdisk@foxmail.com>
 */
public interface ApolloClientExtensionInitializer {

  /**
   * initialize extension
   *
   * @param apolloClientProperties apollo client extension properties, already bound from configuration
   * @param binder properties binder for binding any additional properties the extension requires
   * @param bindHandler properties bind handler to apply during that binding (validation, error reporting)
   */
  void initialize(ApolloClientProperties apolloClientProperties, Binder binder, BindHandler bindHandler);
}
nobodyiam/apollo
apollo-client-config-data/src/main/java/com/ctrip/framework/apollo/config/data/extension/initialize/ApolloClientExtensionInitializer.java
Java
apache-2.0
1,329
/* * Copyright 2015 herd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.herd.model.jpa; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * Attribute associated with a storage. */ @XmlRootElement @XmlType @Table(name = StorageAttributeEntity.TABLE_NAME) @Entity public class StorageAttributeEntity extends AuditableEntity { /** * The table name. */ public static final String TABLE_NAME = "strge_atrbt"; @Id @Column(name = TABLE_NAME + "_id") @GeneratedValue(generator = TABLE_NAME + "_seq") @SequenceGenerator(name = TABLE_NAME + "_seq", sequenceName = TABLE_NAME + "_seq") private Integer id; /** * The attribute name column. */ @Column(name = "atrbt_nm") private String name; /** * The attribute value column. 
*/ @Column(name = "atrbt_value_tx", length = 4000) private String value; @ManyToOne @JoinColumn(name = "strge_cd", referencedColumnName = "strge_cd", nullable = false) private StorageEntity storage; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public StorageEntity getStorage() { return storage; } public void setStorage(StorageEntity storage) { this.storage = storage; } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } StorageAttributeEntity that = (StorageAttributeEntity) other; if (id != null ? !id.equals(that.id) : that.id != null) { return false; } if (name != null ? !name.equals(that.name) : that.name != null) { return false; } if (value != null ? !value.equals(that.value) : that.value != null) { return false; } return true; } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (value != null ? value.hashCode() : 0); return result; } }
seoj/herd
herd-code/herd-model/src/main/java/org/finra/herd/model/jpa/StorageAttributeEntity.java
Java
apache-2.0
3,418
package com.vladkel.eFindMe.graph.transformer;

import java.awt.Color;
import java.awt.Paint;

import org.apache.commons.collections15.Transformer;

import com.vladkel.eFindMe.graph.parsingxml.GraphXML;
import com.vladkel.eFindMe.search.engine.model.Match;
import com.vladkel.eFindMe.search.engine.model.Url;

import edu.uci.ics.jung.visualization.picking.PickedInfo;

/**
 * Maps a graph node ({@link Url}) to the paint used to render it, based on its
 * trust level: green for trusted (and unrecognized levels), gray for unknown,
 * red for bad.
 */
public class GraphNodeColor implements Transformer<Url, Paint> {

	protected PickedInfo<Url> picked;
	// NOTE(review): never read in this class; kept because it is protected and
	// a subclass may rely on it - confirm before removing.
	protected String myNode;

	public GraphNodeColor(PickedInfo<Url> pi) {
		this.picked = pi;
	}

	/**
	 * Returns the color for the given node. A {@link Match} registered for the
	 * node's id takes precedence over the node's own trust level; a node with
	 * neither is painted green.
	 */
	@Override
	public Paint transform(Url node) {
		Match match = GraphXML.getInstance().getMatch(node.getId());
		if (match != null) {
			return colorForTrust(match.getTrust().toString());
		}
		if (node.getTrust() != null) {
			return colorForTrust(node.getTrust().toString());
		}
		return Color.GREEN;
	}

	/**
	 * Single mapping from a trust-level name to a color (this switch was
	 * previously duplicated verbatim in both branches of transform()).
	 */
	private static Paint colorForTrust(String trust) {
		switch (trust) {
			case "Unknown":
				return Color.GRAY;
			case "Bad":
				return Color.RED;
			case "Trusted":
			default:
				return Color.GREEN;
		}
	}
}
Vladk-el/eFindMe
src/main/java/com/vladkel/eFindMe/graph/transformer/GraphNodeColor.java
Java
apache-2.0
1,285
/*
 * Copyright [2016]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netpet.spools.javacore.xml;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;

/**
 * Created with IntelliJ IDEA.
 * User: trigger
 * Date: 13-10-21
 * Time: 9:51 AM
 *
 * Generating and parsing XML documents with JDOM.
 */
public class JDomDemo implements XmlDocument {

    /** Manual smoke test: parses (and optionally generates) F://a.xml. */
    public static void main(String[] args) {
        JDomDemo demo = new JDomDemo();
//        demo.createXml("F://a.xml");
        demo.parserXml("F://a.xml");
    }

    /**
     * Writes a small fixed employees document to the given file.
     *
     * @param fileName path of the XML file to create
     */
    public void createXml(String fileName) {
        Element root = new Element("employees");
        Document document = new Document(root);

        Element employee = new Element("employee");
        root.addContent(employee);

        Element name = new Element("name");
        name.setText("ddvip");
        employee.addContent(name);

        Element sex = new Element("sex");
        sex.setText("m");
        employee.addContent(sex);

        Element age = new Element("age");
        age.setText("23");
        employee.addContent(age);

        XMLOutputter xmlOut = new XMLOutputter();
        // try-with-resources: the original never closed the stream (leak).
        try (FileOutputStream out = new FileOutputStream(fileName)) {
            xmlOut.output(document, out);
        } catch (IOException e) {
            // FileNotFoundException is a subclass of IOException, so both
            // original catch branches are covered here.
            e.printStackTrace();
        }
    }

    /**
     * Parses the employees document at the given path and prints every child
     * element of each employee as "name:value".
     *
     * @param fileName path of the XML file to parse
     */
    public void parserXml(String fileName) {
        SAXBuilder builder = new SAXBuilder(false); // no DTD validation
        try {
            Document document = builder.build(fileName);
            Element employees = document.getRootElement();
            List employeeList = employees.getChildren("employee");
            for (int i = 0; i < employeeList.size(); i++) {
                Element employee = (Element) employeeList.get(i);
                List employeeInfo = employee.getChildren();
                // BUG FIX: the inner loop previously incremented i instead of
                // j, printing employeeInfo.get(0) forever (infinite loop) and
                // never advancing through the fields.
                for (int j = 0; j < employeeInfo.size(); j++) {
                    Element field = (Element) employeeInfo.get(j);
                    System.out.println(field.getName() + ":" + field.getValue());
                }
            }
        } catch (JDOMException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
WindsorWang/Spools
spools-javacore/src/main/java/com/netpet/spools/javacore/xml/JDomDemo.java
Java
apache-2.0
2,940
package io.datakernel.di.impl; import java.util.concurrent.atomic.AtomicReferenceArray; /** * An abstract class instead of an interface for the same reason as {@link CompiledBinding} */ @SuppressWarnings("rawtypes") public interface CompiledBindingInitializer<R> { void initInstance(R instance, AtomicReferenceArray[] instances, int synchronizedScope); }
softindex/datakernel
core-di/src/main/java/io/datakernel/di/impl/CompiledBindingInitializer.java
Java
apache-2.0
360
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */

package com.liferay.arquillian.container.remote.installdependency;

import com.liferay.arquillian.container.remote.LiferayRemoteContainerConfiguration;
import com.liferay.arquillian.portal.bundle.PortalURLBundleActivator;
import com.liferay.arquillian.portal.bundle.servlet.PortalURLServlet;
import com.liferay.hot.deploy.jmx.listener.mbean.manager.PluginMBeanManager;
import com.liferay.portal.kernel.util.StringUtil;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;

import java.net.URL;

import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.management.MBeanServerConnection;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.jboss.arquillian.container.spi.ContainerRegistry;
import org.jboss.arquillian.container.spi.client.container.LifecycleException;
import org.jboss.arquillian.container.spi.context.annotation.ContainerScoped;
import org.jboss.arquillian.container.spi.event.StartContainer;
import org.jboss.arquillian.container.spi.event.StopContainer;
import org.jboss.arquillian.core.api.Instance;
import org.jboss.arquillian.core.api.InstanceProducer;
import org.jboss.arquillian.core.api.annotation.ApplicationScoped;
import org.jboss.arquillian.core.api.annotation.Inject;
import org.jboss.arquillian.core.api.annotation.Observes;
import org.jboss.osgi.metadata.OSGiManifestBuilder;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.Asset;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.resolver.api.maven.ConfigurableMavenResolverSystem;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.shrinkwrap.resolver.api.maven.MavenFormatStage;
import org.jboss.shrinkwrap.resolver.api.maven.MavenStrategyStage;

import org.osgi.jmx.framework.BundleStateMBean;
import org.osgi.jmx.framework.FrameworkMBean;

/**
 * Arquillian observer that, on container start, connects to a running Liferay
 * over JMX and installs the dependency bundles listed in the configured
 * dependency property file; on container stop, it uninstalls everything it
 * installed.
 *
 * @author Cristina González
 */
public class LiferayInstallDependenciesObserver {

	// True when os.name contains "windows" (case-insensitive); used to fix up
	// drive-letter paths into valid file:// URLs in _installBundle.
	public static final boolean IS_WINDOWS =
		System.getProperty("os.name") != null &&
		StringUtil.toLowerCase(
			System.getProperty("os.name")
		).contains("windows");

	/**
	 * Observes container start: sets up the OSGi and Liferay JMX proxies,
	 * installs the portal helper bundle, then installs every dependency listed
	 * in the configured dependency property file (one per line, prefixed
	 * "mvn:" for Maven coordinates or "file:" for a direct path).
	 *
	 * @param context the Arquillian start-container event
	 * @throws Exception if JMX setup or any bundle installation fails
	 */
	public void startContainer(@Observes StartContainer context)
		throws Exception {

		LiferayRemoteContainerConfiguration config =
			_configurationInstance.get();

		String dependencyPropertyFile = config.getDependencyPropertyFile();

		_installedBundles = new ArrayList<>();

		// Order matters: JMX proxies must exist before any bundle install.
		_initOSGiJMXAttributes(config);

		_initLiferayJMXAttributes();

		_installPortalDependencies();

		if (dependencyPropertyFile != null) {
			Path dependencyPropertyFilePath = Paths.get(dependencyPropertyFile);

			Charset charset = Charset.forName("UTF-8");

			try {
				List<String> dependencies = Files.readAllLines(
					dependencyPropertyFilePath, charset);

				String dependencyPath = "";

				for (String dependency : dependencies) {
					// The +1 skips the ':' separator after the prefix.
					if (dependency.startsWith(_MAVEN_PREFIX)) {
						String mavenDependency = dependency.substring(
							_MAVEN_PREFIX.length() + 1);

						dependencyPath = _getMavenDependencyPath(
							mavenDependency);
					}
					else if (dependency.startsWith(_FILE_PREFIX)) {
						dependencyPath = dependency.substring(
							_FILE_PREFIX.length() + 1);
					}

					// NOTE(review): a line matching neither prefix reinstalls
					// the previous dependencyPath (or "") — confirm intended.
					_installBundle(
						Paths.get(dependencyPath).toAbsolutePath().toString());
				}
			}
			catch (IOException ioe) {
				throw new LifecycleException(
					"Can't find file " +
						dependencyPropertyFilePath.toAbsolutePath(),
					ioe);
			}
		}
	}

	/**
	 * Observes container stop: uninstalls every bundle this observer
	 * installed during startContainer.
	 *
	 * @param context the Arquillian stop-container event
	 * @throws LifecycleException if any uninstall fails over JMX
	 */
	public void stopContainer(@Observes StopContainer context)
		throws LifecycleException {

		for (long bundleId : _installedBundles) {
			try {
				_frameworkMBean.uninstallBundle(bundleId);
			}
			catch (IOException ioe) {
				throw new LifecycleException("Can't uninstall bundle", ioe);
			}
		}
	}

	/**
	 * Polls the bundle state every 100 ms for up to 3 s until it is ACTIVE.
	 *
	 * @param bundleId the bundle to wait for
	 * @throws TimeoutException if the bundle is not ACTIVE within 3 s
	 */
	private void _awaitUntilBundleActive(long bundleId)
		throws InterruptedException, IOException, TimeoutException {

		long timeoutMillis = System.currentTimeMillis() + 3000;

		while (System.currentTimeMillis() < timeoutMillis) {
			if ("ACTIVE".equals(this._bundleStateMBean.getState(bundleId))) {
				return;
			}

			Thread.sleep(100L);
		}

		throw new TimeoutException(
			"The bundle with bundleId [" + bundleId + "] is not Active");
	}

	/**
	 * Polls the Liferay plugins manager every 500 ms for up to 3 s until the
	 * legacy plugin with the given context name shows up as deployed.
	 *
	 * @param contextName web context name of the legacy plugin (WAR)
	 * @throws TimeoutException if the plugin is not listed within 3 s
	 */
	private void _awaitUntilLegacyPluginDeployed(String contextName)
		throws InterruptedException, IOException, TimeoutException {

		long timeoutMillis = System.currentTimeMillis() + 3000;

		while (System.currentTimeMillis() < timeoutMillis) {
			List<String> legacyPluginsList =
				_pluginsManagerMBean.listLegacyPlugins();

			if (legacyPluginsList.contains(contextName)) {
				return;
			}

			Thread.sleep(500L);
		}

		throw new TimeoutException(
			"The plugin [" + contextName + "] is not Well Deployed");
	}

	/**
	 * Resolves a Maven coordinate to a local artifact file, consulting the
	 * local ~/.m2 repository first and then the Liferay public Nexus mirror,
	 * without transitive dependencies.
	 *
	 * @param mavenDependency Maven coordinates (e.g. group:artifact:version)
	 * @return absolute path of the first resolved artifact
	 */
	private String _getMavenDependencyPath(String mavenDependency) {
		String userHome = System.getProperty("user.home");

		ConfigurableMavenResolverSystem resolver = Maven.configureResolver();

		ConfigurableMavenResolverSystem resolverWithLocalRepo =
			resolver.withRemoteRepo(
				"local-m2", "file://" + userHome + "/.m2/repository",
				"default");

		ConfigurableMavenResolverSystem resolverWithLocalRepoAndLiferayRepo =
			resolverWithLocalRepo.withRemoteRepo(
				"liferay-public",
				"http://cdn.repository.liferay.com/" +
					"nexus/content/groups/public",
				"default");

		MavenStrategyStage resolve =
			resolverWithLocalRepoAndLiferayRepo.resolve(mavenDependency);

		MavenFormatStage mavenFormatStage = resolve.withoutTransitivity();

		File[] resolved = mavenFormatStage.asFile();

		return resolved[0].getAbsolutePath();
	}

	/**
	 * Builds a typed proxy for a JMX MBean, retrying the name query every
	 * 500 ms (on a single-thread executor) until exactly one match appears or
	 * the timeout elapses.
	 *
	 * <p>NOTE(review): the executor is never shut down, and lastException is
	 * never assigned before being logged — confirm both are intentional.
	 *
	 * @param mbeanServer connection to query
	 * @param oname object-name pattern to look up
	 * @param type proxy interface type
	 * @param timeout maximum time to wait
	 * @param unit unit of {@code timeout}
	 * @return a proxy of type {@code U} for the matched MBean
	 * @throws TimeoutException if no unique match appears in time
	 */
	private <U> U _getMBeanProxy(
			final MBeanServerConnection mbeanServer, final ObjectName oname,
			final Class<U> type, final long timeout, final TimeUnit unit)
		throws TimeoutException {

		Callable<U> callable = new Callable<U>() {

			@Override
			public U call() throws Exception {
				IOException lastException = null;

				long timeoutMillis =
					System.currentTimeMillis() + unit.toMillis(timeout);

				while (System.currentTimeMillis() < timeoutMillis) {
					Set<ObjectName> names =
						mbeanServer.queryNames(oname, null);

					if (names.size() == 1) {
						ObjectName instanceName = names.iterator().next();

						return MBeanServerInvocationHandler.newProxyInstance(
							mbeanServer, instanceName, type, false);
					}
					else {
						Thread.sleep(500);
					}
				}

				_log.log(
					Level.WARNING,
					"Cannot get MBean proxy for type: " + oname,
					lastException);

				throw new TimeoutException();
			}

		};

		ExecutorService executor = Executors.newSingleThreadExecutor();

		Future<U> future = executor.submit(callable);

		try {
			return future.get(timeout, unit);
		}
		catch (TimeoutException te) {
			throw te;
		}
		catch (Exception ex) {
			throw new IllegalStateException(ex);
		}
	}

	/**
	 * Opens a JMX connection using the username/password from the
	 * configuration and returns the MBean server connection.
	 *
	 * @param configuration source of the JMX service URL and credentials
	 * @throws IOException if the connector cannot be established
	 */
	private MBeanServerConnection _getMBeanServerConnection(
			LiferayRemoteContainerConfiguration configuration)
		throws IOException {

		String[] credentials = new String[] {
			configuration.getJmxUsername(), configuration.getJmxPassword()
		};

		Map<String, ?> env = Collections.singletonMap(
			JMXConnector.CREDENTIALS, credentials);

		JMXServiceURL serviceURL = new JMXServiceURL(
			configuration.getJmxServiceURL());

		JMXConnector connector = JMXConnectorFactory.connect(serviceURL, env);

		return connector.getMBeanServerConnection();
	}

	/**
	 * Retries the single-argument overload every 500 ms (on a single-thread
	 * executor) until a connection succeeds or the timeout elapses.
	 *
	 * <p>NOTE(review): executor is never shut down here either — confirm.
	 *
	 * @throws TimeoutException if no connection is obtained in time; its
	 *         cause is the last connection failure
	 */
	private MBeanServerConnection _getMBeanServerConnection(
			final LiferayRemoteContainerConfiguration configuration,
			final long timeout, final TimeUnit unit)
		throws TimeoutException {

		Callable<MBeanServerConnection> callable =
			new Callable<MBeanServerConnection>() {

				@Override
				public MBeanServerConnection call() throws Exception {
					Exception lastException = null;

					long timeoutMillis =
						System.currentTimeMillis() + unit.toMillis(timeout);

					while (System.currentTimeMillis() < timeoutMillis) {
						try {
							return _getMBeanServerConnection(configuration);
						}
						catch (Exception e) {
							lastException = e;

							Thread.sleep(500);
						}
					}

					TimeoutException timeoutException = new TimeoutException();

					timeoutException.initCause(lastException);

					throw timeoutException;
				}

			};

		ExecutorService executor = Executors.newSingleThreadExecutor();

		Future<MBeanServerConnection> future = executor.submit(callable);

		try {
			return future.get(timeout, unit);
		}
		catch (TimeoutException te) {
			throw te;
		}
		catch (Exception ex) {
			throw new IllegalStateException(ex);
		}
	}

	/**
	 * Looks up the Liferay PluginsManager MBean (30 s timeout) and caches its
	 * proxy in _pluginsManagerMBean.
	 *
	 * @throws LifecycleException if the MBean cannot be obtained
	 */
	private void _initLiferayJMXAttributes() throws LifecycleException {
		try {

			// Get the PluginsMBean

			ObjectName oname = new ObjectName(
				"com.liferay.portal.monitoring:classification=" +
					"plugin_statistics,name=PluginsManager");

			_pluginsManagerMBean = _getMBeanProxy(
				mbeanServerInstance.get(), oname, PluginMBeanManager.class, 30,
				TimeUnit.SECONDS);
		}
		catch (RuntimeException re) {
			throw re;
		}
		catch (Exception e) {
			throw new LifecycleException(
				"Cannot get a Liferay JMX connection", e);
		}
	}

	/**
	 * Connects to the running server's MBean server (30 s timeout), publishes
	 * the connection via mbeanServerInstance, and caches proxies for the OSGi
	 * Framework and BundleState MBeans.
	 *
	 * @param configuration source of the JMX connection settings
	 * @throws LifecycleException if connecting or proxy lookup fails
	 */
	private void _initOSGiJMXAttributes(
			LiferayRemoteContainerConfiguration configuration)
		throws LifecycleException {

		MBeanServerConnection mbeanServer = null;

		// Try to connect to an already running server

		try {
			mbeanServer = _getMBeanServerConnection(
				configuration, 30, TimeUnit.SECONDS);

			mbeanServerInstance.set(mbeanServer);
		}
		catch (TimeoutException te) {
			throw new LifecycleException(
				"Error connecting to Karaf MBeanServer: ", te);
		}

		try {

			// Get the FrameworkMBean

			ObjectName oname = new ObjectName("osgi.core:type=framework,*");

			_frameworkMBean = _getMBeanProxy(
				mbeanServer, oname, FrameworkMBean.class, 30,
				TimeUnit.SECONDS);

			// Get the BundleStateMBean

			oname = new ObjectName("osgi.core:type=bundleState,*");

			_bundleStateMBean = _getMBeanProxy(
				mbeanServer, oname, BundleStateMBean.class, 30,
				TimeUnit.SECONDS);
		}
		catch (RuntimeException re) {
			throw re;
		}
		catch (Exception e) {
			throw new LifecycleException("Cannot start Karaf container", e);
		}
	}

	/**
	 * Installs a bundle from a local path via the Framework MBean, starts it,
	 * and waits until it is ACTIVE. A .war is installed as a webbundle: URL
	 * with its Web-ContextPath derived from the file name (version suffix
	 * stripped via _pattern), then waited on as a legacy plugin. An
	 * already-installed bundle is logged and skipped rather than failing.
	 *
	 * @param filePath absolute path of the .jar/.war to install
	 * @throws LifecycleException on install, start, or wait failure
	 */
	private void _installBundle(String filePath) throws LifecycleException {
		try {
			String pathWithProtocol;

			if (IS_WINDOWS) {
				// Prepend '/' before the drive letter so "C:\x" becomes a
				// valid file:///C:... URL.
				pathWithProtocol =
					"file://" + filePath.replaceFirst("^[a-zA-Z]:", "/$0");
			}
			else {
				pathWithProtocol = "file://" + filePath;
			}

			String contextName = "";

			if (filePath.endsWith(".war")) {
				int x = filePath.lastIndexOf("/");
				int y = filePath.lastIndexOf(".war");

				contextName = filePath.substring(x + 1, y);

				Matcher matcher = _pattern.matcher(contextName);

				if (matcher.matches()) {
					contextName = matcher.group(1);
				}

				String pathWithQueryString =
					filePath + "?Web-ContextPath=/" + contextName;

				URL url = new URL("file", null, pathWithQueryString);

				pathWithProtocol = "webbundle:" + url.toString();
			}

			long bundleId = _frameworkMBean.installBundle(pathWithProtocol);

			_installedBundles.add(bundleId);

			_frameworkMBean.startBundle(bundleId);

			_awaitUntilBundleActive(bundleId);

			if (!contextName.isEmpty()) {
				_awaitUntilLegacyPluginDeployed(contextName);
			}
		}
		catch (IOException ioe) {
			if (ioe.getMessage().contains("A bundle is already installed")) {
				_log.warning("The bundle was already installed " + filePath);
			}
			else {
				throw new LifecycleException(
					"The bundle in the path " + filePath +
						" can't be found, so it can't be installed",
					ioe);
			}
		}
		catch (InterruptedException ie) {
			throw new LifecycleException("InterruptedException", ie);
		}
		catch (TimeoutException te) {
			throw new LifecycleException("Timeout exception", te);
		}
	}

	/**
	 * Builds a small helper bundle (PortalURLBundleActivator + servlet) with
	 * ShrinkWrap, exports it to a temp jar, and installs it so tests can
	 * obtain portal URLs.
	 *
	 * @throws LifecycleException if the install fails
	 */
	private void _installPortalDependencies() throws LifecycleException {
		JavaArchive archive = ShrinkWrap.create(
			JavaArchive.class, "arquillian-install-portlet-in-liferay.jar");

		archive.addClass(PortalURLBundleActivator.class);
		archive.addClass(PortalURLServlet.class);

		archive.setManifest(
			new Asset() {

				@Override
				public InputStream openStream() {
					OSGiManifestBuilder builder =
						OSGiManifestBuilder.newInstance();

					builder.addBundleManifestVersion(2);
					builder.addBundleSymbolicName(
						"arquillian-install-portlet-in-liferay");
					builder.addImportPackages(
						"com.liferay.portal.kernel.exception",
						"com.liferay.portal.kernel.util",
						"com.liferay.portal.kernel.model",
						"com.liferay.portal.kernel.service",
						"javax.servlet.http", "javax.portlet",
						"javax.servlet", "org.osgi.framework");
					builder.addBundleActivator(PortalURLBundleActivator.class);

					return builder.openStream();
				}

			});

		UUID uuid = UUID.randomUUID();

		File tmpfile = new File("tmpfiles" + uuid.toString() + ".jar");

		ZipExporter exporter = archive.as(ZipExporter.class);

		exporter.exportTo(tmpfile);

		_installBundle(tmpfile.getAbsolutePath());

		tmpfile.deleteOnExit();
	}

	// Prefix for direct-path dependency lines in the property file.
	private static final String _FILE_PREFIX = "file";

	// Prefix for Maven-coordinate dependency lines in the property file.
	private static final String _MAVEN_PREFIX = "mvn";

	private static final Logger _log = Logger.getLogger(
		LiferayInstallDependenciesObserver.class.getName());

	// Captures a WAR base name in group 1, optionally stripping a trailing
	// 4-part numeric version suffix ("-1.2.3.4").
	private static final Pattern _pattern = Pattern.compile(
		"(.*?)(-\\d+\\.\\d+\\.\\d+\\.\\d+)?");

	private BundleStateMBean _bundleStateMBean;

	@ApplicationScoped
	@Inject
	private Instance<LiferayRemoteContainerConfiguration>
		_configurationInstance;

	@Inject
	private Instance<ContainerRegistry> _containerRegistryInstance;

	private FrameworkMBean _frameworkMBean;

	// Bundles installed by this observer; uninstalled on stopContainer.
	private List<Long> _installedBundles;

	private PluginMBeanManager _pluginsManagerMBean;

	@ContainerScoped
	@Inject
	private InstanceProducer<MBeanServerConnection> mbeanServerInstance;

}
liferay-labs/arquillian-liferay
arquillian-container-liferay/src/main/java/com/liferay/arquillian/container/remote/installdependency/LiferayInstallDependenciesObserver.java
Java
apache-2.0
15,169
/* * Copyright 2011 Thingtrack, S.L. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.thingtrack.konekti.dao.impl.internal; import java.util.List; import org.springframework.stereotype.Repository; import com.thingtrack.konekti.dao.template.JpaDao; import com.thingtrack.konekti.dao.api.LocationDao; import com.thingtrack.konekti.domain.Location; import com.thingtrack.konekti.domain.Organization; /** * @author Thingtrack S.L. * */ @Repository public class LocationDaoImpl extends JpaDao<Location, Integer> implements LocationDao { @Override public Location getByName(String name) throws Exception { Location location = (Location)getEntityManager() .createQuery("SELECT p FROM " + getEntityName() + " p WHERE p.name = :name") .setParameter("name", name).getSingleResult(); return location; } @SuppressWarnings("unchecked") @Override public List<Location> getAllByOrganization(Organization organization) throws Exception { return getEntityManager() .createQuery("SELECT p FROM " + getEntityName() + " p WHERE :organization MEMBER OF p.organizations") .setParameter("organization", organization).getResultList(); } }
thingtrack/konekti
core/konekti.dao.impl/src/main/java/com/thingtrack/konekti/dao/impl/internal/LocationDaoImpl.java
Java
apache-2.0
1,676
package com.android.adapterpattern.classadapter; /** * Description: #TODO * * @author zzp(zhao_zepeng@hotmail.com) * @since 2016-06-05 */ public class OutletAdapter extends HKOutlet implements IChinaOutlet{ @Override public String getChinaType() { String type = getHKType(); type = type.replace("Chinese", "British"); return type; } }
zhaozepeng/Design-Patterns
AdapterPattern/app/src/main/java/com/android/adapterpattern/classadapter/OutletAdapter.java
Java
apache-2.0
377
package tenkamochi2.form.admin; import java.util.List; import java.util.Map; import org.seasar.struts.annotation.Maxlength; import org.seasar.struts.annotation.Required; import tenkamochi2.entity.TClub; public class ClubForm { public Integer id; /* 部名 */ @Required @Maxlength(maxlength=10) public String ClubName; /* 部長のID */ @Required @Maxlength(maxlength=10) public String OfficerId; /* 部の概要 */ @Required @Maxlength(maxlength=10) public String ClubMemo; /* Clubのリスト */ public List<TClub> clubItems; /* Clubのマップ */ public List<Map<Integer,String>> clubMap; /* 選択した項目のパラメータ */ public String[] club_checks = new String[0]; }
dawachin/TenkamochiSystem
src/main/java/tenkamochi2/form/admin/ClubForm.java
Java
apache-2.0
729
/* * Copyright 2011 Matthew Avery, mavery@advancedpwr.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.advancedpwr.record.methods; import com.advancedpwr.record.AccessPath; public class DoubleBuilder extends AbstractPrimitiveBuilder implements MethodWriterFactory { public String resultBuilder() { return "new Double( " + result() + ")"; } /* (non-Javadoc) * @see com.advancedpwr.record.methods.Factory#accept(java.lang.Class) */ public boolean accept( Class inClass ) { return double.class.isAssignableFrom( inClass ) || Double.class.isAssignableFrom( inClass ); } /* (non-Javadoc) * @see com.advancedpwr.record.methods.Factory#createMethodBuilder(com.advancedpwr.record.AccessPath) */ public BuildMethodWriter createMethodBuilder( AccessPath inPath ) { return new DoubleBuilder(); } }
avery1701/thor
src/main/java/com/advancedpwr/record/methods/DoubleBuilder.java
Java
apache-2.0
1,356
package com.flipkart.foxtrot.server.resources; import com.flipkart.foxtrot.core.auth.FoxtrotRole; import com.flipkart.foxtrot.core.auth.User; import com.flipkart.foxtrot.server.auth.AuthConfig; import com.flipkart.foxtrot.server.auth.AuthStore; import com.flipkart.foxtrot.server.auth.TokenType; import com.flipkart.foxtrot.server.auth.io.CreateUserRequest; import com.flipkart.foxtrot.server.utils.AuthUtils; import io.swagger.annotations.Api; import lombok.val; import org.hibernate.validator.constraints.NotEmpty; import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.inject.Provider; import javax.validation.Valid; import javax.validation.constraints.NotNull; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.Collections; import java.util.Date; /** * */ @Path("/v1/auth") @Produces(MediaType.APPLICATION_JSON) @Api("Auth related APIs. DO NOT expose these for public access.") @RolesAllowed(FoxtrotRole.Value.SYSADMIN) public class Auth { private final Provider<AuthStore> authProvider; private final AuthConfig authConfig; @Inject public Auth(Provider<AuthStore> authProvider, AuthConfig authConfig) { this.authProvider = authProvider; this.authConfig = authConfig; } @POST @Path("/users") @Consumes(MediaType.APPLICATION_JSON) public Response provisionUser(@NotNull @Valid final CreateUserRequest createUserRequest) { val user = new User(createUserRequest.getId(), createUserRequest.getRoles(), createUserRequest.getTables(), createUserRequest.isSystem(), new Date(), new Date()); return Response.ok(authProvider.get().provisionUser(user)).build(); } @GET @Path("/users/{userId}") public Response getUser(@NotNull @NotEmpty @PathParam("userId") final String userId) { return Response.ok(authProvider.get().getUser(userId)).build(); } @PUT @Path("/users/{userId}/roles/grant/{role}") public Response grantRole(@NotNull @NotEmpty @PathParam("userId") final String userId, @NotNull @PathParam("role") 
final FoxtrotRole role) { val status = authProvider.get() .grantRole(userId, role); return updateUserResponse(userId, status); } @PUT @Path("/users/{userId}/roles/revoke/{role}") public Response revokeRole(@NotNull @NotEmpty @PathParam("userId") final String userId, @NotNull @PathParam("role") final FoxtrotRole role) { val status = authProvider.get() .revokeRole(userId, role); return updateUserResponse(userId, status); } @PUT @Path("/users/{userId}/tables/access/grant/{table}") public Response grantTableAccess(@NotNull @NotEmpty @PathParam("userId") final String userId, @NotNull @NotEmpty @PathParam("table") final String table) { val status = authProvider.get() .grantTableAccess(userId, table); return updateUserResponse(userId, status); } @PUT @Path("/users/{userId}/tables/access/revoke/{table}") public Response revokeTableAccess(@NotNull @NotEmpty @PathParam("userId") final String userId, @NotNull @NotEmpty @PathParam("table") final String table) { val status = authProvider.get() .revokeTableAccess(userId, table); return updateUserResponse(userId, status); } @DELETE @Path("/users/{userId}") public Response deleteUser(@NotNull @NotEmpty @PathParam("userId") final String userId) { final boolean status = authProvider.get().deleteUser(userId); if(!status) { return Response.notModified().build(); } return Response.ok().build(); } @POST @Path("/tokens/{userId}") public Response provisionToken(@NotNull @NotEmpty @PathParam("userId") final String userId) { val token = authProvider.get().provisionToken(userId, TokenType.STATIC, null).orElse(null); if(null == token) { return Response.notModified().build(); } return Response .ok(Collections.singletonMap("jwt", AuthUtils.createJWT(token, authConfig.getJwt()))) .build(); } @GET @Path("/tokens/{tokenId}") public Response getToken(@NotNull @NotEmpty @PathParam("tokenId") final String tokenId) { return Response.ok(authProvider.get().getToken(tokenId)) .build(); } @DELETE @Path("/tokens/{userId}") public Response 
deleteToken(@NotNull @NotEmpty @PathParam("userId") final String userId, @NotNull @NotEmpty @PathParam("tokenId") final String tokenId) { val status = authProvider.get().deleteToken(tokenId); if(!status) { return Response.notModified().build(); } return Response.ok().build(); } private Response updateUserResponse(String userId, boolean status) { if (!status) { return Response.notModified() .build(); } return Response.ok() .entity(authProvider.get().getUser(userId)) .build(); } }
Flipkart/foxtrot
foxtrot-server/src/main/java/com/flipkart/foxtrot/server/resources/Auth.java
Java
apache-2.0
5,324
//============================================================================
//
// Copyright (C) 2006-2022 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//============================================================================
package org.talend.components.marketo.runtime.client;

import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.IndexedRecord;
import org.junit.Before;
import org.junit.Test;
import org.talend.components.marketo.MarketoConstants;
import org.talend.components.marketo.runtime.client.rest.response.SyncResult;
import org.talend.components.marketo.runtime.client.type.MarketoException;
import org.talend.components.marketo.runtime.client.type.MarketoRecordResult;
import org.talend.components.marketo.tmarketoinput.TMarketoInputProperties;
import org.talend.components.marketo.tmarketoinput.TMarketoInputProperties.StandardAction;
import org.talend.components.marketo.tmarketooutput.TMarketoOutputProperties;
import org.talend.components.marketo.tmarketooutput.TMarketoOutputProperties.CustomObjectDeleteBy;
import org.talend.components.marketo.wizard.MarketoComponentWizardBaseProperties.CustomObjectSyncAction;
import org.talend.components.marketo.wizard.MarketoComponentWizardBaseProperties.InputOperation;
import org.talend.components.marketo.wizard.MarketoComponentWizardBaseProperties.OutputOperation;

import com.google.gson.JsonObject;

/**
 * Unit tests for the Opportunity / OpportunityRole operations of the Marketo REST
 * client. Inherits the mocked {@code client} from {@link MarketoLeadClientTest};
 * each test stubs {@code executeGetRequest}/{@code executePostRequest} to simulate
 * the error, empty-result and success paths in turn.
 */
public class MarketoOpportunityClientTest extends MarketoLeadClientTest {

    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Input properties wired against a fake endpoint; no network calls are made
        // because the request-executing methods are stubbed in each test.
        iprops = new TMarketoInputProperties("test");
        iprops.schemaInput.setupProperties();
        iprops.schemaInput.setupLayout();
        iprops.connection.setupProperties();
        iprops.connection.setupLayout();
        iprops.connection.endpoint.setValue("https://fake.io/rest");
        iprops.connection.clientAccessId.setValue("clientaccess");
        iprops.connection.secretKey.setValue("sekret");
        iprops.connection.attemptsIntervalTime.setValue(200); // shorten interval for tests
        iprops.setupProperties();
        iprops.setupLayout();
        iprops.inputOperation.setValue(InputOperation.Opportunity);
        // Output properties mirror the input setup.
        oprops = new TMarketoOutputProperties("test");
        oprops.schemaInput.setupProperties();
        oprops.schemaInput.setupLayout();
        oprops.connection.setupProperties();
        oprops.connection.setupLayout();
        oprops.connection.endpoint.setValue("https://fake.io/rest");
        oprops.connection.clientAccessId.setValue("clientaccess");
        oprops.connection.secretKey.setValue("sekret");
        oprops.connection.attemptsIntervalTime.setValue(200); // shorten interval for tests
        oprops.setupProperties();
        oprops.setupLayout();
    }

    @Test
    public void testDescribeOpportunity() throws Exception {
        // Build a canned describe result with one custom-object description record.
        MarketoRecordResult cor = new MarketoRecordResult();
        cor.setSuccess(true);
        List<IndexedRecord> cos = new ArrayList<>();
        IndexedRecord co = new Record(MarketoConstants.getCustomObjectDescribeSchema());
        co.put(0, "car_c");
        co.put(1, "marketoGUID");
        co.put(2, "Car");
        co.put(3, "Car system");
        co.put(4, new Date());
        co.put(5, new Date());
        co.put(6, "");
        co.put(7, "{ \"brand\", \"model\" }");
        co.put(8, "{}");
        co.put(9, "{}");
        cos.add(co);
        cor.setRecords(cos);
        //
        iprops.standardAction.setValue(StandardAction.describe);
        //
        doThrow(new MarketoException("REST", "error")).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.describeOpportunity(iprops);
        assertFalse(mktoRR.isSuccess());
        assertFalse(mktoRR.getErrorsString().isEmpty());
        //
        doReturn(new MarketoRecordResult()).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.describeOpportunity(iprops);
        assertFalse(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
        //
        doReturn(cor).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.describeOpportunity(iprops);
        assertTrue(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
        // same canned result, OpportunityRole flavor
        iprops.inputOperation.setValue(InputOperation.OpportunityRole);
        doReturn(cor).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.describeOpportunity(iprops);
        assertTrue(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
    }

    @Test
    public void testGetOpportunities() throws Exception {
        iprops.standardAction.setValue(StandardAction.get);
        doThrow(new MarketoException("REST", "error")).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.getOpportunities(iprops, null);
        assertFalse(mktoRR.isSuccess());
        assertFalse(mktoRR.getErrorsString().isEmpty());
        //
        doReturn(new MarketoRecordResult()).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.getOpportunities(iprops, null);
        assertFalse(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
        //
        MarketoRecordResult mrr = new MarketoRecordResult();
        mrr.setSuccess(true);
        mrr.setRemainCount(0);
        mrr.setRecordCount(1);
        List<IndexedRecord> records = new ArrayList<>();
        IndexedRecord record = new Record(MarketoConstants.getCustomObjectRecordSchema());
        record.put(0, "mkto-123456");
        record.put(1, 0);
        record.put(2, new Date());
        record.put(3, new Date());
        records.add(record);
        mrr.setRecords(records);
        doReturn(mrr).when(client).executeGetRequest(any(Schema.class));
        mktoRR = client.getOpportunities(iprops, null);
        assertTrue(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
        //
        doReturn(mrr).when(client).executeFakeGetRequest(any(Schema.class), anyString());
        iprops.inputOperation.setValue(InputOperation.OpportunityRole);
        iprops.useCompoundKey.setValue(true);
        iprops.compoundKey.keyName.setValue(Arrays.asList("externalOpportunityId", "leadId", "role"));
        iprops.compoundKey.keyValue.setValue(Arrays.asList("opp00", "12345", "roly"));
        mktoRR = client.getOpportunities(iprops, null);
        assertTrue(mktoRR.isSuccess());
        assertTrue(mktoRR.getErrorsString().isEmpty());
    }

    @Test
    public void testSyncOpportunities() throws Exception {
        oprops.outputOperation.setValue(OutputOperation.syncOpportunities);
        oprops.customObjectSyncAction.setValue(CustomObjectSyncAction.createOrUpdate);
        //
        doThrow(new MarketoException("REST", "error")).when(client).executePostRequest(eq(SyncResult.class),
                any(JsonObject.class));
        List<IndexedRecord> records = new ArrayList<>();
        IndexedRecord record = new Record(MarketoConstants.getCustomObjectRecordSchema());
        record.put(0, "mkto-123456");
        records.add(record);
        mktoSR = client.syncOpportunities(oprops, records);
        assertFalse(mktoSR.isSuccess());
        assertFalse(mktoSR.getErrorsString().isEmpty());
        //
        doReturn(new SyncResult()).when(client).executePostRequest(eq(SyncResult.class), any(JsonObject.class));
        mktoSR = client.syncOpportunities(oprops, records);
        assertFalse(mktoSR.isSuccess());
        //
        doReturn(getListOperationResult(true, "deleted")).when(client).executePostRequest(eq(SyncResult.class),
                any(JsonObject.class));
        mktoSR = client.syncOpportunities(oprops, records);
        assertTrue(mktoSR.isSuccess());
        assertTrue(mktoSR.getErrorsString().isEmpty());
    }

    @Test
    public void testDeleteOpportunities() throws Exception {
        oprops.customObjectDeleteBy.setValue(CustomObjectDeleteBy.idField);
        //
        doThrow(new MarketoException("REST", "error")).when(client).executePostRequest(eq(SyncResult.class),
                any(JsonObject.class));
        List<IndexedRecord> records = new ArrayList<>();
        IndexedRecord record = new Record(MarketoConstants.getCustomObjectRecordSchema());
        record.put(0, "mkto-123456");
        records.add(record);
        mktoSR = client.deleteOpportunities(oprops, records);
        assertFalse(mktoSR.isSuccess());
        assertFalse(mktoSR.getErrorsString().isEmpty());
        //
        doReturn(new SyncResult()).when(client).executePostRequest(eq(SyncResult.class), any(JsonObject.class));
        mktoSR = client.deleteOpportunities(oprops, records);
        assertFalse(mktoSR.isSuccess());
        //
        doReturn(getListOperationResult(true, "deleted")).when(client).executePostRequest(eq(SyncResult.class),
                any(JsonObject.class));
        mktoSR = client.deleteOpportunities(oprops, records);
        assertTrue(mktoSR.isSuccess());
        assertTrue(mktoSR.getErrorsString().isEmpty());
    }
}
Talend/components
components/components-marketo/components-marketo-runtime/src/test/java/org/talend/components/marketo/runtime/client/MarketoOpportunityClientTest.java
Java
apache-2.0
9,833
package com.example.wujie.zhihu.Fragment;

import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.OrientationHelper;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.Volley;
import com.example.wujie.zhihu.Activity.ItemActivity;
import com.example.wujie.zhihu.Adapter.RecyclerViewAdapter;
import com.example.wujie.zhihu.GsonRequest;
import com.example.wujie.zhihu.Info.JsonLatestNews;
import com.example.wujie.zhihu.Interface.OnRecyclerItemClickListener;
import com.example.wujie.zhihu.R;
import com.example.wujie.zhihu.ZhiHuDailyApplication;
import com.example.wujie.zhihu.db.DBHelper;
import com.example.wujie.zhihu.support.Constants;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

/**
 * Home news-list fragment: shows the daily stories in a RecyclerView with
 * pull-to-refresh (reload from {@code mUrl}) and infinite scroll (load the
 * previous day's news, first from the local database, then from the network).
 * Acts as its own Volley success/error listener for {@link JsonLatestNews}.
 *
 * Created by wujie on 2016/3/21.
 */
public class HomeFragment extends Fragment implements Response.Listener<JsonLatestNews>, Response.ErrorListener{
    private static final String EXTRA_MESSAGE = "url";
    private Context context;
    private RecyclerView mRecyclerView;
    private SwipeRefreshLayout mSwipeRefreshLayout;
    private View view;
    // URL used for pull-to-refresh, supplied via newInstance() arguments.
    private String mUrl;
    // Backing list for click handling; element maps hold Stories_Title/Stories_Url/Stories_Id
    // (plus one leading Top_Stories_* map when top stories are present).
    private ArrayList<HashMap<String, Object>> itemList;
    private RecyclerViewAdapter mRecyclerViewAdapter;
    private LinearLayoutManager linearLayoutManager;
    // Date key (yyyyMMdd-style int, from JsonLatestNews.getDate()) of the oldest day shown.
    private int visibleNewsDate;
    // Guard so the scroll listener does not trigger overlapping "load older" requests.
    private boolean isLoad = false;

    /**
     * Factory method; packs the feed URL into the fragment arguments.
     *
     * @param mUrl URL of the latest-news endpoint for this fragment
     */
    public static HomeFragment newInstance(String mUrl){
        HomeFragment f = new HomeFragment();
        Bundle bundle = new Bundle();
        bundle.putString(EXTRA_MESSAGE, mUrl);
        f.setArguments(bundle);
        return f;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // Restore the cached news list off the UI thread; THREAD_POOL_EXECUTOR so
        // tasks run concurrently instead of on the serial default executor.
        new RecoverNewsListTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        context = getActivity();
        mUrl = getArguments().getString(EXTRA_MESSAGE);
        view = inflater.inflate(R.layout.content_fragment, container, false);
        mRecyclerView = (RecyclerView) view.findViewById(R.id.recycleView);
        linearLayoutManager = new LinearLayoutManager(context);
        linearLayoutManager.setOrientation(OrientationHelper.VERTICAL);
        mRecyclerView.setLayoutManager(linearLayoutManager);
        mRecyclerView.setItemAnimator(new DefaultItemAnimator());
        mSwipeRefreshLayout = (SwipeRefreshLayout)view.findViewById(R.id.swipe_refresh_layout);
        mSwipeRefreshLayout.setColorSchemeResources(R.color.colorPrimaryLight);
        itemList = new ArrayList<HashMap<String, Object>>();
        /*mSwipeRefreshLayout.post(new Runnable() { @Override public void run() { mSwipeRefreshLayout.setRefreshing(true); } });*/
        // Adapter starts with an empty list; real data arrives via updateNewsList().
        ArrayList<HashMap<String, Object>> list = new ArrayList<HashMap<String, Object>>();
        mRecyclerViewAdapter = new RecyclerViewAdapter(context, list,
                new int[]{R.layout.view_pager, R.layout.item_main_list, R.layout.background},
                new OnRecyclerItemClickListener() {
            @Override
            public void onItemClick(View view, int position) {
                Intent intent = new Intent();
                intent.setClass(context, ItemActivity.class);
                int id = 0;
                // NOTE (original author): the id read back from storage comes out as a
                // Double — TODO confirm why it is not an Integer.
                if (itemList.get(position).get("Stories_Id") instanceof Double){
                    double m = (double)itemList.get(position).get("Stories_Id");
                    id = (int)Math.floor(m);
                } else {
                    id = (int)itemList.get(position).get("Stories_Id");
                }
                // FIXME (original note): positions beyond itemList's range make the item unclickable.
                intent.putExtra("url", Constants.Url.STORY_DETAIL + id);
                startActivity(intent);
                //getActivity().overridePendingTransition(android.R.anim.slide_in_left, android.R.anim.fade_out);
            }
        });
        mRecyclerView.setAdapter(mRecyclerViewAdapter);
        mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // The widget itself already blocks re-entrant refresh while loading.
                loadNews(mUrl);
            }
        });
        mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
            int lastVisibleItem;

            @Override
            public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                lastVisibleItem = linearLayoutManager.findLastVisibleItemPosition();
            }

            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                // When scrolling stops at the bottom, load the previous day:
                // database first, network fallback.
                if (newState == RecyclerView.SCROLL_STATE_IDLE
                        && lastVisibleItem + 1 == mRecyclerViewAdapter.getItemCount()
                        && !isLoad) {
                    Log.d("TAG", "-------load");
                    setLoad(true);
                    ArrayList<HashMap<String, Object>> list = ZhiHuDailyApplication.getDataBase()
                            .newsOfTheDay(visibleNewsDate - 1, DBHelper.TABLE_NAME);
                    if (list != null){
                        updateList(list);
                        mRecyclerViewAdapter.updateNewsList(list);
                        visibleNewsDate = visibleNewsDate - 1;
                        setLoad(false);
                    } else {
                        // Not cached: fetch from network; setLoad(false) happens in the
                        // response/error callbacks.
                        loadNews(Constants.Url.STORY_BEFORE + visibleNewsDate);
                    }
                }
            }
        });
        return view;
    }

    private void setLoad(boolean load){
        isLoad = load;
    }

    /**
     * Fires an async Volley GET for {@code url}; results arrive in
     * {@link #onResponse(JsonLatestNews)} or {@link #onErrorResponse(VolleyError)}.
     */
    public void loadNews(String url) {
        RequestQueue mQueue = Volley.newRequestQueue(context);
        GsonRequest gsonRequest = new GsonRequest<JsonLatestNews>(url, JsonLatestNews.class, this, this);
        mQueue.add(gsonRequest);
    }

    @Override
    public void onErrorResponse(VolleyError volleyError) {
        Log.e("error", "error!!!!!!!");
        mSwipeRefreshLayout.setRefreshing(false);
        setLoad(false);
    }

    @Override
    public void onResponse(JsonLatestNews response) {
        ArrayList<HashMap<String, Object>> transList = dealWithResponse(response);
        updateList(transList);
        mRecyclerViewAdapter.updateNewsList(transList);
        visibleNewsDate = Integer.parseInt(response.getDate());
        // Top stories are only present in the "latest" payload, i.e. a refresh;
        // otherwise this was a "load older" request.
        if (response.getTop_stories() != null){
            mSwipeRefreshLayout.setRefreshing(false);
        } else if (isLoad) {
            setLoad(false);
        }
        ZhiHuDailyApplication.getDataBase().insertOrUpdateNewsList(visibleNewsDate, DBHelper.TABLE_NAME, transList);
    }

    /**
     * Flattens the parsed JSON into the adapter's list-of-maps shape: an optional
     * leading map of top-story title/image arrays, then one map per story.
     */
    private ArrayList<HashMap<String, Object>> dealWithResponse(JsonLatestNews response){
        ArrayList<HashMap<String, Object>> deList = new ArrayList<HashMap<String, Object>>();
        if (response.getTop_stories()!=null){
            String[] top_Stories_Title = new String[response.getTop_stories().size()];
            String[] top_Stories_Url = new String[response.getTop_stories().size()];
            Iterator iterator_TopStories = response.getTop_stories().iterator();
            for (int i = 0;iterator_TopStories.hasNext();i++){
                JsonLatestNews.Top_stories m = (JsonLatestNews.Top_stories) iterator_TopStories.next();
                top_Stories_Url[i] = m.getImage();
                top_Stories_Title[i] = m.getTitle();
            }
            HashMap<String, Object> map = new HashMap<String, Object>();
            map.put("Top_Stories_Title", top_Stories_Title);
            map.put("Top_Stories_Url", top_Stories_Url);
            deList.add(map);
        }
        // FIXME (original note): repeated refreshes keep appending items; needs a way
        // to detect whether the content was actually refreshed.
        Iterator iterator = response.getStories().iterator();
        for (int i = 0; iterator.hasNext(); i++){
            JsonLatestNews.Stories m = (JsonLatestNews.Stories) iterator.next();
            HashMap<String, Object> map1 = new HashMap<String, Object>();
            map1.put("Stories_Title", m.getTitle());
            if (m.getImages() != null){
                map1.put("Stories_Url", m.getImages().iterator().next());
            } else {
                map1.put("Stories_Url", "");
            }
            // NOTE (original author): watch this value — see the Double cast in onItemClick.
            map1.put("Stories_Id", m.getId());
            deList.add(map1);
        }
        return deList;
    }

    /**
     * Merges freshly loaded items into {@link #itemList}: a list that starts with
     * top stories replaces the whole list (refresh); anything else is appended
     * (load-older).
     */
    public void updateList(ArrayList<HashMap<String, Object>> list){
        if (list.get(0).containsKey("Top_Stories_Title")){
            itemList = list;
        } else {
            itemList.addAll(list);
        }
    }

    /**
     * Restores the most recent cached day of news from the local database off the
     * UI thread, then pushes it into the adapter.
     */
    private class RecoverNewsListTask extends AsyncTask<Void, Void, ArrayList<HashMap<String, Object>>> {

        @Override
        protected ArrayList<HashMap<String, Object>> doInBackground(Void... params) {
            int date = ZhiHuDailyApplication.getDataBase().tableLastNewsId(DBHelper.TABLE_NAME);
            visibleNewsDate = date;
            ArrayList<HashMap<String, Object>> list = ZhiHuDailyApplication.getDataBase()
                    .newsOfTheDay(visibleNewsDate, DBHelper.TABLE_NAME);
            return list;
        }

        @Override
        protected void onPostExecute(ArrayList<HashMap<String, Object>> newsListRecovered) {
            if (newsListRecovered != null) {
                mRecyclerViewAdapter.updateNewsList(newsListRecovered);
                updateList(newsListRecovered);
            }
        }
    }
}
ThinkandAction/ZhiHu
app/src/main/java/com/example/wujie/zhihu/Fragment/HomeFragment.java
Java
apache-2.0
10,207
package xdroid.app; import android.app.Application; import android.content.Context; import android.content.Intent; import xdroid.core.ActivityStarter; import xdroid.core.ContextOwner; import xdroid.core.Global; /** * @author Oleksii Kropachov (o.kropachov@shamanland.com) */ public class ApplicationX extends Application implements ActivityStarter, ContextOwner { @Override public Context getContext() { return this; } @Override public void startActivityForResult(Intent intent, int requestCode) { startActivity(intent); } @Override protected void attachBaseContext(Context base) { Global.setContext(this); super.attachBaseContext(base); } }
shamanland/xdroid
lib-app/src/main/java/xdroid/app/ApplicationX.java
Java
apache-2.0
717
package tasks.hasselt;

import java.util.LinkedList;
import java.util.List;

import com.systemincloud.modeler.tasks.javatask.api.InputPort;
import com.systemincloud.modeler.tasks.javatask.api.JavaTask;
import com.systemincloud.modeler.tasks.javatask.api.OutputPort;
import com.systemincloud.modeler.tasks.javatask.api.annotations.InputPortInfo;
import com.systemincloud.modeler.tasks.javatask.api.annotations.JavaTaskInfo;
import com.systemincloud.modeler.tasks.javatask.api.annotations.OutputPortInfo;
import com.systemincloud.ext.vip.modeler.api.javatask.data.Image;
import com.systemincloud.modeler.tasks.javatask.api.data.Control;
import com.systemincloud.modeler.tasks.javatask.api.data.Bool;
import com.systemincloud.modeler.tasks.javatask.api.data.Float32;
import com.systemincloud.modeler.tasks.javatask.api.data.Int32;

/**
 * Defogging task: accumulates per-iteration fog masks and attenuation
 * coefficients for one input image, then, on the asynchronous "end" signal,
 * removes the fog layer-by-layer and emits the restored image.
 *
 * Per-image cycle: the first execute() captures the image and the atmospheric
 * light A (r,g,b from the A8 port); every execute() appends one mask and one
 * coefficient and acknowledges via Ack; the "end" control triggers
 * executeAsync(), which produces Out and resets state for the next image.
 */
@JavaTaskInfo
public class Defog extends JavaTask {

    @InputPortInfo(name = "In", dataType = Image.class)
    public InputPort in;
    @InputPortInfo(name = "A8", dataType = Int32.class)
    public InputPort a8;
    @InputPortInfo(name = "Mask", dataType = Bool.class)
    public InputPort m;
    @InputPortInfo(name = "c_i", dataType = Float32.class)
    public InputPort c_i;
    @InputPortInfo(name = "end", dataType = Control.class, asynchronous = true)
    public InputPort end;

    @OutputPortInfo(name = "Ack", dataType = Control.class)
    public OutputPort ack;
    @OutputPortInfo(name = "Out", dataType = Image.class)
    public OutputPort out;

    // Fog masks accumulated for the current image, in arrival order.
    private List<Bool> masks = new LinkedList<>();
    // Attenuation coefficients paired with the masks.
    private List<Float> cs = new LinkedList<>();

    private boolean initialized = false;
    private Image img;
    // Atmospheric light components (red, green, blue).
    private int ar;
    private int ag;
    private int ab;

    @Override
    public void execute(int grp) {
        if(!initialized) init();
        masks.add(m.getData(Bool.class));
        cs.add(c_i.getData(Float32.class).getValue());
        ack.putData(new Control());
    }

    /** Captures the input image and unpacks the atmospheric light triple. */
    private void init() {
        this.img = in.getData(Image.class);
        int[] a = a8.getData(Int32.class).getValues();
        this.ar = a[0];
        this.ag = a[1];
        this.ab = a[2];
        this.initialized = true;
    }

    @Override
    public void executeAsync(InputPort asynchIn) {
        int[] inValues = img.getValues();
        int[] r = new int[img.getNumberOfElements()];
        int[] g = new int[img.getNumberOfElements()];
        int[] b = new int[img.getNumberOfElements()];
        // Seed the working channels from pixels covered by the last (densest) mask.
        boolean[] fog = masks.get(masks.size() - 1).getValues();
        for(int i = 0; i < r.length; i++) {
            if(fog[i]) {
                int pixel = inValues[i];
                r[i] = (pixel >> 16 & 0xff);
                g[i] = (pixel >> 8 & 0xff);
                b[i] = (pixel & 0xff);
            }
        }
        // Peel fog layers from densest-1 down to the first mask, inverting the
        // haze model J = (I - c*A) / (1 - c) and blending 3:1 with the previous
        // estimate where consecutive masks overlap.
        for(int i = masks.size() - 2; i >= 0; i--) {
            boolean[] fog1 = masks.get(i + 1).getValues();
            boolean[] fog2 = masks.get(i).getValues();
            float c = cs.get(i).floatValue();
            for(int j = 0; j < r.length; j++) {
                if(fog2[j]) {
                    int pixel = inValues[j];
                    int r_tmp = (int) (((pixel >> 16 & 0xff) - c*ar)*(1/(1-c)));
                    int g_tmp = (int) (((pixel >> 8 & 0xff) - c*ag)*(1/(1-c)));
                    int b_tmp = (int) (((pixel & 0xff) - c*ab)*(1/(1-c)));
                    if(fog2[j] && !fog1[j]) {
                        r[j] = r_tmp;
                        g[j] = g_tmp;
                        b[j] = b_tmp;
                    } else if(fog2[j] && fog1[j]) {
                        r[j] = 3*(r[j] >> 2) + (r_tmp >> 2);
                        g[j] = 3*(g[j] >> 2) + (g_tmp >> 2);
                        b[j] = 3*(b[j] >> 2) + (b_tmp >> 2);
                    }
                    if(r[j] < 0) r[j] = 0;
                    if(g[j] < 0) g[j] = 0;
                    if(b[j] < 0) b[j] = 0;
                    if(r[j] > 255) r[j] = 255;
                    if(g[j] > 255) g[j] = 255;
                    if(b[j] > 255) b[j] = 255;
                }
            }
        }
        // Fog-free pixels pass through; defogged pixels get a final 3:1 blend
        // with the original, clamped to [0, 255].
        boolean[] noFog = masks.get(0).getValues();
        for(int i = 0; i < r.length; i++) {
            int pixel = inValues[i];
            if(!noFog[i]) {
                r[i] = (pixel >> 16 & 0xff);
                g[i] = (pixel >> 8 & 0xff);
                b[i] = (pixel & 0xff);
            } else {
                r[i] = 3*(r[i] >> 2) + ((pixel >> 16 & 0xff) >> 2);
                g[i] = 3*(g[i] >> 2) + ((pixel >> 8 & 0xff) >> 2);
                b[i] = 3*(b[i] >> 2) + ((pixel & 0xff) >> 2);
                if(r[i] < 0) r[i] = 0;
                if(g[i] < 0) g[i] = 0;
                if(b[i] < 0) b[i] = 0;
                if(r[i] > 255) r[i] = 255;
                if(g[i] > 255) g[i] = 255;
                if(b[i] > 255) b[i] = 255;
            }
        }
        int[] outValues = new int[r.length];
        for(int i = 0; i < outValues.length; i++) outValues[i] = (r[i] << 16) | (g[i] << 8) | b[i];
        out.putData(new Image(outValues, img.getH(), img.getW()));
        // BUG FIX: the accumulated masks/coefficients must be discarded along with
        // the initialized flag, otherwise the next image's cycle appends to stale
        // state from the previous image and indexes the wrong masks.
        masks.clear();
        cs.clear();
        this.initialized = false;
    }
}
systemincloud/sic-examples-ext-vip
com.systemincloud.ext.vip.examples.uc.defog/src/main/java/tasks/hasselt/Defog.java
Java
apache-2.0
4,339
package com.shunwang.apitools.util.wildcard;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Recursively scans a root directory and collects relative file paths that
 * match at least one case-insensitive include regex and no exclude regex.
 * Matching happens eagerly in the constructor; results are read via
 * {@link #matches()}.
 */
class RegexScanner {
    private final File rootDir;
    private final List<Pattern> includePatterns;
    private final List<String> matches = new ArrayList<String>(128);

    /**
     * @param rootDir  existing directory to scan (canonicalized internally)
     * @param includes regex strings a path must match to be collected
     * @param excludes regex strings that remove already-collected paths
     * @throws IllegalArgumentException if any argument is null or rootDir is not an existing directory
     * @throws RuntimeException if the canonical path cannot be determined
     */
    public RegexScanner (File rootDir, List<String> includes, List<String> excludes) {
        if (rootDir == null) throw new IllegalArgumentException("rootDir cannot be null.");
        if (!rootDir.exists()) throw new IllegalArgumentException("Directory does not exist: " + rootDir);
        if (!rootDir.isDirectory()) throw new IllegalArgumentException("File must be a directory: " + rootDir);
        try {
            rootDir = rootDir.getCanonicalFile();
        } catch (IOException ex) {
            throw new RuntimeException("OS error determining canonical path: " + rootDir, ex);
        }
        this.rootDir = rootDir;
        if (includes == null) throw new IllegalArgumentException("includes cannot be null.");
        if (excludes == null) throw new IllegalArgumentException("excludes cannot be null.");
        includePatterns = new ArrayList<Pattern>();
        for (String include : includes)
            includePatterns.add(Pattern.compile(include, Pattern.CASE_INSENSITIVE));
        List<Pattern> excludePatterns = new ArrayList<Pattern>();
        for (String exclude : excludes)
            excludePatterns.add(Pattern.compile(exclude, Pattern.CASE_INSENSITIVE));
        scanDir(rootDir);
        // Drop matches hit by any exclude pattern. The break after remove() fixes
        // an IllegalStateException the original threw when two exclude patterns
        // matched the same path (Iterator.remove() called twice per element).
        for (Iterator<String> matchIter = matches.iterator(); matchIter.hasNext();) {
            String filePath = matchIter.next();
            for (Pattern exclude : excludePatterns) {
                if (exclude.matcher(filePath).matches()) {
                    matchIter.remove();
                    break;
                }
            }
        }
    }

    /** Depth-first scan; adds paths relative to rootDir that match an include. */
    private void scanDir (File dir) {
        File[] children = dir.listFiles();
        // listFiles() returns null on I/O error or permission denial — skip such dirs
        // instead of throwing NullPointerException.
        if (children == null) return;
        // Length of the root prefix to strip, including the trailing separator.
        int length = rootDir.getPath().length();
        if (!rootDir.getPath().endsWith(File.separator)) length++; // Lose starting slash.
        for (File file : children) {
            String filePath = file.getPath().substring(length);
            for (Pattern include : includePatterns) {
                if (include.matcher(filePath).matches()) {
                    matches.add(filePath);
                    break;
                }
            }
            if (file.isDirectory()) scanDir(file);
        }
    }

    /** @return relative paths (under rootDir) that survived include/exclude filtering */
    public List<String> matches () {
        return matches;
    }

    /** @return the canonicalized root directory that was scanned */
    public File rootDir () {
        return rootDir;
    }

    /** Ad-hoc manual test harness; not used in production. */
    public static void main (String[] args) {
        // System.out.println(new Paths("C:\\Java\\ls", "**"));
        List<String> includes = new ArrayList<String>();
        includes.add("core[^T]+php");
        // includes.add(".*/lavaserver/.*");
        List<String> excludes = new ArrayList<String>();
        // excludes.add("website/**/doc**");
        long start = System.nanoTime();
        List<String> files = new RegexScanner(new File("..\\website\\includes"), includes, excludes).matches();
        long end = System.nanoTime();
        System.out.println(files.toString().replaceAll(", ", "\n").replaceAll("[\\[\\]]", ""));
        System.out.println((end - start) / 1000000f);
    }
}
waitttttttttttttttttttttttting/apitools
apitools-web/src/main/java/com/shunwang/apitools/util/wildcard/RegexScanner.java
Java
apache-2.0
2,847
/*** Copyright (c) 2015 CommonsWare, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.commonsware.cwac.cam2; import com.commonsware.cwac.cam2.util.Size; import java.util.ArrayList; /** * A representation of a camera. Specific camera engines will * have their own descriptors, which they can use for tracking * IDs or other information about the camera. From the standpoint * of the public API, a descriptor should be fairly opaque, supplying * information about the camera capabilities, but nothing more. */ public interface CameraDescriptor { /** * @return The possible preview sizes for the camera, in no * particular order */ ArrayList<Size> getPreviewSizes(); /** * @return The possible picture sizes for the camera, in no * particular order */ ArrayList<Size> getPictureSizes(); /** * Indicates if the camera (and this library) supports a * particular image format for pictures. * * @param format an ImageFormat value (e.g., ImageFormat.JPEG) * @return true if supported, false otherwise */ boolean isPictureFormatSupported(int format); }
rabl-dev/cwac-cam2
cam2/src/main/java/com/commonsware/cwac/cam2/CameraDescriptor.java
Java
apache-2.0
1,603
/* * * Copyright 2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.test.contract.swagger; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import springfox.test.contract.swagger.models.ModelWithSameNameClasses; @Controller @RequestMapping("/same") public class SameController { @RequestMapping(value = "/create-same", method = RequestMethod.PUT) public void create(@RequestBody ModelWithSameNameClasses toCreate) { throw new UnsupportedOperationException(); } @RequestMapping(value = "/get-same/{id}", method = RequestMethod.GET) @ApiResponses(value = { @ApiResponse(code = 405, message = "Invalid input") }) public ModelWithSameNameClasses get(@PathVariable("id") String id) { throw new UnsupportedOperationException(); } }
springfox/springfox
swagger-contract-tests/src/main/java/springfox/test/contract/swagger/SameController.java
Java
apache-2.0
1,673
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.notebooks.v1;

import com.google.api.core.BetaApi;
import com.google.cloud.notebooks.v1.ManagedNotebookServiceGrpc.ManagedNotebookServiceImplBase;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import io.grpc.stub.StreamObserver;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import javax.annotation.Generated;

/**
 * In-memory mock of the ManagedNotebookService gRPC server for unit tests:
 * records every incoming request and replays canned responses (or exceptions)
 * from a FIFO queue. The original generated per-method dispatch bodies were
 * nine identical copies; they are deduplicated into {@link #respond}, which
 * produces byte-identical behavior and error messages.
 */
@BetaApi
@Generated("by gapic-generator-java")
public class MockManagedNotebookServiceImpl extends ManagedNotebookServiceImplBase {
  private List<AbstractMessage> requests;
  private Queue<Object> responses;

  public MockManagedNotebookServiceImpl() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  /** @return all requests received so far, in arrival order */
  public List<AbstractMessage> getRequests() {
    return requests;
  }

  /** Enqueues one canned response to be returned by the next call. */
  public void addResponse(AbstractMessage response) {
    responses.add(response);
  }

  /** Replaces the response queue with the given list. */
  public void setResponses(List<AbstractMessage> responses) {
    this.responses = new LinkedList<Object>(responses);
  }

  /** Enqueues an exception to be delivered via onError by the next call. */
  public void addException(Exception exception) {
    responses.add(exception);
  }

  /** Clears recorded requests and queued responses. */
  public void reset() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  /**
   * Shared dispatch: pops the next canned response; if it is of the expected
   * type, records the request and emits it; if it is an Exception, emits it as
   * an error; otherwise reports an IllegalArgumentException describing the
   * mismatch (message format identical to the previous generated code).
   */
  private <T> void respond(
      AbstractMessage request,
      StreamObserver<T> responseObserver,
      Class<T> expectedClass,
      String methodName) {
    Object response = responses.poll();
    if (expectedClass.isInstance(response)) {
      requests.add(request);
      responseObserver.onNext(expectedClass.cast(response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError(((Exception) response));
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method %s, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  methodName,
                  expectedClass.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listRuntimes(
      ListRuntimesRequest request, StreamObserver<ListRuntimesResponse> responseObserver) {
    respond(request, responseObserver, ListRuntimesResponse.class, "ListRuntimes");
  }

  @Override
  public void getRuntime(GetRuntimeRequest request, StreamObserver<Runtime> responseObserver) {
    respond(request, responseObserver, Runtime.class, "GetRuntime");
  }

  @Override
  public void createRuntime(
      CreateRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "CreateRuntime");
  }

  @Override
  public void deleteRuntime(
      DeleteRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "DeleteRuntime");
  }

  @Override
  public void startRuntime(
      StartRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "StartRuntime");
  }

  @Override
  public void stopRuntime(StopRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "StopRuntime");
  }

  @Override
  public void switchRuntime(
      SwitchRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "SwitchRuntime");
  }

  @Override
  public void resetRuntime(
      ResetRuntimeRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "ResetRuntime");
  }

  @Override
  public void reportRuntimeEvent(
      ReportRuntimeEventRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "ReportRuntimeEvent");
  }
}
googleapis/java-notebooks
google-cloud-notebooks/src/test/java/com/google/cloud/notebooks/v1/MockManagedNotebookServiceImpl.java
Java
apache-2.0
9,221
package m3.wikipedia.corpus.extractor;

import com.thoughtworks.xstream.XStream;
import java.beans.XMLDecoder;
import java.beans.XMLEncoder;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import org.apache.hadoopts.hadoopts.topics.wikipedia.LocalWikipediaNetwork2;
import m3.wikipedia.explorer.data.WikiNode;

/**
 * Metadata of a single analysis (a "study") for Wikipedia time series:
 * the set of core {@link WikiNode}s, working paths, the studied time range,
 * and a log of extraction attempts. Instances are serialized to and from XML
 * via XStream ({@link #store(File, WikiStudieMetaData)} / {@link #load(File)}).
 *
 * @author kamir
 */
public class WikiStudieMetaData {

    /** End of the studied time range. */
    private Date dateTo;

    /** Start of the studied time range. */
    private Date dateFrom;

    public void setDateTo(Date dateTo) {
        this.dateTo = dateTo;
    }

    public void setDateFrom(Date dateFrom) {
        this.dateFrom = dateFrom;
    }

    /** Instances are created through {@link #initStudie()} only. */
    private WikiStudieMetaData() {
    }

    /** The study currently active in this JVM; the first one registered wins. */
    static WikiStudieMetaData currentS = null;

    /**
     * Creates a new study and registers it as the current one (if none is set yet).
     *
     * @return the new study metadata object
     */
    public static WikiStudieMetaData initStudie() {
        WikiStudieMetaData s = new WikiStudieMetaData();
        init(s);
        return s;
    }

    /** Registers {@code s} as the current study if none is registered yet. */
    private static void init(WikiStudieMetaData s) {
        if (currentS == null) {
            currentS = s;
        } else {
            // javax.swing.JOptionPane.showMessageDialog( new JFrame(), "Create new STUDY METADATA ... \n\n (later also in your HDGS repository!!!)" );
        }
    }

    /** Display name of the study. */
    public String name = "?";

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the description followed by the log of all extraction attempts
     */
    public String getDescription() {
        String ret = description + "\n" + getExtractionAttempts();
        return ret;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the set of core WikiNodes collected in this study.
     *
     * @return the core node list
     */
    public Vector<WikiNode> getWn() {
        return wn;
    }

    public void setWn(Vector<WikiNode> wn) {
        this.wn = wn;
    }

    /** Free-text description of the study. */
    public String description = "...";

    /** The core nodes of the study. */
    public Vector<WikiNode> wn = new Vector<WikiNode>();

    /** Adds a new core node identified by page title {@code w} and project/language {@code p}. */
    public void addNewNode(String w, String p) {
        wn.add(new WikiNode(w, p));
    }

    /** Human-readable summary: name, both paths, and core-node count. */
    @Override
    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append("NAME : " + this.name + "\n");
        sb.append("PATH-1 : " + this.path1 + "\n");
        sb.append("PATH-2 : " + this.path2 + "\n");
        sb.append("Z-Nodes : " + this.wn.size() + "\n");
        return sb.toString();
    }

    /**
     * Serializes a study to an XML file via XStream.
     *
     * @param f    target file
     * @param data study to serialize
     * @throws IOException if writing fails
     */
    public static void store(File f, WikiStudieMetaData data) throws FileNotFoundException, IOException {
        XStream xstream = new XStream();
        String s = xstream.toXML(data);
        // FIX: try-with-resources — the writer used to leak when write() threw.
        try (FileWriter os = new FileWriter(f)) {
            os.write(s);
            os.flush();
        }
    }

    /**
     * Deserializes a study from an XML file and registers it as the current study.
     *
     * @param f XML file written by {@link #store(File, WikiStudieMetaData)}
     * @return the loaded study
     * @throws FileNotFoundException if {@code f} does not exist
     */
    public static WikiStudieMetaData load(File f) throws FileNotFoundException {
        System.out.println(">>> LOAD XML-file ... " + f.getAbsolutePath());
        FileInputStream os = new FileInputStream(f);
        XStream xstream = new XStream();
        Object o;
        try {
            o = xstream.fromXML(os);
        } finally {
            // FIX: the input stream was never closed before.
            try {
                os.close();
            } catch (IOException ignored) {
                // best effort: the document has already been parsed
            }
        }
        WikiStudieMetaData d = (WikiStudieMetaData) o;
        // Older study files may predate the attempt log.
        if (d.extractions_attempts == null) d.extractions_attempts = new Vector<String>();
        System.out.println(">>> #nodes=" + d.getWn().size());
        init(d);
        return d;
    }

    /** Link network backing all extract* methods; loaded lazily from the list file. */
    public LocalWikipediaNetwork2 net = null;

    public Vector<WikiNode> getAL() {
        return extract("A.L");
    }

    public Vector<WikiNode> getAL(int nr) {
        return extract("A.L", nr);
    }

    public Vector<WikiNode> getBL(int nr) {
        return extract("B.L", nr);
    }

    public Vector<WikiNode> getBL() {
        return extract("B.L");
    }

    public Vector<WikiNode> getIWL() {
        return extract("IWL");
    }

    public Vector<WikiNode> getIWL(int nr) {
        return extract("IWL", nr);
    }

    public Vector<WikiNode> getCN() {
        return extract("CN");
    }

    public Vector<WikiNode> getCN(int nr) {
        return extract("CN", nr);
    }

    /** The "merged_listfile_<name>.lst" file the network was loaded from. */
    File LISTFILE = null;

    /**
     * Lazily loads the link network from "<path>/merged_listfile_<name>.lst".
     * A missing list file is fatal: it is logged and the JVM exits.
     */
    public void initNetFromListFile(String path, String name) {
        if (net != null) return;
        net = new LocalWikipediaNetwork2();
        File f = new File(path + "/" + "merged_listfile_" + name + ".lst");
        try {
            net.loadListFile2(f);
            LISTFILE = f;
        } catch (FileNotFoundException ex) {
            // FIX: log BEFORE exiting — the original called System.exit(-1) first,
            // which made the log statement unreachable.
            Logger.getLogger(WikiStudieMetaData.class.getName()).log(Level.SEVERE, null, ex);
            System.exit(-1);
        } catch (IOException ex) {
            Logger.getLogger(WikiStudieMetaData.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Extracts the node names for one link type within one CN group.
     *
     * @param keyPart link-type suffix of the hash key (e.g. "A.L", "B.L", "IWL", "CN")
     * @param CNid    id of the CN group the hash key must start with
     * @return all matching nodes
     */
    public Vector<WikiNode> extract(String keyPart, int CNid) {
        Vector<WikiNode> l = new Vector<WikiNode>();
        Iterator<String> en = net.nodeListeHASHED.keySet().iterator();
        while (en.hasNext()) {
            String code = en.next();
            // only if the right CN-group is here ...
            if (code.startsWith(CNid + "")) {
                if (code.endsWith(keyPart)) {
                    Vector<String[]> wns = net.nodeListeHASHED.get(code);
                    for (String[] s : wns) {
                        l.add(new WikiNode(s));
                    }
                }
            }
        }
        return l;
    }

    /**
     * Extracts the nodes of one link type across all CN groups.
     *
     * @param keyPart link-type suffix of the hash key
     * @return all matching nodes
     */
    public Vector<WikiNode> extract(String keyPart) {
        Vector<WikiNode> l = new Vector<WikiNode>();
        Iterator<String> en = net.nodeListeHASHED.keySet().iterator();
        while (en.hasNext()) {
            String code = en.next();
            if (code.endsWith(keyPart)) {
                Vector<String[]> wns = net.nodeListeHASHED.get(code);
                for (String[] s : wns) {
                    l.add(new WikiNode(s));
                }
            }
        }
        return l;
    }

    /**
     * Extracts all nodes of one CN group, regardless of link type.
     *
     * @param CN id of the CN group
     * @return all nodes of that group
     */
    public Vector<WikiNode> extractByCN(int CN) {
        String CNid = CN + "";
        Vector<WikiNode> l = new Vector<WikiNode>();
        Iterator<String> en = net.nodeListeHASHED.keySet().iterator();
        while (en.hasNext()) {
            String code = en.next();
            if (code.startsWith(CNid + "")) {
                Vector<String[]> wns = net.nodeListeHASHED.get(code);
                for (String[] s : wns) {
                    l.add(new WikiNode(s));
                }
            }
        }
        System.out.println(">>> SUB-List extracted ... ");
        return l;
    }

    /** @return every node in the network, all groups and link types. */
    public Vector<WikiNode> extract_ALL() {
        Vector<WikiNode> l = new Vector<WikiNode>();
        Iterator<String> en = net.nodeListeHASHED.keySet().iterator();
        while (en.hasNext()) {
            String code = en.next();
            Vector<String[]> wns = net.nodeListeHASHED.get(code);
            for (String[] s : wns) {
                l.add(new WikiNode(s));
            }
        }
        return l;
    }

    /** Log of extraction attempts, one formatted line per attempt. */
    Vector<String> extractions_attempts = new Vector<String>();

    /** Records one extraction attempt over the range [von, bis] with a timestamp. */
    public void logExtraction(Calendar von, Calendar bis) {
        String el = "(" + von.getTimeInMillis() + ", ... ," + bis.getTimeInMillis() + " um: " + new Date(System.currentTimeMillis());
        extractions_attempts.add(el);
    }

    /** @return all logged attempts, newline-terminated. */
    private String getExtractionAttempts() {
        // FIX(idiom): StringBuilder instead of repeated String.concat in a loop.
        StringBuilder d = new StringBuilder();
        for (String s : extractions_attempts) {
            d.append(s).append("\n");
        }
        return d.toString();
    }

    /**
     * Loads the network for (path, name) if needed and counts every node.
     *
     * @param p path containing the list file
     * @param n study name
     * @return total node count
     */
    public int getNrOfNodes_ALL(String p, String n) {
        initNetFromListFile(p, n);
        Vector<WikiNode> nn = extract_ALL();
        return nn.size();
    }

    /** Opens a Swing file chooser rooted at {@code path1} and returns the selection. */
    public File selectFile(String path1) {
        File ff = new File(path1);
        javax.swing.JFileChooser jfc = new JFileChooser(ff);
        int sel = jfc.showOpenDialog(new JFrame());
        ff = jfc.getSelectedFile();
        return ff;
    }

    boolean operate_LOCALY = true;
    boolean operate_DEV = false;

    /** Study name derived from the last selected project file. */
    public String selectedName = null;

    public String path2 = "/home/kamir/bin/WikiExplorer/WikiExplorer/";
    public String path1 = "/Volumes/MyExternalDrive/CALCULATIONS/Wikipedia";

    /**
     * Derives paths, study name and project XML file from a selected list file,
     * and loads the network.
     *
     * NOTE(review): in operate_DEV mode the name stays "???" so the returned
     * file is "???.xml" — looks intentional for the hard-coded dev setup, but
     * confirm before relying on it.
     *
     * @param ff the selected "*.lst" file
     * @return the matching project XML file
     */
    public File getRealPathToProjectFile(File ff) {
        String na = "???";
        if (operate_DEV) {
            path2 = "/home/kamir/bin/WikiExplorer/WikiExplorer/";
            path1 = "/Volumes/MyExternalDrive/CALCULATIONS/Wikipedia";
        } else {
            path2 = ff.getParent();
            path1 = ff.getParent();
            // strip the ".lst" extension to recover the study name
            na = ff.getName().substring(0, ff.getName().length() - 4);
        }
        File f = new File(path1 + "/" + na + ".xml");
        initNetFromListFile(path2, na);
        selectedName = na;
        System.out.println(">>> path1 : " + path1);
        System.out.println(">>> path2 : " + path2);
        System.out.println(">>> name : " + selectedName);
        name = selectedName;
        return f;
    }

    /** @return HDFS location of the merged time-series extraction for this study. */
    public String getTSExtractionLocationFileName() {
        String ext = "2008";
        return "/user/kamir/wikipedia/corpus/" + this.name + "_" + ext + "_merged/part-r-00000";
        // this.jtLF.setText("/home/kamir/bin/WikiExplorer/WikiExplorer/merged_listfile_" + this.jtf_Studie.getText() + ".lst");
    }

    /** @return absolute path of the loaded list file. */
    public String getLISTFILE() {
        return LISTFILE.getAbsolutePath();
    }

    /** Clears the reload buffer. */
    public void resetReloadBuffer() {
        wnRELOAD = new Vector<WikiNode>();
    }

    /** Nodes queued for reloading. */
    public Vector<WikiNode> wnRELOAD = new Vector<WikiNode>();

    /** Queues a node (title {@code w}, project {@code p}) for reloading. */
    public void addForReload(String w, String p) {
        wnRELOAD.add(new WikiNode(w, p));
    }

    public Date getTimeRangeTo() {
        return dateTo;
    }

    public Date getTimeRangeFrom() {
        return dateFrom;
    }
}
kamir/WikiExplorer.NG
src/main/java/m3/wikipedia/corpus/extractor/WikiStudieMetaData.java
Java
apache-2.0
10,580
public class WorkerProcess { public static void main(String[] args) { while(true) { try { Thread.sleep(1000); } catch(InterruptedException e) {} System.out.println("Worker process woke up"); } } }
dgomez10/xanon
src/main/java/WorkerProcess.java
Java
apache-2.0
286
/* * Copyright 2016 Ignacio del Valle Alles idelvall@brutusin.org. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.brutusin.rpc.exception; /** * * @author Ignacio del Valle Alles idelvall@brutusin.org */ public class InvalidRequestException extends RuntimeException{ public InvalidRequestException() { } public InvalidRequestException(String message) { super(message); } }
brutusin/Brutusin-RPC
rpc-impl/src/main/java/org/brutusin/rpc/exception/InvalidRequestException.java
Java
apache-2.0
931
/**
 * vertigo - simple java starter
 *
 * Copyright (C) 2013-2018, KleeGroup, direction.technique@kleegroup.com (http://www.kleegroup.com)
 * KleeGroup, Centre d'affaire la Boursidiere - BP 159 - 92357 Le Plessis Robinson Cedex - France
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.vertigo.quarto.impl.services.publisher.merger.processor;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

import io.vertigo.lang.Assertion;

/**
 * Utility helpers for reading and writing zip (ODT) archive entries.
 * All entry content is handled as UTF-8.
 *
 * @author pforhan
 */
public final class ZipUtil {
	/** Copy-buffer size, in chars. */
	private static final int BUFFER_SIZE = 8 * 1024;

	/** Utility class: no instances. */
	private ZipUtil() {
		//
	}

	/**
	 * Reads one entry of an ODT zip file as a UTF-8 string.
	 *
	 * @param odtFile source zip file
	 * @param entryName name of the entry to extract
	 * @return the entry content, decoded as UTF-8
	 * @throws IOException on read failure
	 */
	public static String readEntry(final ZipFile odtFile, final String entryName) throws IOException {
		Assertion.checkNotNull(odtFile);
		Assertion.checkArgNotEmpty(entryName);
		final ZipEntry zipEntry = odtFile.getEntry(entryName);
		Assertion.checkNotNull(zipEntry, "Le modèle {0} ne contient pas {1}, vérifier que le modèle est un document valide et du bon type.", odtFile.getName(), entryName);
		//-----
		final StringBuilder content = new StringBuilder();
		try (final InputStreamReader reader = new InputStreamReader(odtFile.getInputStream(zipEntry), StandardCharsets.UTF_8)) {
			final char[] chunk = new char[BUFFER_SIZE];
			for (int count = reader.read(chunk, 0, BUFFER_SIZE); count > 0; count = reader.read(chunk, 0, BUFFER_SIZE)) {
				content.append(chunk, 0, count);
			}
		}
		return content.toString();
	}

	/**
	 * Writes a named entry into a zip stream from a string (encoded as UTF-8).
	 * The underlying stream is intentionally left open for further entries.
	 *
	 * @param outputZipFile target zip stream
	 * @param entryContent content of the entry
	 * @param entryName name of the entry
	 * @throws IOException on write failure
	 */
	public static void writeEntry(final ZipOutputStream outputZipFile, final String entryContent, final String entryName) throws IOException {
		outputZipFile.putNextEntry(new ZipEntry(entryName));
		final OutputStreamWriter writer = new OutputStreamWriter(outputZipFile, StandardCharsets.UTF_8);
		writer.write(entryContent, 0, entryContent.length());
		writer.flush();
	}

	/**
	 * Writes a named entry into a zip stream from an input stream.
	 *
	 * @param outputZipFile target zip stream
	 * @param entryContent stream providing the entry bytes
	 * @param entryName name of the entry
	 * @throws IOException on write failure
	 */
	public static void writeEntry(final ZipOutputStream outputZipFile, final InputStream entryContent, final String entryName) throws IOException {
		writeEntry(outputZipFile, entryContent, new ZipEntry(entryName));
	}

	/**
	 * Writes an entry into a zip stream from an input stream.
	 *
	 * @param outputOdtFile target zip stream
	 * @param entryContent stream providing the entry bytes
	 * @param zipEntry entry descriptor
	 * @throws IOException on write failure
	 */
	public static void writeEntry(final ZipOutputStream outputOdtFile, final InputStream entryContent, final ZipEntry zipEntry) throws IOException {
		outputOdtFile.putNextEntry(zipEntry);
		final byte[] buffer = new byte[10 * 1024];
		int count;
		while ((count = entryContent.read(buffer)) > 0) {
			outputOdtFile.write(buffer, 0, count);
		}
		outputOdtFile.flush();
	}
}
KleeGroup/vertigo-addons
vertigo-quarto/src/main/java/io/vertigo/quarto/impl/services/publisher/merger/processor/ZipUtil.java
Java
apache-2.0
4,586
package com.cc.listview.base.listener; import com.cc.listview.base.cell.TXBaseListCell; /** * Created by Cheng on 16/7/26. */ public interface TXOnCreateCellListener<T> { TXBaseListCell<T> onCreateCell(int type); }
ChengCheng-Hello/PullToRefreshListView
base/src/main/java/com/cc/listview/base/listener/TXOnCreateCellListener.java
Java
apache-2.0
224
public class AnimalAnfibio implements CapacidadeAndar, CapacidadeNadar { private AnimalTerreste animalTerrestre; private AnimalMaritimo animalMaritmo; public AnimalAnfibio() { this.animalTerrestre = new AnimalTerreste(); this.animalMaritmo = new AnimalMaritimo(); } public void andar() { animalTerrestre.andar(); } public void nadar() { animalMaritmo.nadar(); } }
paulorosa/slides-poo
fontes-aulas/TADS-POO-1-2017/Aula13/src/AnimalAnfibio.java
Java
apache-2.0
404
/* * Copyright (c) 2014,2015,2016 Ahome' Innovation Technologies. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ait.tooling.server.hazelcast.support.spring; import java.io.IOException; import java.util.Objects; import com.hazelcast.core.Hazelcast; import com.hazelcast.core.HazelcastInstance; public final class HazelcastInstanceProvider implements IHazelcastInstanceProvider { private final HazelcastInstance m_instance; public HazelcastInstanceProvider(final HazelcastInstance instance) { m_instance = Objects.requireNonNull(instance); } @Override public HazelcastInstance getHazelcastInstance() { return m_instance; } @Override public void close() throws IOException { Hazelcast.shutdownAll(); } }
ahome-it/ahome-tooling-server-hazelcast
src/main/groovy/com/ait/tooling/server/hazelcast/support/spring/HazelcastInstanceProvider.java
Java
apache-2.0
1,330
package by.gdgminsk.filepermissionsdemo.util; import android.content.Context; import android.content.SharedPreferences; import android.support.annotation.IdRes; import by.gdgminsk.filepermissionsdemo.R; public class Prefs { private static final String PREF_STORAGE_SWITCH_STATE = "storage_switch_state"; private static final String PREF_MIGRATION_SWITCH_STATE = "migration_switch_state"; private static final String PREF_IMAGE_LOADED = "image_loaded"; private SharedPreferences mPrefs; private static Prefs sInstance; private Prefs(SharedPreferences prefs) { mPrefs = prefs; } public static Prefs get(Context context) { if (sInstance == null) { sInstance = new Prefs(context.getSharedPreferences("app_settings", Context.MODE_PRIVATE)); } return sInstance; } public void setImageLoaded(boolean imageLoaded) { mPrefs.edit().putBoolean(PREF_IMAGE_LOADED, imageLoaded).apply(); } public boolean isImageLoaded() { return mPrefs.getBoolean(PREF_IMAGE_LOADED, false); } public void setMigrationMode(@IdRes int migrationMode) { mPrefs.edit().putInt(PREF_MIGRATION_SWITCH_STATE, migrationMode).apply(); } @IdRes public int getMigrationMode() { return mPrefs.getInt(PREF_MIGRATION_SWITCH_STATE, R.id.migration_switch_java_move); } public void setStorageMode(@IdRes int storageMode) { mPrefs.edit().putInt(PREF_STORAGE_SWITCH_STATE, storageMode).apply(); } @IdRes public int getStorageMode() { return mPrefs.getInt(PREF_STORAGE_SWITCH_STATE, R.id.storage_switch_common); } public void reset() { mPrefs.edit() .remove(PREF_IMAGE_LOADED) .remove(PREF_MIGRATION_SWITCH_STATE) .remove(PREF_STORAGE_SWITCH_STATE) .apply(); } }
anton-novikau/file-permissions-demo
app/src/main/java/by/gdgminsk/filepermissionsdemo/util/Prefs.java
Java
apache-2.0
1,891
package ems.idls.alarms; /** * ems/idls/alarms/AlarmCategory.java . * Generated by the IDL-to-Java compiler (portable), version "3.2" * from alarms.idl * Tuesday, January 15, 2015 5:02:23 PM CST */ /** * Alarm Category Type */ public class AlarmCategory implements org.omg.CORBA.portable.IDLEntity { private int __value; private static int __size = 5; private static ems.idls.alarms.AlarmCategory[] __array = new ems.idls.alarms.AlarmCategory [__size]; public static final int _COMMUNICATIONS_ALARM = 0; public static final ems.idls.alarms.AlarmCategory COMMUNICATIONS_ALARM = new ems.idls.alarms.AlarmCategory(_COMMUNICATIONS_ALARM); public static final int _QUALITY_OF_SERVICE_ALARM = 1; public static final ems.idls.alarms.AlarmCategory QUALITY_OF_SERVICE_ALARM = new ems.idls.alarms.AlarmCategory(_QUALITY_OF_SERVICE_ALARM); public static final int _PROCESSING_ERROR_ALARM = 2; public static final ems.idls.alarms.AlarmCategory PROCESSING_ERROR_ALARM = new ems.idls.alarms.AlarmCategory(_PROCESSING_ERROR_ALARM); public static final int _EQUIPMENT_ALARM = 3; public static final ems.idls.alarms.AlarmCategory EQUIPMENT_ALARM = new ems.idls.alarms.AlarmCategory(_EQUIPMENT_ALARM); public static final int _ENVIRONMENTAL_ALARM = 4; public static final ems.idls.alarms.AlarmCategory ENVIRONMENTAL_ALARM = new ems.idls.alarms.AlarmCategory(_ENVIRONMENTAL_ALARM); public int value () { return __value; } public static ems.idls.alarms.AlarmCategory from_int (int value) { if (value >= 0 && value < __size) return __array[value]; else throw new org.omg.CORBA.BAD_PARAM (); } protected AlarmCategory (int value) { __value = value; __array[__value] = this; } } // class AlarmCategory
shorton3/dashingplatforms
src/ems/idls/alarms/AlarmCategory.java
Java
apache-2.0
1,778
package fr.javatronic.blog.processor; /** * Annotation1 - * * @author Sébastien Lesaint */ @java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE}) @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE) @java.lang.annotation.Documented public @interface Annotation_003 { }
lesaint/experimenting-annotation-processing
experimenting-rounds/processor/src/main/java/fr/javatronic/blog/processor/Annotation_003.java
Java
apache-2.0
314
package fr.javatronic.blog.massive.annotation1; import fr.javatronic.blog.processor.Annotation_001; @Annotation_001 public class Class_738 { }
lesaint/experimenting-annotation-processing
experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/Class_738.java
Java
apache-2.0
145
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.s3control.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Location details for where the generated manifest should be written.
 * </p>
 * <p>
 * NOTE: this class is code-generated (see the {@code @Generated} annotation); prefer regenerating from the service
 * model over hand-editing.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/s3control-2018-08-20/S3ManifestOutputLocation" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class S3ManifestOutputLocation implements Serializable, Cloneable {

    /** The Account ID that owns the bucket the generated manifest is written to. */
    private String expectedManifestBucketOwner;

    /** The bucket ARN the generated manifest should be written to. */
    private String bucket;

    /** Prefix identifying one or more objects to which the manifest applies. */
    private String manifestPrefix;

    /** Specifies what encryption should be used when the generated manifest objects are written. */
    private GeneratedManifestEncryption manifestEncryption;

    /** The format of the generated manifest, as a string (see {@link GeneratedManifestFormat}). */
    private String manifestFormat;

    /**
     * @param expectedManifestBucketOwner
     *        The Account ID that owns the bucket the generated manifest is written to.
     */
    public void setExpectedManifestBucketOwner(String expectedManifestBucketOwner) {
        this.expectedManifestBucketOwner = expectedManifestBucketOwner;
    }

    /**
     * @return The Account ID that owns the bucket the generated manifest is written to.
     */
    public String getExpectedManifestBucketOwner() {
        return this.expectedManifestBucketOwner;
    }

    /**
     * @param expectedManifestBucketOwner
     *        The Account ID that owns the bucket the generated manifest is written to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public S3ManifestOutputLocation withExpectedManifestBucketOwner(String expectedManifestBucketOwner) {
        setExpectedManifestBucketOwner(expectedManifestBucketOwner);
        return this;
    }

    /**
     * @param bucket
     *        The bucket ARN the generated manifest should be written to.
     */
    public void setBucket(String bucket) {
        this.bucket = bucket;
    }

    /**
     * @return The bucket ARN the generated manifest should be written to.
     */
    public String getBucket() {
        return this.bucket;
    }

    /**
     * @param bucket
     *        The bucket ARN the generated manifest should be written to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public S3ManifestOutputLocation withBucket(String bucket) {
        setBucket(bucket);
        return this;
    }

    /**
     * @param manifestPrefix
     *        Prefix identifying one or more objects to which the manifest applies.
     */
    public void setManifestPrefix(String manifestPrefix) {
        this.manifestPrefix = manifestPrefix;
    }

    /**
     * @return Prefix identifying one or more objects to which the manifest applies.
     */
    public String getManifestPrefix() {
        return this.manifestPrefix;
    }

    /**
     * @param manifestPrefix
     *        Prefix identifying one or more objects to which the manifest applies.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public S3ManifestOutputLocation withManifestPrefix(String manifestPrefix) {
        setManifestPrefix(manifestPrefix);
        return this;
    }

    /**
     * @param manifestEncryption
     *        Specifies what encryption should be used when the generated manifest objects are written.
     */
    public void setManifestEncryption(GeneratedManifestEncryption manifestEncryption) {
        this.manifestEncryption = manifestEncryption;
    }

    /**
     * @return Specifies what encryption should be used when the generated manifest objects are written.
     */
    public GeneratedManifestEncryption getManifestEncryption() {
        return this.manifestEncryption;
    }

    /**
     * @param manifestEncryption
     *        Specifies what encryption should be used when the generated manifest objects are written.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public S3ManifestOutputLocation withManifestEncryption(GeneratedManifestEncryption manifestEncryption) {
        setManifestEncryption(manifestEncryption);
        return this;
    }

    /**
     * @param manifestFormat
     *        The format of the generated manifest.
     * @see GeneratedManifestFormat
     */
    public void setManifestFormat(String manifestFormat) {
        this.manifestFormat = manifestFormat;
    }

    /**
     * @return The format of the generated manifest.
     * @see GeneratedManifestFormat
     */
    public String getManifestFormat() {
        return this.manifestFormat;
    }

    /**
     * @param manifestFormat
     *        The format of the generated manifest.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see GeneratedManifestFormat
     */
    public S3ManifestOutputLocation withManifestFormat(String manifestFormat) {
        setManifestFormat(manifestFormat);
        return this;
    }

    /**
     * Enum overload: stores the enum's string form directly.
     *
     * @param manifestFormat
     *        The format of the generated manifest.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see GeneratedManifestFormat
     */
    public S3ManifestOutputLocation withManifestFormat(GeneratedManifestFormat manifestFormat) {
        this.manifestFormat = manifestFormat.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getExpectedManifestBucketOwner() != null)
            sb.append("ExpectedManifestBucketOwner: ").append(getExpectedManifestBucketOwner()).append(",");
        if (getBucket() != null)
            sb.append("Bucket: ").append(getBucket()).append(",");
        if (getManifestPrefix() != null)
            sb.append("ManifestPrefix: ").append(getManifestPrefix()).append(",");
        if (getManifestEncryption() != null)
            sb.append("ManifestEncryption: ").append(getManifestEncryption()).append(",");
        if (getManifestFormat() != null)
            sb.append("ManifestFormat: ").append(getManifestFormat());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof S3ManifestOutputLocation == false)
            return false;
        S3ManifestOutputLocation other = (S3ManifestOutputLocation) obj;
        // For each field: the XOR detects "exactly one side null" (not equal);
        // otherwise both null counts as equal, and non-null values use equals().
        if (other.getExpectedManifestBucketOwner() == null ^ this.getExpectedManifestBucketOwner() == null)
            return false;
        if (other.getExpectedManifestBucketOwner() != null && other.getExpectedManifestBucketOwner().equals(this.getExpectedManifestBucketOwner()) == false)
            return false;
        if (other.getBucket() == null ^ this.getBucket() == null)
            return false;
        if (other.getBucket() != null && other.getBucket().equals(this.getBucket()) == false)
            return false;
        if (other.getManifestPrefix() == null ^ this.getManifestPrefix() == null)
            return false;
        if (other.getManifestPrefix() != null && other.getManifestPrefix().equals(this.getManifestPrefix()) == false)
            return false;
        if (other.getManifestEncryption() == null ^ this.getManifestEncryption() == null)
            return false;
        if (other.getManifestEncryption() != null && other.getManifestEncryption().equals(this.getManifestEncryption()) == false)
            return false;
        if (other.getManifestFormat() == null ^ this.getManifestFormat() == null)
            return false;
        if (other.getManifestFormat() != null && other.getManifestFormat().equals(this.getManifestFormat()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getExpectedManifestBucketOwner() == null) ? 0 : getExpectedManifestBucketOwner().hashCode());
        hashCode = prime * hashCode + ((getBucket() == null) ? 0 : getBucket().hashCode());
        hashCode = prime * hashCode + ((getManifestPrefix() == null) ? 0 : getManifestPrefix().hashCode());
        hashCode = prime * hashCode + ((getManifestEncryption() == null) ? 0 : getManifestEncryption().hashCode());
        hashCode = prime * hashCode + ((getManifestFormat() == null) ? 0 : getManifestFormat().hashCode());
        return hashCode;
    }

    @Override
    public S3ManifestOutputLocation clone() {
        try {
            // Shallow copy is sufficient: all fields are immutable or treated as values.
            return (S3ManifestOutputLocation) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
aws/aws-sdk-java
aws-java-sdk-s3control/src/main/java/com/amazonaws/services/s3control/model/S3ManifestOutputLocation.java
Java
apache-2.0
11,942
package org.swtk.commons.dict.wordnet.indexbyname.instance.d.i.j;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;

import org.swtk.common.dict.dto.wordnet.IndexNoun;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Static, pre-built WordNet noun index shard for terms starting with "dij".
 * Entries are deserialized once (at class load) from embedded JSON strings
 * and kept in a term-sorted map for lookup.
 */
public final class WordnetNounIndexNameInstanceDIJ {

	/** Term -&gt; index entries, ordered alphabetically by term. */
	private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();

	static {
		add("{\"term\":\"dijon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08955621\"]}");
	}

	/** Deserializes one JSON entry and files it under its term. */
	private static void add(final String JSON) {
		IndexNoun entry = GsonUtils.toObject(JSON, IndexNoun.class);

		Collection<IndexNoun> bucket = map.get(entry.getTerm());
		if (null == bucket) {
			bucket = new ArrayList<IndexNoun>();
			map.put(entry.getTerm(), bucket);
		}
		bucket.add(entry);
	}

	/** @return all index entries for TERM, or {@code null} when the term is unknown */
	public static Collection<IndexNoun> get(final String TERM) {
		return map.get(TERM);
	}

	/** @return whether TERM is present in this index shard */
	public static boolean has(final String TERM) {
		return map.containsKey(TERM);
	}

	/** @return the sorted terms in this shard (a live view of the map's key set) */
	public static Collection<String> terms() {
		return map.keySet();
	}
}
torrances/swtk-commons
commons-dict-wordnet-indexbyname/src/main/java/org/swtk/commons/dict/wordnet/indexbyname/instance/d/i/j/WordnetNounIndexNameInstanceDIJ.java
Java
apache-2.0
1,104
package com.reigndesign.app.network.models;

import java.util.List;

/**
 * Response envelope for a news search: the list of matching stories
 * ("hits") plus the query and request parameters.
 *
 * NOTE(review): field names suggest this mirrors a search-API response
 * where "query"/"params" echo the request — confirm against the service.
 */
public class NewsEnvelope {
    // Stories matched by the search.
    private List<New> hits;
    // Query string associated with these hits.
    private String query;
    // Raw request-parameter string associated with these hits.
    private String params;

    /** @return the stories matched by the search (may be null if unset) */
    public List<New> getHits() {
        return hits;
    }

    public void setHits(List<New> hits) {
        this.hits = hits;
    }

    /** @return the query string associated with these hits */
    public String getQuery() {
        return query;
    }

    public void setQuery(String query) {
        this.query = query;
    }

    /** @return the raw parameter string associated with these hits */
    public String getParams() {
        return params;
    }

    public void setParams(String params) {
        this.params = params;
    }
}
jdla1990/hackersnews
app/src/main/java/com/reigndesign/app/network/models/NewsEnvelope.java
Java
apache-2.0
594
// Copyright (c) 1999-2010 Brian Wellington (bwelling@xbill.org)

package org.xbill.DNS;

import java.io.IOException;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.Signature;
import java.security.interfaces.DSAPrivateKey;
import java.security.interfaces.DSAPublicKey;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;

/**
 * Constants and methods relating to DNSSEC.
 *
 * DNSSEC provides authentication for DNS information.
 * @see RRSIGRecord
 * @see DNSKEYRecord
 * @see RRset
 *
 * @author Brian Wellington
 */
public class DNSSEC {

/** DNSSEC algorithm numbers and their textual mnemonics. */
public static class Algorithm {
	private Algorithm() {}

	/** RSA/MD5 public key (deprecated) */
	public static final int RSAMD5 = 1;

	/** Diffie Hellman key */
	public static final int DH = 2;

	/** DSA public key */
	public static final int DSA = 3;

	/** Elliptic Curve key */
	public static final int ECC = 4;

	/** RSA/SHA1 public key */
	public static final int RSASHA1 = 5;

	/** DSA/SHA1, NSEC3-aware public key */
	public static final int DSA_NSEC3_SHA1 = 6;

	/** RSA/SHA1, NSEC3-aware public key */
	public static final int RSA_NSEC3_SHA1 = 7;

	/** RSA/SHA256 public key */
	public static final int RSASHA256 = 8;

	/** RSA/SHA512 public key */
	public static final int RSASHA512 = 10;

	/** Indirect keys; the actual key is elsewhere. */
	public static final int INDIRECT = 252;

	/** Private algorithm, specified by domain name */
	public static final int PRIVATEDNS = 253;

	/** Private algorithm, specified by OID */
	public static final int PRIVATEOID = 254;

	private static Mnemonic algs = new Mnemonic("DNSSEC algorithm",
						    Mnemonic.CASE_UPPER);

	static {
		algs.setMaximum(0xFF);
		algs.setNumericAllowed(true);

		algs.add(RSAMD5, "RSAMD5");
		algs.add(DH, "DH");
		algs.add(DSA, "DSA");
		algs.add(ECC, "ECC");
		algs.add(RSASHA1, "RSASHA1");
		algs.add(DSA_NSEC3_SHA1, "DSA-NSEC3-SHA1");
		algs.add(RSA_NSEC3_SHA1, "RSA-NSEC3-SHA1");
		algs.add(RSASHA256, "RSASHA256");
		algs.add(RSASHA512, "RSASHA512");
		algs.add(INDIRECT, "INDIRECT");
		algs.add(PRIVATEDNS, "PRIVATEDNS");
		algs.add(PRIVATEOID, "PRIVATEOID");
	}

	/**
	 * Converts an algorithm into its textual representation
	 */
	public static String
	string(int alg) {
		return algs.getText(alg);
	}

	/**
	 * Converts a textual representation of an algorithm into its numeric
	 * code.  Integers in the range 0..255 are also accepted.
	 * @param s The textual representation of the algorithm
	 * @return The algorithm code, or -1 on error.
	 */
	public static int
	value(String s) {
		return algs.getValue(s);
	}
}

private
DNSSEC() { }

/**
 * Writes the fixed (non-signature) fields of a SIG/RRSIG record to out in
 * wire format, as required at the start of the data to be signed/verified.
 * Times are written as 32-bit second counts (getTime() is milliseconds).
 */
private static void
digestSIG(DNSOutput out, SIGBase sig) {
	out.writeU16(sig.getTypeCovered());
	out.writeU8(sig.getAlgorithm());
	out.writeU8(sig.getLabels());
	out.writeU32(sig.getOrigTTL());
	out.writeU32(sig.getExpire().getTime() / 1000);
	out.writeU32(sig.getTimeSigned().getTime() / 1000);
	out.writeU16(sig.getFootprint());
	sig.getSigner().toWireCanonical(out);
}

/**
 * Creates a byte array containing the concatenation of the fields of the
 * SIG record and the RRsets to be signed/verified.  This does not perform
 * a cryptographic digest.
 * @param rrsig The RRSIG record used to sign/verify the rrset.
 * @param rrset The data to be signed/verified.
 * @return The data to be cryptographically signed or verified.
 */
public static byte []
digestRRset(RRSIGRecord rrsig, RRset rrset) {
	DNSOutput out = new DNSOutput();
	digestSIG(out, rrsig);

	int size = rrset.size();
	Record [] records = new Record[size];

	Iterator it = rrset.rrs();
	Name name = rrset.getName();
	Name wild = null;
	int sigLabels = rrsig.getLabels() + 1; // Add the root label back.
	// If the RRSIG covers fewer labels than the owner name has, the
	// signature was made over the wildcard-expanded name.
	if (name.labels() > sigLabels)
		wild = name.wild(name.labels() - sigLabels);
	while (it.hasNext())
		records[--size] = (Record) it.next();
	// Canonical ordering of the RRs is required for a stable digest.
	Arrays.sort(records);

	// Shared per-record header: owner (or wildcard), type, class, and
	// the original TTL from the RRSIG (not the possibly-decremented TTL).
	DNSOutput header = new DNSOutput();
	if (wild != null)
		wild.toWireCanonical(header);
	else
		name.toWireCanonical(header);
	header.writeU16(rrset.getType());
	header.writeU16(rrset.getDClass());
	header.writeU32(rrsig.getOrigTTL());
	for (int i = 0; i < records.length; i++) {
		out.writeByteArray(header.toByteArray());
		// Write a placeholder length, then the rdata, then backpatch
		// the real length.
		int lengthPosition = out.current();
		out.writeU16(0);
		out.writeByteArray(records[i].rdataToWireCanonical());
		int rrlength = out.current() - lengthPosition - 2;
		out.save();
		out.jump(lengthPosition);
		out.writeU16(rrlength);
		out.restore();
	}
	return out.toByteArray();
}

/**
 * Creates a byte array containing the concatenation of the fields of the
 * SIG(0) record and the message to be signed.  This does not perform
 * a cryptographic digest.
 * @param sig The SIG record used to sign the rrset.
 * @param msg The message to be signed.
 * @param previous If this is a response, the signature from the query.
 * @return The data to be cryptographically signed.
 */
public static byte []
digestMessage(SIGRecord sig, Message msg, byte [] previous) {
	DNSOutput out = new DNSOutput();
	digestSIG(out, sig);

	if (previous != null)
		out.writeByteArray(previous);

	msg.toWire(out);
	return out.toByteArray();
}

/**
 * A DNSSEC exception.
 */
public static class DNSSECException extends Exception {
	DNSSECException(String s) {
		super(s);
	}
}

/**
 * An algorithm is unsupported by this DNSSEC implementation.
 */
public static class UnsupportedAlgorithmException extends DNSSECException {
	UnsupportedAlgorithmException(int alg) {
		super("Unsupported algorithm: " + alg);
	}
}

/**
 * The cryptographic data in a DNSSEC key is malformed.
 */
public static class MalformedKeyException extends DNSSECException {
	MalformedKeyException(KEYBase rec) {
		super("Invalid key data: " + rec.rdataToString());
	}
}

/**
 * A DNSSEC verification failed because fields in the DNSKEY and RRSIG records
 * do not match.
 */
public static class KeyMismatchException extends DNSSECException {
	private KEYBase key;
	private SIGBase sig;

	KeyMismatchException(KEYBase key, SIGBase sig) {
		super("key " +
		      key.getName() + "/" +
		      DNSSEC.Algorithm.string(key.getAlgorithm()) + "/" +
		      key.getFootprint() + " " +
		      "does not match signature " +
		      sig.getSigner() + "/" +
		      DNSSEC.Algorithm.string(sig.getAlgorithm()) + "/" +
		      sig.getFootprint());
	}
}

/**
 * A DNSSEC verification failed because the signature has expired.
 */
public static class SignatureExpiredException extends DNSSECException {
	private Date when, now;

	SignatureExpiredException(Date when, Date now) {
		super("signature expired");
		this.when = when;
		this.now = now;
	}

	/**
	 * @return When the signature expired
	 */
	public Date
	getExpiration() {
		return when;
	}

	/**
	 * @return When the verification was attempted
	 */
	public Date
	getVerifyTime() {
		return now;
	}
}

/**
 * A DNSSEC verification failed because the signature has not yet become valid.
 */
public static class SignatureNotYetValidException extends DNSSECException {
	private Date when, now;

	SignatureNotYetValidException(Date when, Date now) {
		super("signature is not yet valid");
		this.when = when;
		this.now = now;
	}

	/**
	 * @return When the signature will become valid
	 */
	public Date
	getExpiration() {
		return when;
	}

	/**
	 * @return When the verification was attempted
	 */
	public Date
	getVerifyTime() {
		return now;
	}
}

/**
 * A DNSSEC verification failed because the cryptographic signature
 * verification failed.
 */
public static class SignatureVerificationException extends DNSSECException {
	SignatureVerificationException() {
		super("signature verification failed");
	}
}

/**
 * The key data provided is inconsistent.
 */
public static class IncompatibleKeyException extends IllegalArgumentException {
	IncompatibleKeyException() {
		super("incompatible keys");
	}
}

/** Number of bytes needed to hold i as an unsigned big-endian integer. */
private static int
BigIntegerLength(BigInteger i) {
	return (i.bitLength() + 7) / 8;
}

/** Reads len bytes and interprets them as an unsigned big-endian integer. */
private static BigInteger
readBigInteger(DNSInput in, int len) throws IOException {
	byte [] b = in.readByteArray(len);
	return new BigInteger(1, b);
}

/** Reads all remaining bytes as an unsigned big-endian integer. */
private static BigInteger
readBigInteger(DNSInput in) {
	byte [] b = in.readByteArray();
	return new BigInteger(1, b);
}

/**
 * Writes val as an unsigned big-endian integer, stripping the leading zero
 * byte that BigInteger.toByteArray() adds when the top bit is set.
 */
private static void
writeBigInteger(DNSOutput out, BigInteger val) {
	byte [] b = val.toByteArray();
	if (b[0] == 0)
		out.writeByteArray(b, 1, b.length - 1);
	else
		out.writeByteArray(b);
}

/**
 * Decodes the RFC 3110 RSA key wire format (exponent length, exponent,
 * modulus) into a JCE RSA PublicKey.  A first byte of 0 means the exponent
 * length is in the following two bytes.
 */
private static PublicKey
toRSAPublicKey(KEYBase r) throws IOException, GeneralSecurityException {
	DNSInput in = new DNSInput(r.getKey());
	int exponentLength = in.readU8();
	if (exponentLength == 0)
		exponentLength = in.readU16();
	BigInteger exponent = readBigInteger(in, exponentLength);
	BigInteger modulus = readBigInteger(in);

	KeyFactory factory = KeyFactory.getInstance("RSA");
	return factory.generatePublic(new RSAPublicKeySpec(modulus, exponent));
}

/**
 * Decodes the RFC 2536 DSA key wire format (T, Q, P, G, Y) into a JCE DSA
 * PublicKey.  P, G and Y are (64 + T*8) bytes each; T must be <= 8.
 */
private static PublicKey
toDSAPublicKey(KEYBase r) throws IOException, GeneralSecurityException,
				 MalformedKeyException
{
	DNSInput in = new DNSInput(r.getKey());

	int t = in.readU8();
	if (t > 8)
		throw new MalformedKeyException(r);

	BigInteger q = readBigInteger(in, 20);
	BigInteger p = readBigInteger(in, 64 + t*8);
	BigInteger g = readBigInteger(in, 64 + t*8);
	BigInteger y = readBigInteger(in, 64 + t*8);

	KeyFactory factory = KeyFactory.getInstance("DSA");
	return factory.generatePublic(new DSAPublicKeySpec(y, p, q, g));
}

/** Converts a KEY/DNSKEY record into a PublicKey */
static PublicKey
toPublicKey(KEYBase r) throws DNSSECException {
	int alg = r.getAlgorithm();
	try {
		switch (alg) {
		case Algorithm.RSAMD5:
		case Algorithm.RSASHA1:
		case Algorithm.RSA_NSEC3_SHA1:
		case Algorithm.RSASHA256:
		case Algorithm.RSASHA512:
			return toRSAPublicKey(r);
		case Algorithm.DSA:
		case Algorithm.DSA_NSEC3_SHA1:
			return toDSAPublicKey(r);
		default:
			throw new UnsupportedAlgorithmException(alg);
		}
	}
	catch (IOException e) {
		throw new MalformedKeyException(r);
	}
	catch (GeneralSecurityException e) {
		throw new DNSSECException(e.toString());
	}
}

/** Encodes a JCE RSA public key in the RFC 3110 wire format. */
private static byte []
fromRSAPublicKey(RSAPublicKey key) {
	DNSOutput out = new DNSOutput();
	BigInteger exponent = key.getPublicExponent();
	BigInteger modulus = key.getModulus();
	int exponentLength = BigIntegerLength(exponent);

	if (exponentLength < 256)
		out.writeU8(exponentLength);
	else {
		// Long-exponent form: 0 marker followed by a 16-bit length.
		out.writeU8(0);
		out.writeU16(exponentLength);
	}
	writeBigInteger(out, exponent);
	writeBigInteger(out, modulus);

	return out.toByteArray();
}

/** Encodes a JCE DSA public key in the RFC 2536 wire format. */
private static byte []
fromDSAPublicKey(DSAPublicKey key) {
	DNSOutput out = new DNSOutput();
	BigInteger q = key.getParams().getQ();
	BigInteger p = key.getParams().getP();
	BigInteger g = key.getParams().getG();
	BigInteger y = key.getY();

	int t = (p.toByteArray().length - 64) / 8;

	out.writeU8(t);
	writeBigInteger(out, q);
	writeBigInteger(out, p);
	writeBigInteger(out, g);
	writeBigInteger(out, y);

	return out.toByteArray();
}

/** Builds a DNSKEY record from a PublicKey */
static byte []
fromPublicKey(PublicKey key, int alg) throws DNSSECException {
	byte [] data = null;

	switch (alg) {
	case Algorithm.RSAMD5:
	case Algorithm.RSASHA1:
	case Algorithm.RSA_NSEC3_SHA1:
	case Algorithm.RSASHA256:
	case Algorithm.RSASHA512:
		if (! (key instanceof RSAPublicKey))
			throw new IncompatibleKeyException();
		return fromRSAPublicKey((RSAPublicKey) key);
	case Algorithm.DSA:
	case Algorithm.DSA_NSEC3_SHA1:
		if (! (key instanceof DSAPublicKey))
			throw new IncompatibleKeyException();
		return fromDSAPublicKey((DSAPublicKey) key);
	default:
		throw new UnsupportedAlgorithmException(alg);
	}
}

/** Maps a DNSSEC algorithm number to the matching JCE Signature name. */
private static String
algString(int alg) throws UnsupportedAlgorithmException {
	switch (alg) {
	case Algorithm.RSAMD5:
		return "MD5withRSA";
	case Algorithm.DSA:
	case Algorithm.DSA_NSEC3_SHA1:
		return "SHA1withDSA";
	case Algorithm.RSASHA1:
	case Algorithm.RSA_NSEC3_SHA1:
		return "SHA1withRSA";
	case Algorithm.RSASHA256:
		return "SHA256withRSA";
	case Algorithm.RSASHA512:
		return "SHA512withRSA";
	default:
		throw new UnsupportedAlgorithmException(alg);
	}
}

// ASN.1 DER tags used when converting DSA signatures to/from the JCE form.
private static final int ASN1_SEQ = 0x30;
private static final int ASN1_INT = 0x2;

private static final int DSA_LEN = 20;

/**
 * Converts a DNS-format DSA signature (RFC 2536: 1 byte T, 20 bytes r,
 * 20 bytes s) into the ASN.1 DER SEQUENCE of two INTEGERs that the JCE
 * Signature class expects.  A zero pad byte is inserted before r or s when
 * its high bit is set, to keep the DER integer non-negative.
 */
private static byte []
DSASignaturefromDNS(byte [] dns) throws DNSSECException, IOException {
	if (dns.length != 1 + DSA_LEN * 2)
		throw new SignatureVerificationException();

	DNSInput in = new DNSInput(dns);
	DNSOutput out = new DNSOutput();

	int t = in.readU8();

	byte [] r = in.readByteArray(DSA_LEN);
	int rlen = DSA_LEN;
	if (r[0] < 0)
		rlen++;

	byte [] s = in.readByteArray(DSA_LEN);
	int slen = DSA_LEN;
	if (s[0] < 0)
		slen++;

	out.writeU8(ASN1_SEQ);
	out.writeU8(rlen + slen + 4);

	out.writeU8(ASN1_INT);
	out.writeU8(rlen);
	if (rlen > DSA_LEN)
		out.writeU8(0);
	out.writeByteArray(r);

	out.writeU8(ASN1_INT);
	out.writeU8(slen);
	if (slen > DSA_LEN)
		out.writeU8(0);
	out.writeByteArray(s);

	return out.toByteArray();
}

/**
 * Converts an ASN.1 DER-encoded DSA signature into the DNS wire format:
 * the T parameter byte followed by the 20-byte r and s values.  Throws
 * IOException when the DER structure is not exactly the expected shape.
 */
private static byte []
DSASignaturetoDNS(byte [] key, int t) throws IOException {
	DNSInput in = new DNSInput(key);
	DNSOutput out = new DNSOutput();

	out.writeU8(t);

	int tmp = in.readU8();
	if (tmp != ASN1_SEQ)
		throw new IOException();
	int seqlen = in.readU8();

	tmp = in.readU8();
	if (tmp != ASN1_INT)
		throw new IOException();

	// r: allow exactly 20 bytes, or 21 with a leading zero pad.
	int rlen = in.readU8();
	if (rlen == DSA_LEN + 1) {
		if (in.readU8() != 0)
			throw new IOException();
	} else if (rlen != DSA_LEN)
		throw new IOException();
	byte [] bytes = in.readByteArray(DSA_LEN);
	out.writeByteArray(bytes);

	tmp = in.readU8();
	if (tmp != ASN1_INT)
		throw new IOException();

	// s: same shape constraint as r.
	int slen = in.readU8();
	if (slen == DSA_LEN + 1) {
		if (in.readU8() != 0)
			throw new IOException();
	} else if (slen != DSA_LEN)
		throw new IOException();
	bytes = in.readByteArray(DSA_LEN);
	out.writeByteArray(bytes);

	return out.toByteArray();
}

/**
 * Verifies signature over data with the given key, converting DNS-format
 * DSA signatures to DER first.  Throws SignatureVerificationException on
 * a mismatch, DNSSECException on any crypto-layer failure.
 */
private static void
verify(PublicKey key, int alg, byte [] data, byte [] signature)
throws DNSSECException
{
	if (key instanceof DSAPublicKey) {
		try {
			signature = DSASignaturefromDNS(signature);
		}
		catch (IOException e) {
			throw new IllegalStateException();
		}
	}

	try {
		Signature s = Signature.getInstance(algString(alg));
		s.initVerify(key);
		s.update(data);
		if (!s.verify(signature))
			throw new SignatureVerificationException();
	}
	catch (GeneralSecurityException e) {
		throw new DNSSECException(e.toString());
	}
}

/** True when the key's algorithm, footprint and name match the signature's. */
private static boolean
matches(SIGBase sig, KEYBase key)
{
	return (key.getAlgorithm() == sig.getAlgorithm() &&
		key.getFootprint() == sig.getFootprint() &&
		key.getName().equals(sig.getSigner()));
}

/**
 * Verify a DNSSEC signature.
 * @param rrset The data to be verified.
 * @param rrsig The RRSIG record containing the signature.
 * @param key The DNSKEY record to verify the signature with.
 * @throws UnsupportedAlgorithmException The algorithm is unknown
 * @throws MalformedKeyException The key is malformed
 * @throws KeyMismatchException The key and signature do not match
 * @throws SignatureExpiredException The signature has expired
 * @throws SignatureNotYetValidException The signature is not yet valid
 * @throws SignatureVerificationException The signature does not verify.
 * @throws DNSSECException Some other error occurred.
 */
public static void
verify(RRset rrset, RRSIGRecord rrsig, DNSKEYRecord key)
throws DNSSECException
{
	if (!matches(rrsig, key))
		throw new KeyMismatchException(key, rrsig);

	// Validity-window check precedes the (expensive) crypto check.
	Date now = new Date();
	if (now.compareTo(rrsig.getExpire()) > 0)
		throw new SignatureExpiredException(rrsig.getExpire(), now);
	if (now.compareTo(rrsig.getTimeSigned()) < 0)
		throw new SignatureNotYetValidException(rrsig.getTimeSigned(),
							now);

	verify(key.getPublicKey(), rrsig.getAlgorithm(),
	       digestRRset(rrsig, rrset), rrsig.getSignature());
}

/**
 * Signs data with privkey, then for DSA keys converts the DER signature
 * back to the DNS wire format (which needs the T parameter derived from
 * the public key's P).
 */
private static byte []
sign(PrivateKey privkey, PublicKey pubkey, int alg, byte [] data)
throws DNSSECException
{
	byte [] signature;
	try {
		Signature s = Signature.getInstance(algString(alg));
		s.initSign(privkey);
		s.update(data);
		signature = s.sign();
	}
	catch (GeneralSecurityException e) {
		throw new DNSSECException(e.toString());
	}

	if (pubkey instanceof DSAPublicKey) {
		try {
			DSAPublicKey dsa = (DSAPublicKey) pubkey;
			BigInteger P = dsa.getParams().getP();
			int t = (BigIntegerLength(P) - 64) / 8;
			signature = DSASignaturetoDNS(signature, t);
		}
		catch (IOException e) {
			throw new IllegalStateException();
		}
	}

	return signature;
}

/**
 * Ensures the private key's type is compatible with the DNSSEC algorithm
 * number.  Note: throws the unchecked IncompatibleKeyException on a type
 * mismatch, and UnsupportedAlgorithmException for unknown algorithms.
 */
static void
checkAlgorithm(PrivateKey key, int alg) throws UnsupportedAlgorithmException
{
	switch (alg) {
	case Algorithm.RSAMD5:
	case Algorithm.RSASHA1:
	case Algorithm.RSA_NSEC3_SHA1:
	case Algorithm.RSASHA256:
	case Algorithm.RSASHA512:
		if (! (key instanceof RSAPrivateKey))
			throw new IncompatibleKeyException();
		break;
	case Algorithm.DSA:
	case Algorithm.DSA_NSEC3_SHA1:
		if (! (key instanceof DSAPrivateKey))
			throw new IncompatibleKeyException();
		break;
	default:
		throw new UnsupportedAlgorithmException(alg);
	}
}

/**
 * Generate a DNSSEC signature.  key and privateKey must refer to the
 * same underlying cryptographic key.
 * @param rrset The data to be signed
 * @param key The DNSKEY record to use as part of signing
 * @param privkey The PrivateKey to use when signing
 * @param inception The time at which the signatures should become valid
 * @param expiration The time at which the signatures should expire
 * @throws UnsupportedAlgorithmException The algorithm is unknown
 * @throws MalformedKeyException The key is malformed
 * @throws DNSSECException Some other error occurred.
 * @return The generated signature
 */
public static RRSIGRecord
sign(RRset rrset, DNSKEYRecord key, PrivateKey privkey,
     Date inception, Date expiration) throws DNSSECException
{
	int alg = key.getAlgorithm();
	checkAlgorithm(privkey, alg);

	// Build the RRSIG shell first (null signature); the shell's fields
	// are part of the data that gets signed.
	RRSIGRecord rrsig = new RRSIGRecord(rrset.getName(), rrset.getDClass(),
					    rrset.getTTL(), rrset.getType(),
					    alg, rrset.getTTL(),
					    expiration, inception,
					    key.getFootprint(),
					    key.getName(), null);

	rrsig.setSignature(sign(privkey, key.getPublicKey(), alg,
				digestRRset(rrsig, rrset)));
	return rrsig;
}

/** Generates a SIG(0) signature over a complete message. */
static SIGRecord
signMessage(Message message, SIGRecord previous, KEYRecord key,
	    PrivateKey privkey, Date inception, Date expiration)
throws DNSSECException
{
	int alg = key.getAlgorithm();
	checkAlgorithm(privkey, alg);

	SIGRecord sig = new SIGRecord(Name.root, DClass.ANY, 0, 0, alg, 0,
				      expiration, inception,
				      key.getFootprint(),
				      key.getName(), null);

	DNSOutput out = new DNSOutput();
	digestSIG(out, sig);
	if (previous != null)
		out.writeByteArray(previous.getSignature());
	message.toWire(out);

	sig.setSignature(sign(privkey, key.getPublicKey(), alg,
			      out.toByteArray()));
	return sig;
}

/**
 * Verifies a SIG(0) signature on a message.  bytes is the original wire
 * form of the message; the SIG record itself (which starts at
 * message.sig0start) is excluded from the digested data, and the
 * additional-section count is decremented to match the pre-signing header.
 */
static void
verifyMessage(Message message, byte [] bytes, SIGRecord sig,
	      SIGRecord previous, KEYRecord key) throws DNSSECException
{
	if (!matches(sig, key))
		throw new KeyMismatchException(key, sig);

	Date now = new Date();
	if (now.compareTo(sig.getExpire()) > 0)
		throw new SignatureExpiredException(sig.getExpire(), now);
	if (now.compareTo(sig.getTimeSigned()) < 0)
		throw new SignatureNotYetValidException(sig.getTimeSigned(),
							now);

	DNSOutput out = new DNSOutput();
	digestSIG(out, sig);
	if (previous != null)
		out.writeByteArray(previous.getSignature());

	Header header = (Header) message.getHeader().clone();
	header.decCount(Section.ADDITIONAL);
	out.writeByteArray(header.toWire());

	out.writeByteArray(bytes, Header.LENGTH,
			   message.sig0start - Header.LENGTH);

	verify(key.getPublicKey(), sig.getAlgorithm(),
	       out.toByteArray(), sig.getSignature());
}

/**
 * Generates the DS record rdata (footprint, algorithm, digest type, digest)
 * for a DNSKEY record, using the given digest type (SHA-1 or SHA-256).
 * The digest covers the canonical owner name followed by the DNSKEY rdata.
 */
static byte []
generateDS(DNSKEYRecord key, int digestid)
{
	DNSOutput out = new DNSOutput();
	out.writeU16(key.getFootprint());
	out.writeU8(key.getAlgorithm());
	out.writeU8(digestid);

	MessageDigest digest;
	try {
		switch (digestid) {
		case DSRecord.Digest.SHA1:
			digest = MessageDigest.getInstance("sha-1");
			break;
		case DSRecord.Digest.SHA256:
			digest = MessageDigest.getInstance("sha-256");
			break;
		default:
			throw new IllegalArgumentException(
				"unknown DS digest type " + digestid);
		}
	}
	catch (NoSuchAlgorithmException e) {
		throw new IllegalStateException("no message digest support");
	}

	digest.update(key.getName().toWire());
	digest.update(key.rdataToWireCanonical());
	out.writeByteArray(digest.digest());

	return out.toByteArray();
}

}
samuelhehe/androidpn_enhanced_client
asmack/org/xbill/DNS/DNSSEC.java
Java
apache-2.0
20,852
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.sql.impl.plan.node; import com.hazelcast.sql.impl.SqlDataSerializerHook; import com.hazelcast.sql.impl.SqlTestSupport; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class RootPlanNodeTest extends SqlTestSupport { @Test public void testState() { int id = 1; MockPlanNode upstream = MockPlanNode.create(2); RootPlanNode node = new RootPlanNode(id, upstream); assertEquals(id, node.getId()); assertSame(upstream, node.getUpstream()); assertEquals(upstream.getSchema(), node.getSchema()); } @Test public void testEquality() { MockPlanNode upstream1 = MockPlanNode.create(1); MockPlanNode upstream2 = MockPlanNode.create(2); int id1 = 3; int id2 = 4; checkEquals(new RootPlanNode(id1, upstream1), new RootPlanNode(id1, upstream1), true); checkEquals(new RootPlanNode(id1, upstream1), new RootPlanNode(id2, upstream1), false); checkEquals(new RootPlanNode(id1, upstream1), new RootPlanNode(id1, upstream2), false); } @Test public void testSerialization() { 
RootPlanNode original = new RootPlanNode(1, MockPlanNode.create(2)); RootPlanNode restored = serializeAndCheck(original, SqlDataSerializerHook.NODE_ROOT); checkEquals(original, restored, true); } }
emre-aydin/hazelcast
hazelcast/src/test/java/com/hazelcast/sql/impl/plan/node/RootPlanNodeTest.java
Java
apache-2.0
2,383
package org.datacommons.util; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import java.io.IOException; import java.net.URI; import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.util.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.Strings; import org.datacommons.proto.Mcf; // This class checks the existence of typically schema-related, nodes or (select types of) // triples in the KG or local graph. // // Users of this class submit checks for node (submitNodeCheck) or triple (submitTripleCheck) // along with a logging callback (LogCb). The implementation batches calls to DC, and on // completion invokes the callback to notify on existence failures. At the very end, users // need to issue a final drain call (drainRemoteCalls). // This class is thread-safe. public class ExistenceChecker { private static final Logger logger = LogManager.getLogger(ExistenceChecker.class); // Use the autopush end-point so we get more recent schema additions that // haven't rolled out. private static final String API_ROOT = "https://autopush.api.datacommons.org/node/property-values"; // For now we only need checks for certain Property/Class props. private static final Set<String> SCHEMA_PROPERTIES = Set.of(Vocabulary.DOMAIN_INCLUDES, Vocabulary.RANGE_INCLUDES, Vocabulary.SUB_CLASS_OF); // Batching thresholds. Allow tests to set this. public static int DC_CALL_BATCH_LIMIT = 1000; public static int MAX_PENDING_CALLS = 100000; // Useful for mocking. private final HttpClient httpClient; // Logging stuff. private final boolean verbose; private final LogWrapper logCtx; // This is a combination of local KG data and prior cached checks. // Node is just the DCID. Triple is "s,p,o" and the property just includes SCHEMA_PROPERTIES. 
private final Set<String> existingNodesOrTriples; // Existence cache private final Set<String> missingNodesOrTriples; // Absence cache // To amortize DC call latency we batch calls up to DC_CALL_BATCH_LIMIT. The batching happens // per (triple) predicate. // // Batch map: predicate -> subject -> object -> list of pending call-contexts // // We batch based on the number of subjects in a predicate. To avoid worst case memory // usage, if all checks are for the same node, we have a global limit of max pending calls. private final Map<String, Map<String, Map<String, List<LogCb>>>> remoteBatchMap; private int totalPendingCallCount = 0; public ExistenceChecker(HttpClient httpClient, boolean verbose, LogWrapper logCtx) { this.httpClient = httpClient; this.logCtx = logCtx; this.verbose = verbose; existingNodesOrTriples = new HashSet<>(); missingNodesOrTriples = new HashSet<>(); remoteBatchMap = new HashMap<>(); } public synchronized void submitNodeCheck(String node, LogCb logCb) throws IOException, InterruptedException { logCtx.incrementInfoCounterBy("Existence_NumChecks", 1); if (checkLocal(node, Vocabulary.TYPE_OF, "", logCb)) { return; } batchRemoteCall(node, Vocabulary.TYPE_OF, "", logCb); } public synchronized void submitTripleCheck(String sub, String pred, String obj, LogCb logCb) throws IOException, InterruptedException { if (pred.equals(Vocabulary.DOMAIN_INCLUDES) && (sub.contains("/") || sub.equals("count"))) { // Don't bother with domain checks for schema-less properties. // Measured property 'count' is an aggregate that is not a property of an instance, but // of a set. return; } logCtx.incrementInfoCounterBy("Existence_NumChecks", 1); if (checkLocal(sub, pred, obj, logCb)) { return; } batchRemoteCall(sub, pred, obj, logCb); } public synchronized void addLocalGraph(Mcf.McfGraph graph) { for (Map.Entry<String, Mcf.McfGraph.PropertyValues> node : graph.getNodesMap().entrySet()) { // Skip doing anything with StatVarObs. 
String typeOf = McfUtil.getPropVal(node.getValue(), Vocabulary.TYPE_OF); if (typeOf.equals(Vocabulary.STAT_VAR_OBSERVATION_TYPE) || typeOf.equals(Vocabulary.LEGACY_OBSERVATION_TYPE_SUFFIX)) { continue; } String dcid = McfUtil.getPropVal(node.getValue(), Vocabulary.DCID); if (dcid.isEmpty()) { continue; } existingNodesOrTriples.add(dcid); if (missingNodesOrTriples.contains(dcid)) { missingNodesOrTriples.remove(dcid); } if (!typeOf.equals(Vocabulary.CLASS_TYPE) && !typeOf.equals(Vocabulary.PROPERTY_TYPE)) { continue; } for (Map.Entry<String, Mcf.McfGraph.Values> pv : node.getValue().getPvsMap().entrySet()) { if (SCHEMA_PROPERTIES.contains(pv.getKey())) { for (Mcf.McfGraph.TypedValue tv : pv.getValue().getTypedValuesList()) { var key = makeKey(dcid, pv.getKey(), tv.getValue()); existingNodesOrTriples.add(key); if (missingNodesOrTriples.contains(key)) { missingNodesOrTriples.remove(key); } } } } } } public synchronized void drainRemoteCalls() throws IOException, InterruptedException { // To avoid mutating map while iterating, get the keys first. List<String> preds = new ArrayList<>(remoteBatchMap.keySet()); for (var pred : preds) { if (verbose) { logger.info("Draining " + remoteBatchMap.get(pred).size() + " dcids for predicate " + pred); } drainRemoteCallsForPredicate(pred, remoteBatchMap.get(pred)); remoteBatchMap.remove(pred); } } private void batchRemoteCall(String sub, String pred, String obj, LogCb logCb) throws IOException, InterruptedException { Map<String, Map<String, List<LogCb>>> subMap = null; if (remoteBatchMap.containsKey(pred)) { subMap = remoteBatchMap.get(pred); } else { subMap = new HashMap<>(); } Map<String, List<LogCb>> objMap = null; if (subMap.containsKey(sub)) { objMap = subMap.get(sub); } else { objMap = new HashMap<>(); } List<LogCb> calls = null; if (objMap.containsKey(obj)) { calls = objMap.get(obj); } else { calls = new ArrayList<>(); } // Add pending call. 
calls.add(logCb); objMap.put(obj, calls); subMap.put(sub, objMap); totalPendingCallCount++; remoteBatchMap.put(pred, subMap); // Maybe drain the batch. if (totalPendingCallCount >= MAX_PENDING_CALLS) { if (verbose) logger.info("Draining remote calls due to MAX_PENDING_CALLS"); drainRemoteCalls(); } else if (subMap.size() >= DC_CALL_BATCH_LIMIT) { if (verbose) { logger.info( "Draining due to batching limit with " + subMap.size() + " dcids for " + "predicate " + pred); } drainRemoteCallsForPredicate(pred, subMap); remoteBatchMap.remove(pred); } } private void drainRemoteCallsForPredicate( String pred, Map<String, Map<String, List<LogCb>>> subMap) throws IOException, InterruptedException { performDcCall(pred, new ArrayList<>(subMap.keySet()), subMap); } private void performDcCall( String pred, List<String> subs, Map<String, Map<String, List<LogCb>>> subMap) throws IOException, InterruptedException { logCtx.incrementInfoCounterBy("Existence_NumDcCalls", 1); var dataJson = callDc(subs, pred); if (dataJson == null) { if (verbose) { logger.info("DC call failed for - " + Strings.join(subs, ',') + ", " + pred); } // Important: If the dcid is malformed, Mixer can return failure. Also, if the URI is too // long, then too this happens. So issue independent RPCs now. If this happens often enough, // we can revisit. logger.warn("DC Call failed (bad DCID or URI length). 
Issuing individual calls now."); for (String sub : subs) { performDcCall(pred, List.of(sub), subMap); } return; } if (dataJson.entrySet().size() != subs.size()) { // Should not really happen, so throw exception throw new IOException( "Invalid results payload from Staging DC API endpoint for: '" + Strings.join(subs, ',') + "'," + " '" + pred + "': " + dataJson); } for (var entry : dataJson.entrySet()) { var sub = entry.getKey(); var nodeJson = entry.getValue().getAsJsonObject(); var objMap = subMap.get(sub); for (var kv : objMap.entrySet()) { var obj = kv.getKey(); var cbs = kv.getValue(); var key = makeKey(sub, pred, obj); if (checkOneResult(obj, nodeJson)) { existingNodesOrTriples.add(key); } else { if (verbose) { logger.info("Missing " + (obj.isEmpty() ? "node" : "triple") + " in DC " + key); } missingNodesOrTriples.add(key); // Log the missing details. for (var cb : cbs) { logEntry(cb, obj); } } totalPendingCallCount -= cbs.size(); } subMap.remove(sub); } } private boolean checkOneResult(String obj, JsonObject nodeJson) { if (nodeJson.has("out")) { if (obj.isEmpty()) { // Node existence case. if (nodeJson.getAsJsonArray("out").size() > 0) { return true; } } else { // Triple existence case. for (var objVal : nodeJson.getAsJsonArray("out")) { if (objVal.getAsJsonObject().getAsJsonPrimitive("dcid").getAsString().equals(obj)) { return true; } } } } return false; } // Returns true if we were able to complete the check locally. 
private boolean checkLocal(String sub, String pred, String obj, LogCb logCb) {
  // Returns true when the check could be resolved from the local caches (either as
  // known-existing or known-missing); false means a remote DC call is still required.
  String key = makeKey(sub, pred, obj);
  if (existingNodesOrTriples.contains(key)) {
    return true;
  }
  if (missingNodesOrTriples.contains(key)) {
    // Known to be missing: re-log the failure for this call-site, but no remote call needed.
    logEntry(logCb, obj);
    return true;
  }
  return false;
}

// Issues a single property-values request to the DC API for the given nodes and
// out-going property. Returns the parsed "payload" object, or null when the response
// carries no payload (treated by callers as a failed call).
private JsonObject callDc(List<String> nodes, String property)
    throws IOException, InterruptedException {
  JsonObject arg = new JsonObject();
  JsonArray dcids = new JsonArray();
  for (var node : nodes) {
    dcids.add(node);
  }
  arg.add("dcids", dcids);
  arg.addProperty("property", property);
  arg.addProperty("direction", "out");
  var request =
      HttpRequest.newBuilder(URI.create(API_ROOT))
          .version(HttpClient.Version.HTTP_1_1)
          .header("accept", "application/json")
          .POST(HttpRequest.BodyPublishers.ofString(arg.toString()))
          .build();
  var response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
  // The API wraps the real result in a JSON-escaped string under "payload".
  var payloadJson = new JsonParser().parse(response.body().trim()).getAsJsonObject();
  if (payloadJson == null || !payloadJson.has("payload")) return null;
  return new JsonParser().parse(payloadJson.get("payload").getAsString()).getAsJsonObject();
}

// Logs the appropriate existence-check failure counter/message for a node (empty obj)
// or triple (non-empty obj) check.
private static void logEntry(LogCb logCb, String obj) {
  String message, counter;
  if (obj.isEmpty()) {
    counter = "Existence_MissingReference";
    message = "Failed reference existence check";
  } else {
    counter = "Existence_MissingTriple";
    message = "Failed triple existence check";
  }
  logCb.logError(counter, message);
}

// Cache key: just the subject dcid for node-existence checks (empty obj),
// otherwise the full "sub,pred,obj" triple.
private static String makeKey(String s, String p, String o) {
  if (o.isEmpty()) {
    return s;
  }
  return s + "," + p + "," + o;
}
}
datacommonsorg/import
util/src/main/java/org/datacommons/util/ExistenceChecker.java
Java
apache-2.0
11,782
package jstamp.intruder;

/**
 * A single node of a singly-linked list: holds an arbitrary payload and a
 * reference to the next node (null at the end of the chain).
 */
public class List_Node {
    // Payload carried by this node.
    Object dataPtr;
    // Next node in the chain, or null.
    List_Node nextPtr;

    /** Creates an empty node; fields are populated by the owning list. */
    public List_Node() {
    }
}
DeuceSTM/DeuceSTM
src/test/jstamp/intruder/List_Node.java
Java
apache-2.0
116
/**
 * Copyright (C) 2015 Bruno Candido Volpato da Cunha (brunocvcunha@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.brunocvcunha.digesteroids.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.brunocvcunha.digesteroids.ReferenceTypeEnum;
import org.brunocvcunha.digesteroids.rule.DigesterRule;

/**
 * Annotation used to map field with another table.
 *
 * @author Bruno Candido Volpato da Cunha
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Repeatable(value = DigesterMappings.class)
public @interface DigesterMapping {

  /**
   * @return the name of the source this mapping applies to (empty means any source)
   */
  String source() default "";

  /**
   * @return how the {@link #value()} reference should be interpreted
   */
  ReferenceTypeEnum refType() default ReferenceTypeEnum.NORMAL;

  /**
   * @return the reference used to locate the value (required)
   */
  String value();

  /**
   * @return an optional rule class applied to the extracted value
   */
  Class<? extends DigesterRule> rule() default DigesterRule.class;

  /**
   * @return whether the value should be read as HTML text
   */
  boolean htmlText() default true;

  /**
   * @return the index of the text node to extract, or -1 for all
   */
  int textNode() default -1;

  /**
   * @return whether to trim the extracted value
   */
  boolean trim() default true;

  /**
   * @return the element attribute to read instead of its text (empty means none)
   */
  String attribute() default "";

  /**
   * @return whether this mapping must resolve to a value
   */
  boolean mandatory() default false;
}
brunocvcunha/digesteroids
src/main/java/org/brunocvcunha/digesteroids/annotation/DigesterMapping.java
Java
apache-2.0
2,020
package de.fiverx.handling;

import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

/**
 * Tests for {@code ResourceHolder}.
 *
 * project: fiverxlinksecurity-service
 * author: Pascal Knueppel
 * created at: 10.03.2016
 */
public class ResourceHolderTest {

    /**
     * Verifies that exactly two schema versions are discovered on the classpath.
     */
    @Test
    public void testGetSchemaVersions() throws IOException {
        List<String> schemaVersions = ResourceHolder.getSchemaVersions();

        // Dump one bundled schema so a failing run shows what was actually packaged.
        String schemaResource = "/de/fiverx/sv0100/schemas/RZeRezept_02_00_33.xsd";
        System.out.println(IOUtils.toString(getClass().getResourceAsStream(schemaResource)));

        Assert.assertEquals(2, schemaVersions.size());
    }
}
Captain-P-Goldfish/fiverx-api-java
src/test/java/de/fiverx/handling/ResourceHolderTest.java
Java
apache-2.0
662
package cn.aezo.bigdata.hdoop_project_analyse.transformer.mr.nu;

import java.io.IOException;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import cn.aezo.bigdata.hdoop_project_analyse.common.DateEnum;
import cn.aezo.bigdata.hdoop_project_analyse.common.EventLogConstants;
import cn.aezo.bigdata.hdoop_project_analyse.common.GlobalConstants;
import cn.aezo.bigdata.hdoop_project_analyse.transformer.model.dim.StatsUserDimension;
import cn.aezo.bigdata.hdoop_project_analyse.transformer.model.dim.base.DateDimension;
import cn.aezo.bigdata.hdoop_project_analyse.transformer.model.value.map.TimeOutputValue;
import cn.aezo.bigdata.hdoop_project_analyse.transformer.model.value.reduce.MapWritableValue;
import cn.aezo.bigdata.hdoop_project_analyse.transformer.mr.TransformerBaseRunner;
import cn.aezo.bigdata.hdoop_project_analyse.util.JdbcManager;
import cn.aezo.bigdata.hdoop_project_analyse.util.TimeUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.log4j.Logger;

/**
 * Entry point for the "new install users" statistics job.
 *
 * <p>Runs the MapReduce job that counts newly installed users, then (on success)
 * rolls yesterday's cumulative totals forward into today's rows of the
 * {@code stats_user} and {@code stats_device_browser} report tables.
 */
public class NewInstallUserRunner extends TransformerBaseRunner {
    private static final Logger logger = Logger.getLogger(NewInstallUserRunner.class);

    /**
     * Main entry point: configures and starts the new-install-user job.
     *
     * @param args command line arguments forwarded to the runner
     */
    public static void main(String[] args) {
        NewInstallUserRunner runner = new NewInstallUserRunner();
        runner.setupRunner("new_install_user", NewInstallUserRunner.class, NewInstallUserMapper.class,
                NewInstallUserReducer.class, StatsUserDimension.class, TimeOutputValue.class,
                StatsUserDimension.class, MapWritableValue.class);
        try {
            runner.startRunner(args);
        } catch (Exception e) {
            // Log with the class logger (not printStackTrace) so failures show up in job logs.
            logger.error("运行计算新用户的job出现异常", e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds the HBase scan filter: only {@code launch} events, restricted to the
     * columns the mapper actually reads.
     */
    @Override
    protected Filter fetchHbaseFilter() {
        FilterList filterList = new FilterList();
        // Only analyze launch events.
        filterList.addFilter(new SingleColumnValueFilter(Bytes.toBytes(EventLogConstants.EVENT_LOGS_FAMILY_NAME),
                Bytes.toBytes(EventLogConstants.LOG_COLUMN_NAME_EVENT_NAME), CompareOp.EQUAL,
                Bytes.toBytes(EventLogConstants.EventEnum.LAUNCH.alias)));
        // Column names the mapper needs.
        String[] columns = new String[] {
                EventLogConstants.LOG_COLUMN_NAME_EVENT_NAME,
                EventLogConstants.LOG_COLUMN_NAME_UUID,
                EventLogConstants.LOG_COLUMN_NAME_SERVER_TIME,
                EventLogConstants.LOG_COLUMN_NAME_PLATFORM,
                EventLogConstants.LOG_COLUMN_NAME_BROWSER_NAME,
                EventLogConstants.LOG_COLUMN_NAME_BROWSER_VERSION };
        filterList.addFilter(this.getColumnFilter(columns));
        return filterList;
    }

    /**
     * After the MR job finishes: if it succeeded, roll up the total-user counts;
     * if it failed without an exception, raise one so the failure is visible.
     *
     * @throws IOException wrapping any failure in the post-job processing
     */
    @Override
    protected void afterRunJob(Job job, Throwable error) throws IOException {
        try {
            if (error == null && job.isSuccessful()) {
                // Job ran cleanly: compute cumulative total users.
                this.calculateTotalUsers(job.getConfiguration());
            } else if (error == null) {
                // Job finished without throwing, but reported failure.
                throw new RuntimeException("job 运行失败");
            }
        } catch (Throwable e) {
            // NOTE: the catch is only reachable when error == null (see the guards above),
            // so no attempt is made to merge e into error.
            throw new IOException("调用afterRunJob产生异常", e);
        } finally {
            super.afterRunJob(job, error);
        }
    }

    /**
     * Computes cumulative total-install-user counts for the run date:
     * yesterday's totals plus today's new users, upserted into
     * {@code stats_user} (per platform) and {@code stats_device_browser}
     * (per platform + browser).
     *
     * @param conf job configuration carrying the run date
     */
    private void calculateTotalUsers(Configuration conf) {
        Connection conn = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            long date = TimeUtil.parseString2Long(conf.get(GlobalConstants.RUNNING_DATE_PARAMES));
            // Date dimensions for the run day and the previous day.
            DateDimension todayDimension = DateDimension.buildDate(date, DateEnum.DAY);
            DateDimension yesterdayDimension = DateDimension.buildDate(
                    date - GlobalConstants.DAY_OF_MILLISECONDS, DateEnum.DAY);

            // 1. Resolve the date dimension ids (-1 when the row does not exist yet).
            conn = JdbcManager.getConnection(conf, GlobalConstants.WAREHOUSE_OF_REPORT);
            int yesterdayDimensionId = queryDateDimensionId(conn, yesterdayDimension);
            int todayDimensionId = queryDateDimensionId(conn, todayDimension);

            // 2. Yesterday's cumulative totals, keyed "platformId" -> totalUsers.
            Map<String, Integer> oldValueMap = new HashMap<String, Integer>();

            // ---- stats_user (per platform) ----
            if (yesterdayDimensionId > -1) {
                pstmt = conn.prepareStatement("select `platform_dimension_id`,`total_install_users` from `stats_user` where `date_dimension_id`=?");
                pstmt.setInt(1, yesterdayDimensionId);
                rs = pstmt.executeQuery();
                while (rs.next()) {
                    int platformId = rs.getInt("platform_dimension_id");
                    int totalUsers = rs.getInt("total_install_users");
                    oldValueMap.put("" + platformId, totalUsers);
                }
            }

            // Add today's new users on top of yesterday's totals.
            pstmt = conn.prepareStatement("select `platform_dimension_id`,`new_install_users` from `stats_user` where `date_dimension_id`=?");
            pstmt.setInt(1, todayDimensionId);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                int platformId = rs.getInt("platform_dimension_id");
                int newUsers = rs.getInt("new_install_users");
                if (oldValueMap.containsKey("" + platformId)) {
                    newUsers += oldValueMap.get("" + platformId);
                }
                oldValueMap.put("" + platformId, newUsers);
            }

            // Upsert today's cumulative totals.
            pstmt = conn.prepareStatement("INSERT INTO `stats_user`(`platform_dimension_id`,`date_dimension_id`,`total_install_users`) VALUES(?, ?, ?) ON DUPLICATE KEY UPDATE `total_install_users` = ?");
            for (Map.Entry<String, Integer> entry : oldValueMap.entrySet()) {
                pstmt.setInt(1, Integer.valueOf(entry.getKey()));
                pstmt.setInt(2, todayDimensionId);
                pstmt.setInt(3, entry.getValue());
                pstmt.setInt(4, entry.getValue());
                pstmt.execute();
            }

            // ---- stats_device_browser (per platform + browser), keyed "platformId_browserId" ----
            oldValueMap.clear();
            if (yesterdayDimensionId > -1) {
                pstmt = conn.prepareStatement("select `platform_dimension_id`,`browser_dimension_id`,`total_install_users` from `stats_device_browser` where `date_dimension_id`=?");
                pstmt.setInt(1, yesterdayDimensionId);
                rs = pstmt.executeQuery();
                while (rs.next()) {
                    int platformId = rs.getInt("platform_dimension_id");
                    int browserId = rs.getInt("browser_dimension_id");
                    int totalUsers = rs.getInt("total_install_users");
                    oldValueMap.put(platformId + "_" + browserId, totalUsers);
                }
            }

            // Add today's new users on top of yesterday's totals.
            pstmt = conn.prepareStatement("select `platform_dimension_id`,`browser_dimension_id`,`new_install_users` from `stats_device_browser` where `date_dimension_id`=?");
            pstmt.setInt(1, todayDimensionId);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                int platformId = rs.getInt("platform_dimension_id");
                int browserId = rs.getInt("browser_dimension_id");
                int newUsers = rs.getInt("new_install_users");
                String key = platformId + "_" + browserId;
                if (oldValueMap.containsKey(key)) {
                    newUsers += oldValueMap.get(key);
                }
                oldValueMap.put(key, newUsers);
            }

            // Upsert today's cumulative totals.
            pstmt = conn.prepareStatement("INSERT INTO `stats_device_browser`(`platform_dimension_id`,`browser_dimension_id`,`date_dimension_id`,`total_install_users`) VALUES(?, ?, ?, ?) ON DUPLICATE KEY UPDATE `total_install_users` = ?");
            for (Map.Entry<String, Integer> entry : oldValueMap.entrySet()) {
                String[] key = entry.getKey().split("_");
                pstmt.setInt(1, Integer.valueOf(key[0]));
                pstmt.setInt(2, Integer.valueOf(key[1]));
                pstmt.setInt(3, todayDimensionId);
                pstmt.setInt(4, entry.getValue());
                pstmt.setInt(5, entry.getValue());
                pstmt.execute();
            }
        } catch (SQLException e) {
            logger.error("SQL error while calculating total users", e);
        } finally {
            // Close in reverse acquisition order; closing the connection also
            // releases any intermediate statements the loop re-assigned over.
            closeQuietly(rs);
            closeQuietly(pstmt);
            closeQuietly(conn);
        }
    }

    /**
     * Looks up the {@code dimension_date} id for the given date dimension.
     *
     * @return the dimension id, or -1 when no matching row exists
     */
    private int queryDateDimensionId(Connection conn, DateDimension dimension) throws SQLException {
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = conn.prepareStatement("SELECT `id` FROM `dimension_date` WHERE `year` = ? AND `season` = ? AND `month` = ? AND `week` = ? AND `day` = ? AND `type` = ? AND `calendar` = ?");
            int i = 0;
            pstmt.setInt(++i, dimension.getYear());
            pstmt.setInt(++i, dimension.getSeason());
            pstmt.setInt(++i, dimension.getMonth());
            pstmt.setInt(++i, dimension.getWeek());
            pstmt.setInt(++i, dimension.getDay());
            pstmt.setString(++i, dimension.getType());
            pstmt.setDate(++i, new Date(dimension.getCalendar().getTime()));
            rs = pstmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            }
            return -1;
        } finally {
            closeQuietly(rs);
            closeQuietly(pstmt);
        }
    }

    /**
     * Closes a JDBC resource, logging (not propagating) any close failure so
     * cleanup never masks the original exception.
     */
    private static void closeQuietly(AutoCloseable resource) {
        if (resource != null) {
            try {
                resource.close();
            } catch (Exception e) {
                logger.warn("Failed to close JDBC resource", e);
            }
        }
    }
}
oldinaction/smjava
bigdata-hadoop-project/hadoop_project_analyse/src/main/java/cn/aezo/bigdata/hdoop_project_analyse/transformer/mr/nu/NewInstallUserRunner.java
Java
apache-2.0
10,813
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import groovy.lang.Closure; import groovy.lang.MissingPropertyException; import groovy.util.ObservableList; import org.codehaus.groovy.runtime.InvokerInvocationException; import org.gradle.api.Action; import org.gradle.api.AntBuilder; import org.gradle.api.Describable; import org.gradle.api.InvalidUserDataException; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.internal.file.FileCollectionFactory; import org.gradle.api.internal.file.temp.TemporaryFileProvider; import org.gradle.api.internal.project.ProjectInternal; import org.gradle.api.internal.project.taskfactory.TaskIdentity; import org.gradle.api.internal.tasks.DefaultTaskDependency; import org.gradle.api.internal.tasks.DefaultTaskDestroyables; import org.gradle.api.internal.tasks.DefaultTaskInputs; import org.gradle.api.internal.tasks.DefaultTaskLocalState; import org.gradle.api.internal.tasks.DefaultTaskOutputs; import org.gradle.api.internal.tasks.InputChangesAwareTaskAction; import org.gradle.api.internal.tasks.TaskContainerInternal; import org.gradle.api.internal.tasks.TaskDependencyInternal; import org.gradle.api.internal.tasks.TaskLocalStateInternal; import org.gradle.api.internal.tasks.TaskMutator; import 
org.gradle.api.internal.tasks.TaskStateInternal; import org.gradle.api.internal.tasks.execution.TaskExecutionAccessListener; import org.gradle.api.internal.tasks.properties.PropertyWalker; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; import org.gradle.api.plugins.Convention; import org.gradle.api.plugins.ExtensionContainer; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.services.BuildService; import org.gradle.api.services.internal.BuildServiceRegistryInternal; import org.gradle.api.specs.AndSpec; import org.gradle.api.specs.Spec; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.TaskDependency; import org.gradle.api.tasks.TaskDestroyables; import org.gradle.api.tasks.TaskInstantiationException; import org.gradle.api.tasks.TaskLocalState; import org.gradle.configuration.internal.UserCodeApplicationContext; import org.gradle.internal.Cast; import org.gradle.internal.Factory; import org.gradle.internal.event.ListenerManager; import org.gradle.internal.execution.history.changes.InputChangesInternal; import org.gradle.internal.extensibility.ExtensibleDynamicObject; import org.gradle.internal.hash.ClassLoaderHierarchyHasher; import org.gradle.internal.instantiation.InstanceGenerator; import org.gradle.internal.logging.LoggingManagerInternal; import org.gradle.internal.logging.StandardOutputCapture; import org.gradle.internal.logging.slf4j.ContextAwareTaskLogger; import org.gradle.internal.logging.slf4j.DefaultContextAwareTaskLogger; import org.gradle.internal.metaobject.DynamicObject; import org.gradle.internal.resources.ResourceLock; import org.gradle.internal.resources.SharedResource; import org.gradle.internal.scripts.ScriptOrigin; import org.gradle.internal.serialization.Cached; import org.gradle.internal.service.ServiceRegistry; import org.gradle.internal.snapshot.impl.ImplementationSnapshot; import org.gradle.util.Path; import 
org.gradle.util.internal.ConfigureUtil; import org.gradle.util.internal.GFileUtils; import org.gradle.work.DisableCachingByDefault; import javax.annotation.Nullable; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; import java.time.Duration; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import static org.gradle.api.internal.lambdas.SerializableLambdas.factory; import static org.gradle.util.internal.GUtil.uncheckedCall; /** * @deprecated This class will be removed in Gradle 8.0. Please use {@link org.gradle.api.DefaultTask} instead. */ @Deprecated @DisableCachingByDefault(because = "Abstract super-class, not to be instantiated directly") public abstract class AbstractTask implements TaskInternal, DynamicObjectAware { private static final Logger BUILD_LOGGER = Logging.getLogger(Task.class); private static final ThreadLocal<TaskInfo> NEXT_INSTANCE = new ThreadLocal<TaskInfo>(); private final TaskIdentity<?> identity; private final ProjectInternal project; private List<InputChangesAwareTaskAction> actions; private boolean enabled = true; private final DefaultTaskDependency dependencies; /** * "lifecycle dependencies" are dependencies declared via an explicit {@link Task#dependsOn(Object...)} */ private final DefaultTaskDependency lifecycleDependencies; private final DefaultTaskDependency mustRunAfter; private final DefaultTaskDependency finalizedBy; private final DefaultTaskDependency shouldRunAfter; private ExtensibleDynamicObject extensibleDynamicObject; private String description; private String group; private final Property<Duration> timeout; private AndSpec<Task> onlyIfSpec = createNewOnlyIfSpec(); private String reasonNotToTrackState; private String reasonIncompatibleWithConfigurationCache; private final ServiceRegistry services; private 
final TaskStateInternal state; private final ContextAwareTaskLogger logger = new DefaultContextAwareTaskLogger(BUILD_LOGGER); private final TaskMutator taskMutator; private ObservableList observableActionList; private boolean impliesSubProjects; private boolean hasCustomActions; private final TaskInputsInternal taskInputs; private final TaskOutputsInternal taskOutputs; private final TaskDestroyables taskDestroyables; private final TaskLocalStateInternal taskLocalState; private LoggingManagerInternal loggingManager; private Set<Provider<? extends BuildService<?>>> requiredServices; protected AbstractTask() { this(taskInfo()); } private static TaskInfo taskInfo() { return NEXT_INSTANCE.get(); } private AbstractTask(TaskInfo taskInfo) { if (taskInfo == null) { throw new TaskInstantiationException(String.format("Task of type '%s' has been instantiated directly which is not supported. Tasks can only be created using the Gradle API or DSL.", getClass().getName())); } this.identity = taskInfo.identity; this.project = taskInfo.project; assert project != null; assert identity.name != null; this.state = new TaskStateInternal(); TaskContainerInternal tasks = project.getTasks(); this.mustRunAfter = new DefaultTaskDependency(tasks); this.finalizedBy = new DefaultTaskDependency(tasks); this.shouldRunAfter = new DefaultTaskDependency(tasks); this.lifecycleDependencies = new DefaultTaskDependency(tasks); this.services = project.getServices(); PropertyWalker propertyWalker = services.get(PropertyWalker.class); FileCollectionFactory fileCollectionFactory = services.get(FileCollectionFactory.class); taskMutator = new TaskMutator(this); taskInputs = new DefaultTaskInputs(this, taskMutator, propertyWalker, fileCollectionFactory); taskOutputs = new DefaultTaskOutputs(this, taskMutator, propertyWalker, fileCollectionFactory); taskDestroyables = new DefaultTaskDestroyables(taskMutator, fileCollectionFactory); taskLocalState = new DefaultTaskLocalState(taskMutator, fileCollectionFactory); 
this.dependencies = new DefaultTaskDependency(tasks, ImmutableSet.of(taskInputs, lifecycleDependencies)); this.timeout = project.getObjects().property(Duration.class); } private void assertDynamicObject() { if (extensibleDynamicObject == null) { extensibleDynamicObject = new ExtensibleDynamicObject(this, identity.type, services.get(InstanceGenerator.class)); } } public static <T extends Task> T injectIntoNewInstance(ProjectInternal project, TaskIdentity<T> identity, Callable<T> factory) { NEXT_INSTANCE.set(new TaskInfo(identity, project)); try { return uncheckedCall(factory); } finally { NEXT_INSTANCE.set(null); } } @Internal @Override public TaskStateInternal getState() { return state; } @Override @Internal public AntBuilder getAnt() { return project.getAnt(); } @Internal @Override public Project getProject() { notifyProjectAccess(); return project; } @Internal @Override public String getName() { return identity.name; } @Override public TaskIdentity<?> getTaskIdentity() { return identity; } @Internal @Override public List<Action<? super Task>> getActions() { if (observableActionList == null) { observableActionList = new ObservableActionWrapperList(getTaskActions()); observableActionList.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { taskMutator.assertMutable("Task.getActions()", evt); } }); } return Cast.uncheckedNonnullCast(observableActionList); } @Override public List<InputChangesAwareTaskAction> getTaskActions() { if (actions == null) { actions = new ArrayList<InputChangesAwareTaskAction>(3); } return actions; } @Override public boolean hasTaskActions() { return actions != null && !actions.isEmpty(); } @Override public void setActions(final List<Action<? super Task>> replacements) { taskMutator.mutate("Task.setActions(List<Action>)", new Runnable() { @Override public void run() { getTaskActions().clear(); for (Action<? 
super Task> action : replacements) { doLast(action); } } }); } @Internal @Override public TaskDependencyInternal getTaskDependencies() { notifyTaskDependenciesAccess("Task.taskDependencies"); return dependencies; } @Internal @Override public TaskDependencyInternal getLifecycleDependencies() { return lifecycleDependencies; } @Internal @Override public Set<Object> getDependsOn() { notifyTaskDependenciesAccess("Task.dependsOn"); return lifecycleDependencies.getMutableValues(); } @Override public void setDependsOn(final Iterable<?> dependsOn) { taskMutator.mutate("Task.setDependsOn(Iterable)", new Runnable() { @Override public void run() { lifecycleDependencies.setValues(dependsOn); } }); } @Override public void onlyIf(final Closure onlyIfClosure) { taskMutator.mutate("Task.onlyIf(Closure)", new Runnable() { @Override public void run() { onlyIfSpec = onlyIfSpec.and(onlyIfClosure); } }); } @Override public void onlyIf(final Spec<? super Task> spec) { taskMutator.mutate("Task.onlyIf(Spec)", new Runnable() { @Override public void run() { onlyIfSpec = onlyIfSpec.and(spec); } }); } @Override public void setOnlyIf(final Spec<? super Task> spec) { taskMutator.mutate("Task.setOnlyIf(Spec)", new Runnable() { @Override public void run() { onlyIfSpec = createNewOnlyIfSpec().and(spec); } }); } @Override public void setOnlyIf(final Closure onlyIfClosure) { taskMutator.mutate("Task.setOnlyIf(Closure)", new Runnable() { @Override public void run() { onlyIfSpec = createNewOnlyIfSpec().and(onlyIfClosure); } }); } private AndSpec<Task> createNewOnlyIfSpec() { return new AndSpec<Task>(new Spec<Task>() { @Override public boolean isSatisfiedBy(Task element) { return element == AbstractTask.this && enabled; } }); } @Override public Spec<? 
super TaskInternal> getOnlyIf() { return onlyIfSpec; } @Override public void doNotTrackState(String reasonNotToTrackState) { if (reasonNotToTrackState == null) { throw new InvalidUserDataException("notTrackingReason must not be null!"); } taskMutator.mutate("Task.doNotTrackState(String)", () -> this.reasonNotToTrackState = reasonNotToTrackState ); } @Override public Optional<String> getReasonNotToTrackState() { return Optional.ofNullable(reasonNotToTrackState); } @Override public void notCompatibleWithConfigurationCache(String reason) { taskMutator.mutate("Task.notCompatibleWithConfigurationCache(String)", () -> { reasonIncompatibleWithConfigurationCache = reason; }); } @Override public boolean isCompatibleWithConfigurationCache() { return reasonIncompatibleWithConfigurationCache == null; } @Override public Optional<String> getReasonTaskIsIncompatibleWithConfigurationCache() { return Optional.ofNullable(reasonIncompatibleWithConfigurationCache); } @Internal @Override public boolean getDidWork() { return state.getDidWork(); } @Override public void setDidWork(boolean didWork) { state.setDidWork(didWork); } @Internal public boolean isEnabled() { return enabled; } @Internal @Override public boolean getEnabled() { return enabled; } @Override public void setEnabled(final boolean enabled) { taskMutator.mutate("Task.setEnabled(boolean)", new Runnable() { @Override public void run() { AbstractTask.this.enabled = enabled; } }); } @Override public boolean getImpliesSubProjects() { return impliesSubProjects; } @Override public void setImpliesSubProjects(boolean impliesSubProjects) { this.impliesSubProjects = impliesSubProjects; } @Internal @Override public String getPath() { return identity.projectPath.toString(); } @Override public Path getIdentityPath() { return identity.identityPath; } @Override public Task dependsOn(final Object... 
paths) { taskMutator.mutate("Task.dependsOn(Object...)", new Runnable() { @Override public void run() { lifecycleDependencies.add(paths); } }); return this; } @Override public Task doFirst(final Action<? super Task> action) { return doFirst("doFirst {} action", action); } @Override public Task doFirst(final String actionName, final Action<? super Task> action) { hasCustomActions = true; if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } taskMutator.mutate("Task.doFirst(Action)", new Runnable() { @Override public void run() { getTaskActions().add(0, wrap(action, actionName)); } }); return this; } @Override public Task doLast(final Action<? super Task> action) { return doLast("doLast {} action", action); } @Override public Task doLast(final String actionName, final Action<? super Task> action) { hasCustomActions = true; if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } taskMutator.mutate("Task.doLast(Action)", new Runnable() { @Override public void run() { getTaskActions().add(wrap(action, actionName)); } }); return this; } @Override public int compareTo(Task otherTask) { int depthCompare = project.compareTo(otherTask.getProject()); if (depthCompare == 0) { return getPath().compareTo(otherTask.getPath()); } else { return depthCompare; } } @Internal @Override public Logger getLogger() { return logger; } @Internal @Override public org.gradle.api.logging.LoggingManager getLogging() { return loggingManager(); } @Override public StandardOutputCapture getStandardOutputCapture() { return loggingManager(); } private LoggingManagerInternal loggingManager() { if (loggingManager == null) { loggingManager = services.getFactory(org.gradle.internal.logging.LoggingManagerInternal.class).create(); } return loggingManager; } @Override public Object property(String propertyName) throws MissingPropertyException { assertDynamicObject(); return extensibleDynamicObject.getProperty(propertyName); } @Override public 
boolean hasProperty(String propertyName) { assertDynamicObject(); return extensibleDynamicObject.hasProperty(propertyName); } @Override public void setProperty(String name, Object value) { assertDynamicObject(); extensibleDynamicObject.setProperty(name, value); } @Internal @Override @Deprecated public Convention getConvention() { assertDynamicObject(); return extensibleDynamicObject.getConvention(); } @Internal @Override public ExtensionContainer getExtensions() { return getConvention(); } @Internal @Override public DynamicObject getAsDynamicObject() { assertDynamicObject(); return extensibleDynamicObject; } @Internal @Override public String getDescription() { return description; } @Override public void setDescription(String description) { this.description = description; } @Internal @Override public String getGroup() { return group; } @Override public void setGroup(String group) { this.group = group; } @Internal @Override public TaskInputsInternal getInputs() { return taskInputs; } @Internal @Override public TaskOutputsInternal getOutputs() { return taskOutputs; } @Internal @Override public TaskDestroyables getDestroyables() { return taskDestroyables; } @Internal @Override public TaskLocalState getLocalState() { return taskLocalState; } @Internal protected ServiceRegistry getServices() { return services; } @Override public Task doFirst(final Closure action) { hasCustomActions = true; if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } taskMutator.mutate("Task.doFirst(Closure)", new Runnable() { @Override public void run() { getTaskActions().add(0, convertClosureToAction(action, "doFirst {} action")); } }); return this; } @Override public Task doLast(final Closure action) { hasCustomActions = true; if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } taskMutator.mutate("Task.doLast(Closure)", new Runnable() { @Override public void run() { getTaskActions().add(convertClosureToAction(action, 
"doLast {} action")); } }); return this; } @Override public Task configure(Closure closure) { return ConfigureUtil.configureSelf(closure, this); } @Internal @Override public File getTemporaryDir() { File dir = getServices().get(TemporaryFileProvider.class).newTemporaryFile(getName()); GFileUtils.mkdirs(dir); return dir; } // note: this method is on TaskInternal @Override public Factory<File> getTemporaryDirFactory() { // Cached during serialization so it can be isolated from this task final Cached<File> temporaryDir = Cached.of(this::getTemporaryDir); return factory(temporaryDir::get); } private InputChangesAwareTaskAction convertClosureToAction(Closure actionClosure, String actionName) { return new ClosureTaskAction(actionClosure, actionName, getServices().get(UserCodeApplicationContext.class).current()); } private InputChangesAwareTaskAction wrap(final Action<? super Task> action) { return wrap(action, "unnamed action"); } private InputChangesAwareTaskAction wrap(final Action<? super Task> action, String actionName) { if (action instanceof InputChangesAwareTaskAction) { return (InputChangesAwareTaskAction) action; } return new TaskActionWrapper(action, actionName); } private static class TaskInfo { private final TaskIdentity<?> identity; private final ProjectInternal project; private TaskInfo(TaskIdentity<?> identity, ProjectInternal project) { this.identity = identity; this.project = project; } } private static class ClosureTaskAction implements InputChangesAwareTaskAction { private final Closure<?> closure; private final String actionName; @Nullable private final UserCodeApplicationContext.Application application; private ClosureTaskAction(Closure<?> closure, String actionName, @Nullable UserCodeApplicationContext.Application application) { this.closure = closure; this.actionName = actionName; this.application = application; } @Override public void setInputChanges(InputChangesInternal inputChanges) { } @Override public void clearInputChanges() { } @Override 
public void execute(Task task) { if (application == null) { doExecute(task); } else { application.reapply(() -> doExecute(task)); } } private void doExecute(Task task) { closure.setDelegate(task); closure.setResolveStrategy(Closure.DELEGATE_FIRST); ClassLoader original = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(closure.getClass().getClassLoader()); try { if (closure.getMaximumNumberOfParameters() == 0) { closure.call(); } else { closure.call(task); } } catch (InvokerInvocationException e) { Throwable cause = e.getCause(); if (cause instanceof RuntimeException) { throw (RuntimeException) cause; } throw e; } finally { Thread.currentThread().setContextClassLoader(original); } } @Override public ImplementationSnapshot getActionImplementation(ClassLoaderHierarchyHasher hasher) { return ImplementationSnapshot.of(AbstractTask.getActionClassName(closure), hasher.getClassLoaderHash(closure.getClass().getClassLoader())); } @Override public String getDisplayName() { return "Execute " + actionName; } } private static class TaskActionWrapper implements InputChangesAwareTaskAction { private final Action<? super Task> action; private final String maybeActionName; /** * The <i>action name</i> is used to construct a human readable name for * the actions to be used in progress logging. It is only used if * the wrapped action does not already implement {@link Describable}. */ public TaskActionWrapper(Action<? 
super Task> action, String maybeActionName) { this.action = action; this.maybeActionName = maybeActionName; } @Override public void setInputChanges(InputChangesInternal inputChanges) { } @Override public void clearInputChanges() { } @Override public void execute(Task task) { ClassLoader original = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(action.getClass().getClassLoader()); try { action.execute(task); } finally { Thread.currentThread().setContextClassLoader(original); } } @Override public ImplementationSnapshot getActionImplementation(ClassLoaderHierarchyHasher hasher) { return ImplementationSnapshot.of(AbstractTask.getActionClassName(action), hasher.getClassLoaderHash(action.getClass().getClassLoader())); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof TaskActionWrapper)) { return false; } TaskActionWrapper that = (TaskActionWrapper) o; return action.equals(that.action); } @Override public int hashCode() { return action.hashCode(); } @Override public String getDisplayName() { if (action instanceof Describable) { return ((Describable) action).getDisplayName(); } return "Execute " + maybeActionName; } } private static String getActionClassName(Object action) { if (action instanceof ScriptOrigin) { ScriptOrigin origin = (ScriptOrigin) action; return origin.getOriginalClassName() + "_" + origin.getContentHash(); } else { return action.getClass().getName(); } } @Override public void setMustRunAfter(final Iterable<?> mustRunAfterTasks) { taskMutator.mutate("Task.setMustRunAfter(Iterable)", new Runnable() { @Override public void run() { mustRunAfter.setValues(mustRunAfterTasks); } }); } @Override public Task mustRunAfter(final Object... 
paths) { taskMutator.mutate("Task.mustRunAfter(Object...)", new Runnable() { @Override public void run() { mustRunAfter.add(paths); } }); return this; } @Internal @Override public TaskDependency getMustRunAfter() { return mustRunAfter; } @Override public void setFinalizedBy(final Iterable<?> finalizedByTasks) { taskMutator.mutate("Task.setFinalizedBy(Iterable)", new Runnable() { @Override public void run() { finalizedBy.setValues(finalizedByTasks); } }); } @Override public Task finalizedBy(final Object... paths) { taskMutator.mutate("Task.finalizedBy(Object...)", new Runnable() { @Override public void run() { finalizedBy.add(paths); } }); return this; } @Internal @Override public TaskDependency getFinalizedBy() { return finalizedBy; } @Override public TaskDependency shouldRunAfter(final Object... paths) { taskMutator.mutate("Task.shouldRunAfter(Object...)", new Runnable() { @Override public void run() { shouldRunAfter.add(paths); } }); return shouldRunAfter; } @Override public void setShouldRunAfter(final Iterable<?> shouldRunAfterTasks) { taskMutator.mutate("Task.setShouldRunAfter(Iterable)", new Runnable() { @Override public void run() { shouldRunAfter.setValues(shouldRunAfterTasks); } }); } @Internal @Override public TaskDependency getShouldRunAfter() { return shouldRunAfter; } private class ObservableActionWrapperList extends ObservableList { public ObservableActionWrapperList(List delegate) { super(delegate); } @Override public boolean add(Object action) { if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } return super.add(wrap(Cast.uncheckedNonnullCast(action))); } @Override public void add(int index, Object action) { if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } super.add(index, wrap(Cast.uncheckedNonnullCast(action))); } @Override public boolean addAll(Collection actions) { if (actions == null) { throw new InvalidUserDataException("Actions must not be null!"); } return 
super.addAll(transformToContextAwareTaskActions(Cast.uncheckedNonnullCast(actions))); } @Override public boolean addAll(int index, Collection actions) { if (actions == null) { throw new InvalidUserDataException("Actions must not be null!"); } return super.addAll(index, transformToContextAwareTaskActions(Cast.uncheckedNonnullCast(actions))); } @Override public Object set(int index, Object action) { if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } return super.set(index, wrap(Cast.uncheckedNonnullCast(action))); } @Override public boolean removeAll(Collection actions) { return super.removeAll(transformToContextAwareTaskActions(Cast.uncheckedNonnullCast(actions))); } @Override public boolean remove(Object action) { return super.remove(wrap(Cast.uncheckedNonnullCast(action))); } private Collection<InputChangesAwareTaskAction> transformToContextAwareTaskActions(Collection<Object> c) { return Collections2.transform(c, input -> wrap(Cast.uncheckedCast(input))); } } @Override public void prependParallelSafeAction(final Action<? super Task> action) { if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } getTaskActions().add(0, wrap(action)); } @Override public void appendParallelSafeAction(final Action<? super Task> action) { if (action == null) { throw new InvalidUserDataException("Action must not be null!"); } getTaskActions().add(wrap(action)); } @Override public boolean isHasCustomActions() { return hasCustomActions; } @Internal @Override public Property<Duration> getTimeout() { return timeout; } @Override public void usesService(Provider<? extends BuildService<?>> service) { taskMutator.mutate("Task.usesService(Provider)", () -> { if (requiredServices == null) { requiredServices = new HashSet<>(); } // TODO:configuration-cache assert build service is from the same build as the task requiredServices.add(service); }); } public Set<Provider<? 
extends BuildService<?>>> getRequiredServices() { if (requiredServices == null) { return Collections.emptySet(); } return requiredServices; } @Override public List<ResourceLock> getSharedResources() { if (requiredServices == null) { return Collections.emptyList(); } ImmutableList.Builder<ResourceLock> locks = ImmutableList.builder(); BuildServiceRegistryInternal serviceRegistry = getServices().get(BuildServiceRegistryInternal.class); for (Provider<? extends BuildService<?>> service : requiredServices) { SharedResource resource = serviceRegistry.forService(service); if (resource.getMaxUsages() > 0) { locks.add(resource.getResourceLock(1)); } } return locks.build(); } private void notifyProjectAccess() { if (state.getExecuting()) { getTaskExecutionAccessBroadcaster().onProjectAccess("Task.project", this); } } private void notifyTaskDependenciesAccess(String invocationDescription) { if (state.getExecuting()) { getTaskExecutionAccessBroadcaster().onTaskDependenciesAccess(invocationDescription, this); } } private TaskExecutionAccessListener getTaskExecutionAccessBroadcaster() { return services.get(ListenerManager.class).getBroadcaster(TaskExecutionAccessListener.class); } }
blindpirate/gradle
subprojects/core/src/main/java/org/gradle/api/internal/AbstractTask.java
Java
apache-2.0
34,161
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.fudgemsg;

import static org.testng.AssertJUnit.assertEquals;

import org.testng.annotations.Test;

import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;

/**
 * Checks that an {@link EquitySecurity} survives a round trip through Fudge
 * encoding and decoding unchanged.
 */
@Test(groups = TestGroup.UNIT)
public class EquitySecurityFudgeEncodingTest extends FinancialTestBase {

  // Reference instance: exchange "A", exchange code "B", company name "C", USD.
  private static final EquitySecurity REF = new EquitySecurity("A", "B", "C", Currency.USD);

  /**
   * Encodes the reference security to Fudge and back, and verifies that the
   * decoded object equals the original.
   */
  @Test
  public void testCycle() {
    final EquitySecurity cycled = cycleObject(EquitySecurity.class, REF);
    assertEquals(REF, cycled);
  }

}
McLeodMoores/starling
projects/financial/src/test/java/com/opengamma/financial/fudgemsg/EquitySecurityFudgeEncodingTest.java
Java
apache-2.0
740
package data; public class Genotype { private int alleleLo; private int alleleHi; public Genotype(int alleleOne, int alleleTwo) { super(); if(alleleOne <= alleleTwo){ this.alleleLo = alleleOne; this.alleleHi = alleleTwo; } else{ this.alleleLo = alleleTwo; this.alleleHi = alleleOne; } } public int getAlleleLo() { return alleleLo; } public int getAlleleHi() { return alleleHi; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + alleleHi; result = prime * result + alleleLo; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Genotype other = (Genotype) obj; if (alleleHi != other.alleleHi) return false; if (alleleLo != other.alleleLo) return false; return true; } /** * * @param other * @return */ public int containsCount(Genotype other){ int ans = 0; if(contains(other.getAlleleHi())){ ans++; } if(contains(other.alleleLo)){ ans++; } return ans; } public boolean contains(int other){ return this.alleleHi == other || this.alleleLo == other; } }
rma350/kidneyExchange
kidneyMatching/src/data/Genotype.java
Java
apache-2.0
1,247
/*
 * -
 * #%L
 * Pipeline: AWS Steps
 * %%
 * Copyright (C) 2016 Taimos GmbH
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package de.taimos.pipeline.aws.cloudformation;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.apache.commons.lang.StringUtils;
import org.apache.http.concurrent.BasicFuture;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.services.cloudformation.AmazonCloudFormationClient;
import com.amazonaws.services.cloudformation.model.AmazonCloudFormationException;
import com.amazonaws.services.cloudformation.model.DescribeStackEventsRequest;
import com.amazonaws.services.cloudformation.model.DescribeStackEventsResult;
import com.amazonaws.services.cloudformation.model.DescribeStacksRequest;
import com.amazonaws.services.cloudformation.model.StackEvent;
import com.amazonaws.waiters.Waiter;
import com.amazonaws.waiters.WaiterHandler;
import com.amazonaws.waiters.WaiterParameters;

import hudson.model.TaskListener;

/**
 * Streams CloudFormation stack events to the build log as an ASCII table
 * while waiting for an asynchronous CloudFormation waiter to finish.
 */
public class EventPrinter {

	private final AmazonCloudFormationClient client;
	private final TaskListener listener;

	public EventPrinter(AmazonCloudFormationClient client, TaskListener listener) {
		this.client = client;
		this.listener = listener;
	}

	/**
	 * Kicks off {@code waiter} asynchronously for the given stack and, while it
	 * runs, polls DescribeStackEvents every {@code pollIntervalMillis} ms,
	 * printing every event newer than the moment this method was entered.
	 *
	 * @param stack              name or id of the stack to watch
	 * @param waiter             CloudFormation waiter to run (e.g. stack-create-complete)
	 * @param pollIntervalMillis event poll interval; {@code <= 0} disables event printing
	 * @throws ExecutionException if the waiter finished with a failure
	 */
	public void waitAndPrintStackEvents(String stack, Waiter<DescribeStacksRequest> waiter, long pollIntervalMillis) throws ExecutionException {
		// Only events with a timestamp after this instant are printed.
		Date startDate = new Date();
		final BasicFuture<AmazonWebServiceRequest> waitResult = new BasicFuture<>(null);
		waiter.runAsync(new WaiterParameters<>(new DescribeStacksRequest().withStackName(stack)), new WaiterHandler() {
			@Override
			public void onWaitSuccess(AmazonWebServiceRequest request) {
				waitResult.completed(request);
			}

			@Override
			public void onWaitFailure(Exception e) {
				waitResult.failed(e);
			}
		});
		String lastEventId = null;
		this.printLine();
		this.printStackName(stack);
		this.printLine();
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
		if (pollIntervalMillis > 0) {
			while (!waitResult.isDone()) {
				try {
					DescribeStackEventsResult result = this.client.describeStackEvents(new DescribeStackEventsRequest().withStackName(stack));
					// Events come newest-first; collect everything we have not
					// printed yet, stopping at the last-seen event or anything
					// older than our start time.
					List<StackEvent> stackEvents = new ArrayList<>();
					for (StackEvent event : result.getStackEvents()) {
						if (event.getEventId().equals(lastEventId) || event.getTimestamp().before(startDate)) {
							break;
						}
						stackEvents.add(event);
					}
					if (!stackEvents.isEmpty()) {
						// Reverse so events are printed in chronological order.
						Collections.reverse(stackEvents);
						for (StackEvent event : stackEvents) {
							this.printEvent(sdf, event);
							this.printLine();
						}
						lastEventId = stackEvents.get(stackEvents.size() - 1).getEventId();
					}
				} catch (AmazonCloudFormationException e) {
					// Deliberate best-effort: a transient describe failure must
					// not abort the wait; we just try again next interval.
				}
				try {
					Thread.sleep(pollIntervalMillis);
				} catch (InterruptedException e) {
					// FIX: previously the interrupt was swallowed and polling
					// continued, spinning on every subsequent sleep. Restore the
					// interrupt flag and stop polling.
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
		try {
			waitResult.get();
		} catch (InterruptedException e) {
			// FIX: preserve the interrupt status for callers up the stack.
			Thread.currentThread().interrupt();
			this.listener.getLogger().format("Failed to wait for CFN action to complete: %s", e.getMessage());
		}
	}

	// Prints one event as a fixed-width table row.
	private void printEvent(SimpleDateFormat sdf, StackEvent event) {
		String time = this.padRight(sdf.format(event.getTimestamp()), 25);
		String logicalResourceId = this.padRight(event.getLogicalResourceId(), 20);
		String resourceStatus = this.padRight(event.getResourceStatus(), 36);
		String resourceStatusReason = this.padRight(event.getResourceStatusReason(), 140);
		this.listener.getLogger().format("| %s | %s | %s | %s |%n", time, logicalResourceId, resourceStatus, resourceStatusReason);
	}

	// Horizontal separator sized to the table width (231 chars).
	private void printLine() {
		this.listener.getLogger().println(StringUtils.repeat("-", 231));
	}

	// Table header row carrying the stack name.
	private void printStackName(String stackName) {
		this.listener.getLogger().println("| " + this.padRight("Stack: " + stackName, 227) + " |");
	}

	// Pads (or truncates) s to exactly len characters; null becomes blanks.
	private String padRight(String s, int len) {
		return String.format("%1$-" + len + "s", (s != null ? s : "")).substring(0, len);
	}
}
tekkamanendless/pipeline-aws-plugin
src/main/java/de/taimos/pipeline/aws/cloudformation/EventPrinter.java
Java
apache-2.0
4,692
package com.ihateflyingbugs.kidsm.schedule;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Calendar;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.DatePicker;
import android.widget.EditText;

import com.ihateflyingbugs.kidsm.NetworkActivity;
import com.ihateflyingbugs.kidsm.R;
import com.ihateflyingbugs.kidsm.friend.FriendFragment;
import com.ihateflyingbugs.kidsm.login.RegisterOrgItem;
import com.ihateflyingbugs.kidsm.menu.Profile;
import com.ihateflyingbugs.kidsm.menu.SlidingMenuMaker;
import com.ihateflyingbugs.kidsm.uploadphoto.InputTag;
import com.localytics.android.LocalyticsSession;

/**
 * Activity that creates a new calendar/schedule entry and then notifies the
 * relevant members (teachers, students' parents) about it via the server API
 * callbacks in {@link #response(String, String)}.
 *
 * NOTE(review): the Korean string literals in this file are mojibake (they
 * look like EUC-KR bytes decoded as Latin-1). They are preserved byte-for-byte
 * here because they are runtime data; the source encoding should be fixed at
 * the repository level.
 */
public class AddScheduleActivity extends NetworkActivity {

	// Currently selected schedule date (month is 0-based, Calendar convention).
	int year, month, day;
	// member_srl values of everyone who must be notified / tagged.
	ArrayList<String> tagList;
	// Counts getClassStudent responses received (used by the 'M' flow).
	int getClassStudentCounter;
	// NOTE(review): never read anywhere in this class — appears unused.
	int setTimelineMessageCounter;
	// Serial of the calendar entry created by Calender/setCalender.
	String cal_srl;
	// Outstanding notification requests; activity finishes when this hits 0.
	int alarmingCounter;
	// Set once the timeline message has been posted.
	boolean isSetTimelineMessageFinished;
	private LocalyticsSession localyticsSession;

	// Sets up the action bar, initializes the date fields to today, and opens
	// the Localytics analytics session.
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_addschedule);
		getActionBar().setHomeButtonEnabled(true);
		getActionBar().setBackgroundDrawable(getResources().getDrawable(R.drawable.general_actionbar_function_bg));
		getActionBar().setIcon(R.drawable.general_actionbar_back_btnset);
		Calendar calendar = Calendar.getInstance();
		year = calendar.get(Calendar.YEAR);
		month = calendar.get(Calendar.MONTH);
		day = calendar.get(Calendar.DAY_OF_MONTH);
		((EditText)findViewById(R.id.addschedule_date)).setText(""+year+"³â "+(month+1)+"¿ù "+day+"ÀÏ");
		alarmingCounter = 0;
		isSetTimelineMessageFinished = false;
		tagList = new ArrayList<String>();
		this.localyticsSession = new LocalyticsSession(this.getApplicationContext()); // Context used to access device resources
		this.localyticsSession.open(); // open the session
		this.localyticsSession.upload(); // upload any data
	}

	public void onResume() {
		super.onResume();
		this.localyticsSession.open();
	}

	public void onPause() {
		this.localyticsSession.close();
		this.localyticsSession.upload();
		super.onPause();
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.finish, menu);
		return true;
	}

	// Handles the action-bar back button and the "finish" (save) action.
	public boolean onOptionsItemSelected(MenuItem item) {
		switch(item.getItemId()) {
		case android.R.id.home:
			if(((EditText)findViewById(R.id.addschedule_message)).isFocused()) {
				InputMethodManager imm = (InputMethodManager) getSystemService( Context.INPUT_METHOD_SERVICE );
				imm.hideSoftInputFromWindow( findViewById(R.id.addschedule_message).getWindowToken(), 0 );
			}
			finish();
			// NOTE(review): no break here — the home case falls through into
			// R.id.finish and also posts the calendar entry. Verify intended.
		case R.id.finish:
			Profile profile = SlidingMenuMaker.getProfile();
			String message = ((EditText)findViewById(R.id.addschedule_message)).getText().toString();
			this.request_Calender_setCalender(message, profile.getCurrentClass().getClass_org_srl(), profile.member_srl, profile.getCurrentClass().getClass_srl(), ""+year, ""+(month+1), ""+day, "0:00", "0", "N");
			return true;
		}
		return false;
	}

	// Click handler for the date field: shows a date picker and writes the
	// chosen date back into the field.
	public void OnSelectDate(View v) {
		final EditText date = (EditText)v;
		new DatePickerDialog(this, new DatePickerDialog.OnDateSetListener() {
			@Override
			public void onDateSet(DatePicker view, int iYear, int iMonth, int iDay) {
				year = iYear;
				month = iMonth;
				day = iDay;
				date.setText(""+year+"³â "+(month+1)+"¿ù "+day+"ÀÏ");
			}
		}, year, month, day)
		.show();
	}

	// Rebuilds tagList with everyone to notify. For a teacher ('T') this starts
	// with the organization lookup; for a manager ('M') it collects all
	// teachers and then requests the students of each class.
	private void makeTagList() {
		getClassStudentCounter = 0;
		setTimelineMessageCounter = 0;
		Profile profile = SlidingMenuMaker.getProfile();
		tagList.clear();
		//tagList.add(profile.member_srl);
		switch(profile.member_type.charAt(0)) {
		case 'T':
			this.request_Organization_getOrganization(profile.member_org_srl);
			break;
		case 'M':
			// NOTE(review): loops stop at classList.size()-1, skipping the last
			// entry — presumably the last element is a sentinel/"all" entry.
			// TODO confirm against Profile.classList construction.
			for(int i = 0; i < profile.classList.size()-1; i++)
				for(int j = 0; j < profile.classList.get(i).getTeacherList().size(); j++)
					tagList.add(profile.classList.get(i).getTeacherList().get(j).teacher_member_srl);
			for(int i = 0; i < profile.classList.size()-1; i++)
				this.request_Class_getClassStudent(profile.member_org_srl, profile.classList.get(i).getClass_srl());
			break;
		}
	}

	// Finishes the activity once all notifications are sent and the timeline
	// message has been posted.
	public void isFinished() {
		if( alarmingCounter == 0 && isSetTimelineMessageFinished ) {
			setResult(Activity.RESULT_OK, new Intent());
			finish();
		}
	}

	/**
	 * Central dispatcher for all server responses; branches on the request URI.
	 * Flow: setCalender -> makeTagList -> getClassStudent -> getMember /
	 * getParent (one sendNotify per target) -> setTimelineMessage -> finish.
	 */
	@Override
	public void response(String uri, String response) {
		try {
			if( response.isEmpty() )
				return;
			JSONObject jsonObj = new JSONObject(response);
			String result = jsonObj.getString("result");
			if( result.equals("OK") ) {
				if(uri.equals("Calender/setCalender")) {
					// Calendar entry created: remember its serial and start
					// building the notification target list.
					String nativeData = jsonObj.getString("data");
					jsonObj = new JSONObject(nativeData);
					String cal_srl = jsonObj.getString("cal_srl");
					String cal_org_srl = jsonObj.getString("cal_org_srl");
					String cal_class_srl = jsonObj.getString("cal_class_srl");
					String cal_member_srl = jsonObj.getString("cal_member_srl");
					String cal_type = jsonObj.getString("cal_type");
					String cal_year = jsonObj.getString("cal_year");
					String cal_month = jsonObj.getString("cal_month");
					String cal_day = jsonObj.getString("cal_day");
					String cal_time = jsonObj.getString("cal_time");
					String cal_timestamp = jsonObj.getString("cal_timestamp");
					String cal_name = jsonObj.getString("cal_name");
					String cal_created = jsonObj.getString("cal_created");
					makeTagList();
					this.cal_srl = cal_srl;
				}
				else if(uri.equals("Class/getClassStudent")) {
					// Add every student of the class to the tag list, then —
					// once all classes have answered — fan out getMember
					// requests and post the timeline message.
					String nativeData = jsonObj.getString("data");
					JSONArray dataArray = new JSONArray(nativeData);
					for(int i = 0; i < dataArray.length(); i++) {
						String member_srl = dataArray.getJSONObject(i).getString("member_srl");
						String member_name = dataArray.getJSONObject(i).getString("member_name");
						String member_type = dataArray.getJSONObject(i).getString("member_type");
						String member_org_srl = dataArray.getJSONObject(i).getString("member_org_srl");
						String member_picture = dataArray.getJSONObject(i).getString("member_picture");
						JSONObject studentObj = dataArray.getJSONObject(i).getJSONObject("student");
						String student_srl = studentObj.getString("student_srl");
						String student_member_srl = studentObj.getString("student_member_srl");
						String student_class_srl = studentObj.getString("student_class_srl");
						String student_parent_srl = studentObj.getString("student_parent_srl");
						String student_teacher_srl = studentObj.getString("student_teacher_srl");
						String student_shuttle_srl = studentObj.getString("student_shuttle_srl");
						String student_birthday = studentObj.getString("student_birthday");
						String student_parent_key = studentObj.getString("student_parent_key");
						tagList.add(member_srl);
					}
					String targetList = "";
					Profile profile = SlidingMenuMaker.getProfile();
					switch(profile.member_type.charAt(0)) {
					case 'T':
						alarmingCounter = tagList.size();
						for(int i = 0; i < tagList.size(); i++) {
							targetList += tagList.get(i) + ",";
							this.request_Member_getMember(tagList.get(i));
						}
						this.request_Timeline_setTimelineMessage(SlidingMenuMaker.getProfile().member_srl, "S", cal_srl, targetList);
						// for(int i = 0 ; i < tagList.size(); i++)
						// this.request_Timeline_setTimelineMessage(SlidingMenuMaker.getProfile().member_srl, "S", cal_srl, tagList.get(i));
						break;
					case 'M':
						// Wait until every class (size()-1, see makeTagList)
						// has reported its students before notifying.
						if( ++getClassStudentCounter == profile.classList.size()-1 ) {
							alarmingCounter = tagList.size();
							for(int i = 0 ; i < tagList.size(); i++) {
								targetList += tagList.get(i) + ",";
								this.request_Member_getMember(tagList.get(i));
							}
							// (commented-out: also notify the teachers directly)
							// for( int i = 0; i < SlidingMenuMaker.getProfile().getCurrentClass().getTeacherList().size(); i++)
							// this.request_Service_notify_sendNotify(SlidingMenuMaker.getProfile().member_srl, SlidingMenuMaker.getProfile().getCurrentClass().getTeacherList().get(i).teacher_member_srl, "½ºÄÉÁÙ µî·Ï ¾È³»", SlidingMenuMaker.getProfile().member_name+"¿øÀå¼±»ý´ÔÀÌ »õ ½ºÄÉÁÙÀ» µî·ÏÇß½À´Ï´Ù.", "N");
							this.request_Timeline_setTimelineMessage(SlidingMenuMaker.getProfile().member_srl, "S", cal_srl, targetList);
							// for(int i = 0 ; i < tagList.size(); i++)
							// this.request_Timeline_setTimelineMessage(SlidingMenuMaker.getProfile().member_srl, "S", cal_srl, tagList.get(i));
						}
						break;
					}
				}
				else if(uri.equals("Member/getMember")) {
					// Per-member resolution: students are notified via their
					// parent; teachers/managers are notified directly.
					String nativeData = jsonObj.getString("data");
					jsonObj = new JSONObject(nativeData);
					String member_srl = jsonObj.getString("member_srl");
					String member_name = jsonObj.getString("member_name");
					String member_type = jsonObj.getString("member_type");
					switch(member_type.charAt(0)) {
					case 'S':
						JSONObject studentObj = jsonObj.getJSONObject("student");
						String student_parent_srl = studentObj.getString("student_parent_srl");
						if( student_parent_srl.equals("0") ) {
							// Student has no registered parent: nothing to
							// send, just decrement the outstanding counter on
							// the UI thread.
							// NOTE(review): spawning a Thread only to call
							// runOnUiThread is redundant; runOnUiThread alone
							// would suffice. Left as-is (behavior preserved).
							new Thread(new Runnable() {
								@Override
								public void run() {
									runOnUiThread(new Runnable(){
										@Override
										public void run() {
											alarmingCounter--;
											isFinished();
										}
									});
								}
							}).start();
						}
						else
							this.request_Member_getParent(student_parent_srl);
						break;
					case 'T':
					case 'M':
						// Notification title/body vary with the sender's role.
						switch(SlidingMenuMaker.getProfile().member_type.charAt(0)) {
						case 'T':
							this.request_Service_notify_sendNotify(SlidingMenuMaker.getProfile().member_srl, member_srl, "½ºÄÉÁÙ µî·Ï ¾È³»", SlidingMenuMaker.getProfile().member_name+"¼±»ý´ÔÀÌ »õ ½ºÄÉÁÙÀ» µî·ÏÇß½À´Ï´Ù.", "N");
							break;
						case 'M':
							this.request_Service_notify_sendNotify(SlidingMenuMaker.getProfile().member_srl, member_srl, "½ºÄÉÁÙ µî·Ï ¾È³»", SlidingMenuMaker.getProfile().member_name+"¿øÀå¼±»ý´ÔÀÌ »õ ½ºÄÉÁÙÀ» µî·ÏÇß½À´Ï´Ù.", "N");
							break;
						}
						new Thread(new Runnable() {
							@Override
							public void run() {
								runOnUiThread(new Runnable(){
									@Override
									public void run() {
										alarmingCounter--;
										isFinished();
									}
								});
							}
						}).start();
						break;
					}
				}
				else if(uri.equals("Member/getParent")) {
					// Parent resolved: send them the schedule notification and
					// decrement the outstanding counter.
					String nativeData = jsonObj.getString("data");
					jsonObj = new JSONObject(nativeData);
					String member_srl = jsonObj.getString("member_srl");
					switch( SlidingMenuMaker.getProfile().member_type.charAt(0) ) {
					case 'T':
						this.request_Service_notify_sendNotify(SlidingMenuMaker.getProfile().member_srl, member_srl, "½ºÄÉÁÙ µî·Ï ¾È³»", SlidingMenuMaker.getProfile().member_name+"¼±»ý´ÔÀÌ »õ ½ºÄÉÁÙÀ» µî·ÏÇß½À´Ï´Ù.", "N");
						break;
					case 'M':
						this.request_Service_notify_sendNotify(SlidingMenuMaker.getProfile().member_srl, member_srl, "½ºÄÉÁÙ µî·Ï ¾È³»", SlidingMenuMaker.getProfile().member_name+"¿øÀå¼±»ý´ÔÀÌ »õ ½ºÄÉÁÙÀ» µî·ÏÇß½À´Ï´Ù.", "N");
						break;
					}
					new Thread(new Runnable() {
						@Override
						public void run() {
							runOnUiThread(new Runnable(){
								@Override
								public void run() {
									alarmingCounter--;
									isFinished();
								}
							});
						}
					}).start();
				}
				else if(uri.equals("Timeline/setTimelineMessage")) {
					// Timeline message posted: mark done and maybe finish.
					new Thread(new Runnable() {
						@Override
						public void run() {
							runOnUiThread(new Runnable(){
								@Override
								public void run() {
									isSetTimelineMessageFinished = true;
									isFinished();
								}
							});
						}
					}).start();
				}
				else if(uri.equals("Organization/getOrganization")) {
					// Teacher flow: tag the organization manager, then fetch
					// the current class's students.
					String nativeData = jsonObj.getString("data");
					jsonObj = new JSONObject(nativeData);
					String org_srl = jsonObj.getString("org_srl");
					String org_name = jsonObj.getString("org_name");
					String org_manager_member_srl = jsonObj.getString("org_manager_member_srl");
					String org_phone = jsonObj.getString("org_phone");
					String org_address = jsonObj.getString("org_address");
					String org_teacher_key = jsonObj.getString("org_teacher_key");
					String org_created = jsonObj.getString("org_created");
					String org_updated = jsonObj.getString("org_updated");
					tagList.add(org_manager_member_srl);
					this.request_Class_getClassStudent(SlidingMenuMaker.getProfile().member_org_srl, SlidingMenuMaker.getProfile().getCurrentClass().getClass_srl());
				}
			}
			else {
				// Server returned a non-OK result: silently ignored.
			}
		} catch(JSONException e) {
			// Malformed response: dump the stack trace to stdout and continue.
			StringWriter errors = new StringWriter();
			e.printStackTrace(new PrintWriter(errors));
			String s = errors.toString();
			System.out.println(s);
		}
	}
}
xxx4u/kidsm_for_android
MySlidingProject/src/com/ihateflyingbugs/kidsm/schedule/AddScheduleActivity.java
Java
apache-2.0
13,472
/*
 * Copyright 2008-2009 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package voldemort.store;

import voldemort.VoldemortException;

/**
 * Thrown by a store to indicate that the client's view of the metadata
 * (cluster configuration / stores configuration) is not in sync with the
 * metadata held by this store.
 *
 * @author bbansal
 */
public class InvalidMetadataException extends VoldemortException {

    private static final long serialVersionUID = 1L;

    /**
     * @param s detail message describing the metadata mismatch
     */
    public InvalidMetadataException(String s) {
        super(s);
    }

    /**
     * @param s detail message describing the metadata mismatch
     * @param t underlying cause
     */
    public InvalidMetadataException(String s, Throwable t) {
        super(s, t);
    }

    /**
     * Stable wire-protocol identifier for this exception type (9), used to
     * reconstruct the exception on the client side.
     */
    @Override
    public short getId() {
        return 9;
    }
}
nassim-git/project-voldemort
src/java/voldemort/store/InvalidMetadataException.java
Java
apache-2.0
1,217
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.jrh3k5.mojo.flume.process; import static org.fest.assertions.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.UUID; import org.junit.Test; /** * Unit tests for {@link AgentProcessContainer}. * * @author Joshua Hyde * @since 1.2 */ public class AgentProcessContainerTest { /** * Test the stopping of a stored agent process. * * @throws Exception * If any errors occur during the test run. */ @Test public void testStopAgentProcess() throws Exception { final String agentName = UUID.randomUUID().toString(); final AgentProcess agentProcess = mock(AgentProcess.class); when(agentProcess.getAgentName()).thenReturn(agentName); AgentProcessContainer.storeAgentProcess(agentProcess); AgentProcessContainer.stopAgentProcess(agentName); verify(agentProcess).stop(); // Calling stop on a previously-stopped agent should not call stop on it again AgentProcessContainer.stopAgentProcess(agentName); verify(agentProcess).stop(); } /** * If the agent process has already been stored in the container, then an {@link IllegalStateException} should be thrown. 
* * @throws Exception * If any errors occur during the test run. */ @Test public void testStoreAgentProcessDuplicate() throws Exception { final String agentName = UUID.randomUUID().toString(); final AgentProcess agentProcess = mock(AgentProcess.class); when(agentProcess.getAgentName()).thenReturn(agentName); AgentProcessContainer.storeAgentProcess(agentProcess); IllegalStateException caught = null; try { AgentProcessContainer.storeAgentProcess(agentProcess); } catch (IllegalStateException e) { caught = e; } assertThat(caught).isNotNull(); } }
jrh3k5/flume-agent-maven-plugin
src/test/java/com/github/jrh3k5/mojo/flume/process/AgentProcessContainerTest.java
Java
apache-2.0
2,823
package com.hs.mail.imap.processor.ext; import org.jboss.netty.channel.Channel; import com.hs.mail.imap.ImapSession; import com.hs.mail.imap.mailbox.Mailbox; import com.hs.mail.imap.mailbox.MailboxACL; import com.hs.mail.imap.mailbox.MailboxManager; import com.hs.mail.imap.mailbox.MailboxPath; import com.hs.mail.imap.message.request.ImapRequest; import com.hs.mail.imap.message.request.ext.GetACLRequest; import com.hs.mail.imap.message.responder.Responder; import com.hs.mail.imap.message.responder.ext.ACLResponder; import com.hs.mail.imap.message.response.HumanReadableText; import com.hs.mail.imap.message.response.ext.ACLResponse; /** * The GETACL command returns the access control list for mailbox in an untagged * ACL response. * * @author Wonchul Doh * @since December 2, 2016 * */ public class GetACLProcessor extends AbstractACLProcessor { @Override protected void doProcess(ImapSession session, ImapRequest message, Responder responder) throws Exception { doProcess(session, (GetACLRequest) message, (ACLResponder) responder); } protected void doProcess(ImapSession session, GetACLRequest request, ACLResponder responder) throws Exception { MailboxManager manager = getMailboxManager(); MailboxPath path = buildMailboxPath(session, request.getMailbox()); Mailbox mailbox = manager.getMailbox(path.getUserID(), path.getFullName()); if (mailbox == null) { responder.taggedNo(request, HumanReadableText.MAILBOX_NOT_FOUND); } else { String rights = manager.getRights(session.getUserID(), mailbox.getMailboxID(), true); if (rights.indexOf('l') == -1) { // RFC 4314 section 6 // If not have permission to LIST, respond with the same error // that would be used if the mailbox did not exist. 
responder.taggedNo(request, HumanReadableText.MAILBOX_NOT_FOUND); } else if (rights.indexOf('a') == -1) { responder.taggedNo(request, HumanReadableText.INSUFFICIENT_RIGHTS); } else { MailboxACL acl = manager.getACL(mailbox.getMailboxID()); acl.setMailbox(request.getMailbox()); responder.respond(new ACLResponse(acl)); responder.okCompleted(request); } } } @Override protected Responder createResponder(Channel channel, ImapRequest request) { return new ACLResponder(channel, request); } }
svn2github/hwmail-mirror
hedwig-server/src/main/java/com/hs/mail/imap/processor/ext/GetACLProcessor.java
Java
apache-2.0
2,378
package com.opi.cli;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
import com.opi.cli.api.ICommand;
import com.opi.cli.api.ISubCommand;

import jline.UnsupportedTerminal;
import jline.console.ConsoleReader;

import java.io.BufferedReader;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;

/**
 * Interactive command-line shell. Commands are discovered via
 * {@link ServiceLoader} lookup of {@link ICommand} implementations, read from
 * the console (or from a script file given as the first program argument),
 * parsed with JCommander, and executed until {@link #setExit(boolean)} is
 * called or the input stream ends.
 */
public class Cli {

    private static Cli instance = new Cli();

    // When true (enabled via -Dverbose), exceptions print full stack traces.
    private boolean verbose = false;
    private String prompt = "";
    private boolean exit = false;
    // Registry of discovered commands, keyed by command name.
    private Map<String, ICommand> commandMap;

    private Cli() {}

    /**
     * @return The singleton shell instance.
     */
    public static Cli getInstance() {
        return instance;
    }

    /**
     * Run the read-eval-print loop until exit is requested or input ends.
     *
     * @param mainArgv
     *            Program arguments; if non-empty, the first entry names a
     *            script file to read commands from instead of the console.
     * @throws Exception
     *             If reading input or command setup fails.
     */
    public void run(String mainArgv[]) throws Exception {
        if (System.getProperty("verbose") != null) {
            verbose = true;
        }

        PrintWriter pw = new PrintWriter(System.out, true);
        prompt = System.getProperty("user.name") + " > ";

        ConsoleReader consoleReader = getConsoleReader(mainArgv, System.out);

        setupCommands(pw);

        while (!exit) {
            // Read one line from the console (or script); null signals EOF.
            String line = consoleReader.readLine(prompt);
            if (line == null) {
                exit = true;
                continue;
            }
            // Tokenize on whitespace: first token is the command name, the
            // rest become the command's argv.
            String[] argv = line.trim().split("\\s+");
            if (argv.length > 0) {
                CommandApi api = new CommandApi();
                String[] cmdArgv = new String[argv.length - 1];
                System.arraycopy(argv, 1, cmdArgv, 0, argv.length - 1);
                api.setArgv(cmdArgv);
                api.setPrintWriter(pw);
                handleCommand(argv[0], api);
            }
        }
    }

    /**
     * @return All registered commands, sorted by name.
     */
    public List<ICommand> getCommands() {
        List<ICommand> commands = new ArrayList<ICommand>(commandMap.values());
        Collections.sort(commands, new Comparator<ICommand>() {
            @Override
            public int compare(ICommand o1, ICommand o2) {
                return o1.getName().compareTo(o2.getName());
            }
        });
        return commands;
    }

    /**
     * Request that the read loop terminate after the current command.
     *
     * @param exit
     *            {@code true} to stop the loop.
     */
    public void setExit(boolean exit) {
        this.exit = exit;
    }

    /**
     * Look up, parse, and execute a single command line; all errors are
     * reported to the API's writer rather than propagated.
     */
    private void handleCommand(String commandName, CommandApi api) {
        try {
            ICommand command = getCommand(commandName);
            if (command != null) {
                JCommander commander = setupCommander(command);
                // Parse the parameters (may select a sub-command).
                commander.parse(api.getArgv());
                executeCommand(commander, command, api);
            } else if (commandName.length() > 0) {
                api.getPrintWriter().println(String.format("unknown command: '%s'", commandName));
            }
        } catch (ParameterException pe) {
            api.getPrintWriter().println(commandName + " " + pe.getMessage());
        } catch (Exception e) {
            printException(e, api.getPrintWriter());
        }
    }

    /**
     * Build a JCommander for the command, registering each of its
     * sub-commands (if any).
     */
    private JCommander setupCommander(ICommand command) {
        JCommander commander = new JCommander();
        commander.addObject(command);
        List<ISubCommand> subCommands = command.getSubCommands();
        if (subCommands != null) {
            for (ISubCommand subCommand : subCommands) {
                commander.addCommand(subCommand);
            }
        }
        return commander;
    }

    /**
     * Return a fresh instance of the named command, or {@code null} if no
     * such command is registered. A new instance is created per invocation so
     * parsed parameter state never leaks between command lines.
     */
    private ICommand getCommand(String commandName) throws IllegalAccessException, InstantiationException {
        ICommand command = commandMap.get(commandName);
        if (command != null) {
            // NOTE: Class.newInstance() is deprecated on modern JDKs; kept for
            // source compatibility with the project's language level. Requires
            // a public no-arg constructor on every ICommand implementation.
            command = command.getClass().newInstance();
        }
        return command;
    }

    /**
     * Execute either the command itself or, if JCommander parsed a
     * sub-command, that sub-command.
     */
    private void executeCommand(JCommander commander, ICommand command, CommandApi api) throws Exception {
        String subCommandName = commander.getParsedCommand();
        if (subCommandName == null) {
            command.execute(api);
        } else {
            // BUG FIX: the parsed sub-command object lives inside the nested
            // JCommander registered under its name. The previous code read
            // commander.getObjects().get(0), which is the main ICommand added
            // via addObject() and fails the ISubCommand cast.
            JCommander subCommander = commander.getCommands().get(subCommandName);
            ISubCommand subCommand = (ISubCommand) subCommander.getObjects().get(0);
            subCommand.execute(command, api);
        }
    }

    /**
     * Discover {@link ICommand} implementations via {@link ServiceLoader},
     * register them by name (first registration wins), and initialize each.
     */
    private void setupCommands(PrintWriter pw) {
        // Private, single-threaded registry with no null keys/values, so a
        // plain HashMap suffices (previously a synchronized Hashtable).
        commandMap = new HashMap<String, ICommand>();
        ServiceLoader<ICommand> serviceLoader = ServiceLoader.load(ICommand.class);
        Iterator<ICommand> iter = serviceLoader.iterator();
        while (iter.hasNext()) {
            ICommand command = iter.next();
            String commandName = command.getName();
            if (commandName != null && !commandMap.containsKey(commandName)) {
                commandMap.put(commandName, command);
            }
        }
        for (ICommand command : getCommands()) {
            CommandApi commandApi = new CommandApi();
            commandApi.setPrintWriter(pw);
            command.init(commandApi);
        }
    }

    /**
     * Create the console reader. If a script file is supplied as the first
     * program argument, lines are read from it; otherwise a real console
     * reader is attempted, falling back to an unsupported-terminal reader
     * (e.g. when running without a TTY).
     */
    private ConsoleReader getConsoleReader(String[] mainArgv, OutputStream os) throws IOException {
        ConsoleReader consoleReader;

        InputStream is = new FileInputStream(FileDescriptor.in);
        if (mainArgv != null && mainArgv.length > 0) {
            // Script mode: ignore the prompt and read lines from the file.
            is = new FileInputStream(mainArgv[0]);
            final BufferedReader br = new BufferedReader(new InputStreamReader(is));
            consoleReader = new ConsoleReader() {
                @Override
                public String readLine(String prompt) throws IOException {
                    return br.readLine();
                }
            };
        } else {
            try {
                consoleReader = new ConsoleReader();
            } catch (Throwable t) {
                // No usable terminal available (e.g. piped input).
                consoleReader = new ConsoleReader(is, os, new UnsupportedTerminal());
            }
        }
        return consoleReader;
    }

    /**
     * Report an exception: the full stack trace in verbose mode, otherwise
     * just the message chain down to the root cause.
     */
    private void printException(Exception e, PrintWriter pw) {
        if (verbose) {
            e.printStackTrace(pw);
        } else {
            Throwable t = e;
            // Walk the cause chain; stop when a throwable is its own cause.
            while (t != null && t != t.getCause()) {
                pw.println(t.getMessage());
                t = t.getCause();
            }
        }
    }
}
peegees/java-command-shell
ArchiveConsole/Shell/src/com/opi/cli/Cli.java
Java
apache-2.0
7,152
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.CompilerOptions.PropertyCollapseLevel; import com.google.javascript.rhino.Node; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** @author johnlenz@google.com (John Lenz) */ @RunWith(JUnit4.class) public final class NormalizeTest extends CompilerTestCase { private static final String EXTERNS = "var window; var Arguments;"; public NormalizeTest() { super(EXTERNS); } @Override @Before public void setUp() throws Exception { super.setUp(); setAcceptedLanguage(LanguageMode.ECMASCRIPT_2017); } @Override protected CompilerPass getProcessor(final Compiler compiler) { return new Normalize(compiler, false); } @Override protected int getNumRepetitions() { // The normalize pass is only run once. 
return 1; } @Test public void testSplitVar() { testSame("var a"); test("var a, b", "var a; var b"); test("var a, b, c", "var a; var b; var c"); testSame("var a = 0 "); test("var a = 0 , b = foo()", "var a = 0; var b = foo()"); test("var a = 0, b = 1, c = 2", "var a = 0; var b = 1; var c = 2"); test("var a = foo(1), b = foo(2), c = foo(3)", "var a = foo(1); var b = foo(2); var c = foo(3)"); test("try{var b = foo(1), c = foo(2);} finally { foo(3) }", "try{var b = foo(1); var c = foo(2)} finally { foo(3); }"); test("try{var b = foo(1),c = foo(2);} finally {}", "try{var b = foo(1); var c = foo(2)} finally {}"); test("try{foo(0);} finally { var b = foo(1), c = foo(2); }", "try{foo(0);} finally {var b = foo(1); var c = foo(2)}"); test("switch(a) {default: var b = foo(1), c = foo(2); break;}", "switch(a) {default: var b = foo(1); var c = foo(2); break;}"); test("do var a = foo(1), b; while(false);", "do{var a = foo(1); var b} while(false);"); test("a:var a,b,c;", "a:{ var a;var b; var c; }"); test("if (true) a:var a,b;", "if (true)a:{ var a; var b; }"); } @Test public void testSplitVar_forLoop() { // Verify vars extracted from FOR nodes are split. test( "for(var a = 0, b = foo(1), c = 1; c < b; c++) foo(2)", "var a = 0; var b = foo(1); var c = 1; for(; c < b; c++) foo(2)"); // Verify split vars properly introduce blocks when needed. 
test("for(;;) var b = foo(1), c = foo(2);", "for(;;){var b = foo(1); var c = foo(2)}"); test("for(;;){var b = foo(1), c = foo(2);}", "for(;;){var b = foo(1); var c = foo(2)}"); test("a:for(var a,b,c;;);", "var a;var b; var c;a:for(;;);"); } @Test public void testSplitLet() { testSame("let a"); test("let a, b", "let a; let b"); test("let a, b, c", "let a; let b; let c"); testSame("let a = 0 "); test("let a = 0 , b = foo()", "let a = 0; let b = foo()"); test("let a = 0, b = 1, c = 2", "let a = 0; let b = 1; let c = 2"); test( "let a = foo(1), b = foo(2), c = foo(3)", "let a = foo(1); let b = foo(2); let c = foo(3)"); testSame("for (let a = 0, b = 1;;) {}"); } @Test public void testLetManyBlocks() { test( lines( "let a = 'outer';", "{ let a = 'inner1'; }", "{ let a = 'inner2'; }", "{ let a = 'inner3'; }", "{ let a = 'inner4'; }"), lines( "let a = 'outer';", "{ let a$jscomp$1 = 'inner1'; }", "{ let a$jscomp$2 = 'inner2'; }", "{ let a$jscomp$3 = 'inner3'; }", "{ let a$jscomp$4 = 'inner4'; }")); } @Test public void testLetOutsideAndInsideForLoop() { test( lines( "let a = 'outer';", "for (let a = 'inner';;) {", " break;", "}", "alert(a);"), lines( "let a = 'outer';", "for (let a$jscomp$1 = 'inner';;) {", " break;", "}", "alert(a);")); } @Test public void testLetOutsideAndInsideBlock() { test( lines( "let a = 'outer';", "{", " let a = 'inner';", "}", "alert(a);"), lines( "let a = 'outer';", "{", " let a$jscomp$1 = 'inner';", "}", "alert(a);")); } @Test public void testLetOutsideAndInsideFn() { test( lines( "let a = 'outer';", "function f() {", " let a = 'inner';", "}", "alert(a);"), lines( "let a = 'outer';", "function f() {", " let a$jscomp$1 = 'inner';", "}", "alert(a);")); } @Test public void testRemoveEmptiesFromClass() { test( lines( "class Foo {", " m1() {};", " m2() {};", "}"), lines( "class Foo {", " m1() {}", " m2() {}", "}")); } @Test public void testClassInForLoop() { testSame("for (class a {};;) { break; }"); } @Test public void testFunctionInForLoop() { 
testSame("for (function a() {};;) { break; }"); } @Test public void testLetInGlobalHoistScope() { testSame( lines( "if (true) {", " let x = 1; alert(x);", "}")); test( lines( "if (true) {", " let x = 1; alert(x);", "} else {", " let x = 1; alert(x);", "}"), lines( "if (true) {", " let x = 1; alert(x);", "} else {", " let x$jscomp$1 = 1; alert(x$jscomp$1);", "}")); } @Test public void testConstInGlobalHoistScope() { testSame( lines( "if (true) {", " const x = 1; alert(x);", "}")); test( lines( "if (true) {", " const x = 1; alert(x);", "} else {", " const x = 1; alert(x);", "}"), lines( "if (true) {", " const x = 1; alert(x);", "} else {", " const x$jscomp$1 = 1; alert(x$jscomp$1);", "}")); } @Test public void testVarReferencedInHoistedFunction() { test( lines( "var f1 = function() {", " var x;", "};", "", "(function () {", " {", " var x = 0;", " }", " function f2() {", " alert(x);", " }", " f2();", "})();"), lines( "var f1 = function() {", " var x;", "};", "", "(function () {", " function f2() {", " alert(x$jscomp$1);", " }", " {", " var x$jscomp$1 = 0;", " }", " f2();", "})();")); } @Test public void testAssignShorthand() { test("x |= 1;", "x = x | 1;"); test("x ^= 1;", "x = x ^ 1;"); test("x &= 1;", "x = x & 1;"); test("x <<= 1;", "x = x << 1;"); test("x >>= 1;", "x = x >> 1;"); test("x >>>= 1;", "x = x >>> 1;"); test("x += 1;", "x = x + 1;"); test("x -= 1;", "x = x - 1;"); test("x *= 1;", "x = x * 1;"); test("x /= 1;", "x = x / 1;"); test("x %= 1;", "x = x % 1;"); test("/** @suppress {const} */ x += 1;", "/** @suppress {const} */ x = x + 1;"); } @Test public void testDuplicateVarInExterns() { test( externs("var extern;"), srcs("/** @suppress {duplicate} */ var extern = 3;"), expected("/** @suppress {duplicate} */ var extern = 3;")); } @Test public void testUnhandled() { testSame("var x = y = 1"); } @Test public void testFor() { // Verify assignments are extracted from the FOR init node. 
test("for(a = 0; a < 2 ; a++) foo();", "a = 0; for(; a < 2 ; a++) foo()"); // Verify vars are extracted from the FOR init node. test("for(var a = 0; c < b ; c++) foo()", "var a = 0; for(; c < b ; c++) foo()"); // Verify vars are extracted from the FOR init before the label node. test("a:for(var a = 0; c < b ; c++) foo()", "var a = 0; a:for(; c < b ; c++) foo()"); // Verify vars are extracted from the FOR init before the labels node. test("a:b:for(var a = 0; c < b ; c++) foo()", "var a = 0; a:b:for(; c < b ; c++) foo()"); // Verify block are properly introduced for ifs. test("if(x) for(var a = 0; c < b ; c++) foo()", "if(x){var a = 0; for(; c < b ; c++) foo()}"); // Any other expression. test("for(init(); a < 2 ; a++) foo();", "init(); for(; a < 2 ; a++) foo()"); // Verify destructuring var declarations are extracted. test("for (var [a, b] = [1, 2]; a < 2; a = b++) foo();", "var [a, b] = [1, 2]; for (; a < 2; a = b++) foo();"); } @Test public void testForIn1() { // Verify nothing happens with simple for-in testSame("for(a in b) foo();"); // Verify vars are extracted from the FOR-IN node. test("for(var a in b) foo()", "var a; for(a in b) foo()"); // Verify vars are extracted from the FOR init before the label node. test("a:for(var a in b) foo()", "var a; a:for(a in b) foo()"); // Verify vars are extracted from the FOR init before the labels node. test("a:b:for(var a in b) foo()", "var a; a:b:for(a in b) foo()"); // Verify block are properly introduced for ifs. test("if (x) for(var a in b) foo()", "if (x) { var a; for(a in b) foo() }"); // Verify names in destructuring declarations are individually declared. test("for (var [a, b] in c) foo();", "var a; var b; for ([a, b] in c) foo();"); test("for (var {a, b} in c) foo();", "var a; var b; for ({a: a, b: b} in c) foo();"); } @Test public void testForIn2() { setExpectParseWarningsThisTest(); setAcceptedLanguage(LanguageMode.ECMASCRIPT5); // Verify vars are extracted from the FOR-IN node. 
test("for(var a = foo() in b) foo()", "var a = foo(); for(a in b) foo()"); } @Test public void testForOf() { // Verify nothing happens with simple for-of testSame("for (a of b) foo();"); // Verify vars are extracted from the FOR-OF node. test("for (var a of b) foo()", "var a; for (a of b) foo()"); // Verify vars are extracted from the FOR init before the label node. test("a:for (var a of b) foo()", "var a; a: for (a of b) foo()"); // Verify vars are extracted from the FOR init before the labels node. test("a: b: for (var a of b) foo()", "var a; a: b: for (a of b) foo()"); // Verify block are properly introduced for ifs. test("if (x) for (var a of b) foo()", "if (x) { var a; for (a of b) foo() }"); // Verify names in destructuring declarations are individually declared. test("for (var [a, b] of c) foo();", "var a; var b; for ([a, b] of c) foo();"); test("for (var {a, b} of c) foo();", "var a; var b; for ({a: a, b: b} of c) foo();"); } @Test public void testWhile() { // Verify while loops are converted to FOR loops. 
test("while(c < b) foo()", "for(; c < b;) foo()"); } @Test public void testMoveFunctions1() { test("function f() { if (x) return; foo(); function foo() {} }", "function f() {function foo() {} if (x) return; foo(); }"); test( lines( "function f() { ", " function foo() {} ", " if (x) return;", " foo(); ", " function bar() {} ", "}"), lines( "function f() {", " function foo() {}", " function bar() {}", " if (x) return;", " foo();", "}")); } @Test public void testMoveFunctions2() { testSame("function f() { function foo() {} }"); test("function f() { f(); {function bar() {}}}", "function f() { f(); {var bar = function () {}}}"); test("function f() { f(); if (true) {function bar() {}}}", "function f() { f(); if (true) {var bar = function () {}}}"); } private static String inFunction(String code) { return "(function(){" + code + "})"; } private void testSameInFunction(String code) { testSame(inFunction(code)); } private void testInFunction(String code, String expected) { test(inFunction(code), inFunction(expected)); } @Test public void testNormalizeFunctionDeclarations() { testSame("function f() {}"); testSame("var f = function () {}"); test("var f = function f() {}", "var f = function f$jscomp$1() {}"); testSame("var f = function g() {}"); test("{function g() {}}", "{var g = function () {}}"); testSame("if (function g() {}) {}"); test("if (true) {function g() {}}", "if (true) {var g = function () {}}"); test("if (true) {} else {function g() {}}", "if (true) {} else {var g = function () {}}"); testSame("switch (function g() {}) {}"); test("switch (1) { case 1: function g() {}}", "switch (1) { case 1: var g = function () {}}"); test("if (true) {function g() {} function h() {}}", "if (true) {var h = function() {}; var g = function () {}}"); testSameInFunction("function f() {}"); testInFunction("f(); {function g() {}}", "f(); {var g = function () {}}"); testInFunction("f(); if (true) {function g() {}}", "f(); if (true) {var g = function () {}}"); testInFunction("if (true) {} 
else {function g() {}}", "if (true) {} else {var g = function () {}}"); } @Test public void testMakeLocalNamesUnique() { // Verify global names are untouched. testSame("var a;"); // Verify global names are untouched. testSame("a;"); // Local names are made unique. test("var a;function foo(a){var b;a}", "var a;function foo(a$jscomp$1){var b;a$jscomp$1}"); test("var a;function foo(){var b;a}function boo(){var b;a}", "var a;function foo(){var b;a}function boo(){var b$jscomp$1;a}"); test("function foo(a){var b} function boo(a){var b}", "function foo(a){var b} function boo(a$jscomp$1){var b$jscomp$1}"); // Verify function expressions are renamed. test("var a = function foo(){foo()};var b = function foo(){foo()};", "var a = function foo(){foo()};var b = function foo$jscomp$1(){foo$jscomp$1()};"); // Verify catch exceptions names are made unique testSame("try { } catch(e) {e;}"); test("try { } catch(e) {e;}; try { } catch(e) {e;}", "try { } catch(e) {e;}; try { } catch(e$jscomp$1) {e$jscomp$1;}"); test("try { } catch(e) {e; try { } catch(e) {e;}};", "try { } catch(e) {e; try { } catch(e$jscomp$1) {e$jscomp$1;} }; "); // Verify the 1st global redefinition of extern definition is not removed. testSame("/** @suppress {duplicate} */ var window;"); // Verify the 2nd global redefinition of extern definition is removed. test("/** @suppress {duplicate} */ var window; /** @suppress {duplicate} */ var window;", "/** @suppress {duplicate} */ var window;"); // Verify local masking extern made unique. test("function f() {var window}", "function f() {var window$jscomp$1}"); // Verify import * as <alias> is renamed. test( new String[] {"let a = 5;", "import * as a from './a.js'; const TAU = 2 * a.PI;"}, new String[] { "let a = 5;", "import * as a$jscomp$1 from './a.js'; const TAU = 2 * a$jscomp$1.PI" }); // Verify exported and imported names are untouched. 
// NOTE(review): the two calls below are the tail of a test method whose opening
// lines fall before this chunk — presumably a multi-module export-renaming test;
// confirm against the preceding lines. They check that a module-local binding
// colliding with a global `var a` gets a $jscomp$ suffix while the exported
// name itself is preserved.
test(
    new String[] {"var a;", "let a; export {a as a};"},
    new String[] {"var a;", "let a$jscomp$1; export {a$jscomp$1 as a};"});
test(
    new String[] {"var a;", "import {a as a} from './foo.js'; let b = a;"},
    new String[] {"var a;", "import {a as a$jscomp$1} from './foo.js'; let b = a$jscomp$1;"});
  }

  // Parameters that collide with a name already used by a sibling function are
  // made unique with a $jscomp$N suffix; the first occurrence keeps its name.
  // Covers plain, rest (...x), explicit destructured, shorthand destructured,
  // and nested destructured parameters.
  @Test
  public void testMakeParamNamesUnique() {
    test(
        "function f(x) { x; }\nfunction g(x) { x; }",
        "function f(x) { x; }\nfunction g(x$jscomp$1) { x$jscomp$1; }");
    test(
        "function f(x) { x; }\nfunction g(...x) { x; }",
        "function f(x) { x; }\nfunction g(...x$jscomp$1) { x$jscomp$1; }");
    test(
        "function f(x) { x; }\nfunction g({x: x}) { x; }",
        "function f(x) { x; }\nfunction g({x: x$jscomp$1}) { x$jscomp$1; }");
    test(
        "function f(x) { x; }\nfunction g({x}) { x; }",
        "function f(x) { x; }\nfunction g({x: x$jscomp$1}) { x$jscomp$1; }");
    test(
        "function f(x) { x; }\nfunction g({y: {x}}) { x; }",
        "function f(x) { x; }\nfunction g({y: {x: x$jscomp$1}}) { x$jscomp$1; }");
  }

  // Non-colliding parameter names are left alone; shorthand destructuring is
  // still expanded to its explicit {x: x} form.
  @Test
  public void testNoRenameParamNames() {
    testSame("function f(x) { x; }");
    testSame("function f(...x) { x; }");
    testSame("function f({x: x}) { x; }");
    test("function f({x}) { x; }", "function f({x: x}) { x; }");
    test("function f({y: {x}}) { x; }", "function f({y: {x: x}}) { x; }");
  }

  // A redeclared `var` in the same scope collapses to a plain assignment (or is
  // dropped entirely when it has no initializer); a redeclaration in an inner
  // function scope is renamed instead. Labeled redeclarations keep the label by
  // wrapping the replacement in a block.
  @Test
  public void testRemoveDuplicateVarDeclarations1() {
    test("function f() { var a; var a }", "function f() { var a; }");
    test("function f() { var a = 1; var a = 2 }", "function f() { var a = 1; a = 2 }");
    test("var a = 1; function f(){ var a = 2 }", "var a = 1; function f(){ var a$jscomp$1 = 2 }");
    test(
        "function f() { var a = 1; label1:var a = 2 }",
        "function f() { var a = 1; label1:{a = 2}}");
    test("function f() { var a = 1; label1:var a }", "function f() { var a = 1; label1:{} }");
    test("function f() { var a = 1; for(var a in b); }", "function f() { var a = 1; for(a in b); }");
  }

  // A catch parameter and a `var` that both shadow an outer `e` each get their
  // own distinct $jscomp$ suffix.
  @Test
  public void testRemoveDuplicateVarDeclarations2() {
    test(
        "var e = 1; function f(){ try {} catch (e) {} var e = 2 }",
        "var e = 1; function f(){ try {} catch (e$jscomp$2) {} var e$jscomp$1 = 2 }");
  }

  // Interaction of duplicate `var f` declarations with function declarations of
  // the same name: the redundant `var` is dropped, and conditionally declared
  // functions are rewritten as `var f = function(){}` / `f = function(){}`
  // assignments so the hoisted binding is unambiguous.
  @Test
  public void testRemoveDuplicateVarDeclarations3() {
    test("var f = 1; function f(){}", "f = 1; function f(){}");
    test("var f; function f(){}", "function f(){}");
    test("if (a) { var f = 1; } else { function f(){} }",
        "if (a) { var f = 1; } else { f = function (){} }");
    test("function f(){} var f = 1;", "function f(){} f = 1;");
    test("function f(){} var f;", "function f(){}");
    test("if (a) { function f(){} } else { var f = 1; }",
        "if (a) { var f = function (){} } else { f = 1; }");

    // TODO(johnlenz): Do we need to handle this differently for "third_party"
    // mode? Remove the previous function definitions?
    testSame("function f(){} function f(){}");
    test("if (a) { function f(){} } else { function f(){} }",
        "if (a) { var f = function (){} } else { f = function (){} }");
  }

  // It's important that we not remove this var completely. See
  // http://blickly.github.io/closure-compiler-issues/#290
  @Test
  public void testRemoveDuplicateVarDeclarations4() {
    testSame("if (!Arguments) { /** @suppress {duplicate} */ var Arguments = {}; }");
  }

  // If there are multiple duplicates, it's okay to remove all but the first.
  @Test
  public void testRemoveDuplicateVarDeclarations5() {
    test("var Arguments = {}; var Arguments = {};", "var Arguments = {}; Arguments = {};");
  }

  // A `var x` that duplicates the parameter `x` collapses to an assignment...
  @Test
  public void testRemoveVarDeclarationDuplicatesParam1() {
    test(
        "function f(x) { alert(x); var x = 0; alert(x); }",
        "function f(x) { alert(x); x = 0; alert(x); }");
  }

  // ...and is removed outright when it has no initializer.
  @Test
  public void testRemoveVarDeclarationDuplicatesParam2() {
    test(
        "function f(x) { alert(x); var x; alert(x); }",
        "function f(x) { alert(x); alert(x); }");
  }

  // Constant-looking names (ALL_CAPS, or marked @const) are preserved as-is;
  // the only rewrite here is splitting a multi-var declaration so the constant
  // gets its own statement.
  @Test
  public void testRenamingConstants() {
    testSame("var ACONST = 4; var b = ACONST;");
    test("var a, ACONST = 4;var b = ACONST;", "var a; var ACONST = 4; var b = ACONST;");
    testSame("var ACONST; ACONST = 4; var b = ACONST;");
    testSame("var ACONST = new Foo(); var b = ACONST;");
    testSame("/** @const */ var aa; aa = 1;");
  }

  // Names declared in the externs are never renamed in the source.
  @Test
  public void testSkipRenamingExterns() {
    test(
        externs("var EXTERN; var ext; ext.FOO;"),
        srcs("var b = EXTERN; var c = ext.FOO"),
        expected("var b = EXTERN; var c = ext.FOO"));
  }

  // Catch parameters shadowing an outer `e` are renamed, at top level...
  @Test
  public void testIssue166e() {
    test("var e = 2; try { throw 1 } catch(e) {}",
        "var e = 2; try { throw 1 } catch(e$jscomp$1) {}");
  }

  // ...inside a function when the var comes first...
  @Test
  public void testIssue166f() {
    test(
        lines(
            "function a() {",
            " var e = 2;",
            " try { throw 1 } catch(e) {}",
            "}"),
        lines(
            "function a() {",
            " var e = 2;",
            " try { throw 1 } catch(e$jscomp$1) {}",
            "}"));
  }

  // ...and also when the catch precedes the var declaration (hoisting).
  @Test
  public void testIssue166g() {
    test(
        lines(
            "function a() {",
            " try { throw 1 } catch(e) {}",
            " var e = 2;",
            "}"),
        lines(
            "function a() {",
            " try { throw 1 } catch(e$jscomp$1) {}",
            " var e = 2;",
            "}"));
  }

  // Two block-scoped `let e` bindings in sibling blocks: the second is renamed
  // so that all declarations in the output are unique.
  @Test
  public void testLetsInSeparateBlocks() {
    test(
        lines(
            "if (x) {",
            " let e;",
            " alert(e);",
            "}",
            "if (y) {",
            " let e;",
            " alert(e);",
            "}"),
        lines(
            "if (x) {",
            " let e;",
            " alert(e);",
            "}",
            "if (y) {",
            " let e$jscomp$1;",
            " alert(e$jscomp$1);",
            "}"));
  }

  // Same uniquification applied to catch parameters in sibling blocks.
  @Test
  public void testCatchesInSeparateBlocks() {
    test(
        lines(
            "if (x) {",
            " try {",
            " throw 1;",
            " } catch (e) {",
            " alert(e);",
            " }",
            "}",
            "if (y) {",
            " try {",
            " throw 2;",
            " } catch (e) {",
            " alert(e);",
            " }",
            "}"),
        lines(
            "if (x) {",
            " try {",
            " throw 1;",
            " } catch (e) {",
            " alert(e);",
            " }",
            "}",
            "if (y) {",
            " try {",
            " throw 2;",
            " } catch (e$jscomp$1) {",
            " alert(e$jscomp$1);",
            " }",
            "}"));
  }

  // A `let x` inside a catch block shadowing an outer `var x` is renamed.
  @Test
  public void testDeclInCatchBlock() {
    test(
        lines(
            "var x;",
            "try {",
            "} catch (e) {",
            " let x;",
            "}"),
        lines(
            "var x;",
            "try {",
            "} catch (e) {",
            " let x$jscomp$1",
            "}"));
  }

  // Duplicate declarations in the externs themselves must not break the pass;
  // allowExternsChanges() permits the pass to rewrite the externs tree.
  @Test
  public void testIssue() {
    allowExternsChanges();
    test(
        externs("var a,b,c; var a,b"),
        srcs("a(), b()"),
        expected("a(), b()"));
  }

  // Drives Normalize.normalizeSyntheticCode directly on a parsed AST and checks
  // that parameter renaming uses the supplied "prefix_" marker in the suffix.
  @Test
  public void testNormalizeSyntheticCode() {
    Compiler compiler = new Compiler();
    CompilerOptions options = new CompilerOptions();
    options.setEmitUseStrict(false);
    compiler.init(
        new ArrayList<SourceFile>(), new ArrayList<SourceFile>(), options);
    String code = "function f(x) {} function g(x) {}";
    Node ast = compiler.parseSyntheticCode(code);
    Normalize.normalizeSyntheticCode(compiler, ast, "prefix_");
    assertThat(compiler.toSource(ast))
        .isEqualTo("function f(x$jscomp$prefix_0){}function g(x$jscomp$prefix_1){}");
  }

  // The IS_CONSTANT_NAME property is set on both the declaration and the use
  // of an ALL_CAPS name (2 nodes).
  @Test
  public void testIsConstant() {
    testSame("var CONST = 3; var b = CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(2);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // Const-ness is inferred through shorthand destructuring; after expansion to
  // {CONST: CONST} there are 4 annotated occurrences of the name.
  @Test
  public void testIsConstantByDestructuring() {
    test(
        "var {CONST} = {CONST:3}; var b = CONST;",
        "var {CONST: CONST} = {CONST:3}; var b = CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(4);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // Same, but with a destructuring default value (3 annotated occurrences).
  @Test
  public void testIsConstantByDestructuringWithDefault() {
    test("var {CONST = 3} = {}; var b = CONST;", "var {CONST: CONST = 3} = {}; var b = CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(3);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // ALL_CAPS property names are annotated too: assignment form...
  @Test
  public void testPropertyIsConstant1() {
    testSame("var a = {}; a.CONST = 3; var b = a.CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(2);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // ...and object-literal form.
  @Test
  public void testPropertyIsConstant2() {
    testSame("var a = {CONST: 3}; var b = a.CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(2);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // Getter properties with constant-style names are annotated as well.
  @Test
  public void testGetterPropertyIsConstant() {
    testSame("var a = { get CONST() {return 3} }; var b = a.CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(2);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // Verifying that a SET is properly annotated.
  @Test
  public void testSetterPropertyIsConstant() {
    testSame("var a = { set CONST(b) {throw 'invalid'} }; var c = a.CONST;");
    Node n = getLastCompiler().getRoot();
    Set<Node> constantNodes = findNodesWithProperty(n, Node.IS_CONSTANT_NAME);
    assertThat(constantNodes).hasSize(2);
    for (Node hasProp : constantNodes) {
      assertThat(hasProp.getString()).isEqualTo("CONST");
    }
  }

  // @expose'd properties are rewritten from dotted access to quoted bracket
  // access so later renaming passes leave them alone.
  @Test
  public void testExposeSimple() {
    test("var x = {}; /** @expose */ x.y = 3; x.y = 5;",
        "var x = {}; /** @expose */ x['y'] = 3; x['y'] = 5;");
  }

  // An @expose on either the literal key or a later assignment quotes every
  // occurrence of both properties.
  @Test
  public void testExposeComplex() {
    test("var x = {/** @expose */ a: 1, b: 2}; x.a = 3; /** @expose */ x.b = 5;",
        "var x = {/** @expose */ 'a': 1, 'b': 2}; x['a'] = 3; /** @expose */ x['b'] = 5;");
  }

  // A local `var f` shadowing its own enclosing function's name is renamed.
  @Test
  public void testShadowFunctionName() {
    test(
        lines(
            "function f() {",
            " var f = 'test';",
            " console.log(f);",
            "}"),
        lines(
            "function f() {",
            " var f$jscomp$1 = 'test';",
            " console.log(f$jscomp$1);",
            "}"));
  }

  /**
   * Collects every node in the last compile's AST that carries the given
   * boolean node property (e.g. {@code Node.IS_CONSTANT_NAME}).
   *
   * @param root subtree to search
   * @param prop node property key to test with {@code getBooleanProp}
   * @return the set of matching nodes (empty if none)
   */
  private Set<Node> findNodesWithProperty(Node root, final byte prop) {
    final Set<Node> set = new HashSet<>();
    NodeTraversal.traversePostOrder(
        getLastCompiler(),
        root,
        (NodeTraversal t, Node node, Node parent) -> {
          if (node.getBooleanProp(prop)) {
            set.add(node);
          }
        });
    return set;
  }

  @Test
  public void testRenamingConstantProperties() throws Exception {
    // In order to detect that foo.BAR is a constant, we need collapse
    // properties to run first so that we can tell if the initial value is
    // non-null and immutable. The Normalize pass doesn't modify the code
    // in these examples, it just infers const-ness of some variables, so
    // we call enableNormalize to make the Normalize.VerifyConstants pass run.

    // TODO(johnlenz): fix this so it is just another test case.
    CompilerTestCase tester =
        new CompilerTestCase() {
          @Override
          protected int getNumRepetitions() {
            // The normalize pass is only run once.
            return 1;
          }

          @Override
          protected CompilerPass getProcessor(Compiler compiler) {
            return new CollapseProperties(compiler, PropertyCollapseLevel.ALL);
          }
        };

    tester.setUp();
    tester.enableNormalize();

    tester.test(
        "var a={}; a.ACONST = 4;var b = 1; b = a.ACONST;",
        "var a$ACONST = 4; var b = 1; b = a$ACONST;");
    tester.test(
        "var a={b:{}}; a.b.ACONST = 4;var b = 1; b = a.b.ACONST;",
        "var a$b$ACONST = 4;var b = 1; b = a$b$ACONST;");
    tester.test(
        "var a = {FOO: 1};var b = 1; b = a.FOO;",
        "var a$FOO = 1; var b = 1; b = a$FOO;");
    tester.testSame(
        externs("var EXTERN; var ext; ext.FOO;"),
        srcs("var b = EXTERN; var c = ext.FOO"));
    tester.test(
        "var a={}; a.ACONST = 4; var b = 1; b = a.ACONST;",
        "var a$ACONST = 4; var b = 1; b = a$ACONST;");
    tester.test(
        "var a = {}; function foo() { var d = a.CONST; }; (function(){a.CONST=4})();",
        "var a$CONST;function foo(){var d = a$CONST;}; (function(){a$CONST = 4})();");
    tester.test(
        "var a = {}; a.ACONST = new Foo(); var b = 1; b = a.ACONST;",
        "var a$ACONST = new Foo(); var b = 1; b = a$ACONST;");

    tester.tearDown();
  }

  // Arrow-function expression bodies are rewritten to explicit block bodies
  // with a `return`.
  @Test
  public void testFunctionBlock1() {
    test("() => 1;", "() => { return 1; }");
  }

  @Test
  public void testFunctionBlock2() {
    test("var args = 1; var foo = () => args;", "var args = 1; var foo = () => { return args; }");
  }

  @Test
  public void testArrowFunctionInFunction() {
    test(
        lines(
            "function foo() {",
            " var x = () => 1;",
            " return x();",
            "}"),
        lines(
            "function foo() {",
            " var x = () => { return 1; };",
            " return x();",
            "}"));
  }

  // ES6 shorthand object properties are expanded to explicit key: value pairs
  // in every context: plain literals, returns, destructuring (with and without
  // defaults), and assignment patterns. Already-explicit or non-object forms
  // are left unchanged (cases 10-13).
  @Test
  public void testES6ShorthandPropertySyntax01() {
    test("obj = {x, y};", "obj = {x: x, y: y}");
  }

  @Test
  public void testES6ShorthandPropertySyntax02() {
    test("var foo = {x, y};", "var foo = {x: x, y: y}");
  }

  @Test
  public void testES6ShorthandPropertySyntax03() {
    test(
        lines(
            "function foo(a, b, c) {",
            " return {",
            " a,",
            " b,",
            " c",
            " };",
            "}"),
        lines(
            "function foo(a, b, c) {",
            " return {",
            " a: a,",
            " b: b,",
            " c: c",
            " };",
            "}"));
  }

  @Test
  public void testES6ShorthandPropertySyntax04() {
    test("var foo = {x};", "var foo = {x: x}");
  }

  @Test
  public void testES6ShorthandPropertySyntax05() {
    test("var {a = 5} = obj;", "var {a: a = 5} = obj;");
  }

  @Test
  public void testES6ShorthandPropertySyntax06() {
    test("var {a = 5, b = 3} = obj;", "var {a: a = 5, b: b = 3} = obj;");
  }

  @Test
  public void testES6ShorthandPropertySyntax07() {
    test("var {a: a = 5, b = 3} = obj;", "var {a: a = 5, b: b = 3} = obj;");
  }

  @Test
  public void testES6ShorthandPropertySyntax08() {
    test("var {a, b} = obj;", "var {a: a, b: b} = obj;");
  }

  @Test
  public void testES6ShorthandPropertySyntax09() {
    test("({a = 5} = obj);", "({a: a = 5} = obj);");
  }

  @Test
  public void testES6ShorthandPropertySyntax10() {
    testSame("function f(a = 5) {}");
  }

  @Test
  public void testES6ShorthandPropertySyntax11() {
    testSame("[a = 5] = obj;");
  }

  @Test
  public void testES6ShorthandPropertySyntax12() {
    testSame("({a: a = 5} = obj)");
  }

  @Test
  public void testES6ShorthandPropertySyntax13() {
    testSame("({['a']: a = 5} = obj);");
  }

  // Shorthand export specifiers are rewritten to the explicit `a as a` form.
  @Test
  public void testRewriteExportSpecShorthand1() {
    test("var a; export {a};", "var a; export {a as a};");
  }

  @Test
  public void testRewriteExportSpecShorthand2() {
    test("export {a, b as c, d};", "export {a as a, b as c, d as d};");
  }

  // Inline `export var/let/const/function/class` declarations are split into a
  // plain declaration followed by an explicit export specifier list.
  @Test
  public void testSplitExportDeclarationWithVar() {
    test("export var a;", "var a; export {a as a};");
    test("export var a = 4;", "var a = 4; export {a as a};");
    test(
        "export var a, b;",
        lines(
            "var a;",
            "var b;",
            "export {a as a, b as b};"));
  }

  @Test
  public void testSplitExportDeclarationWithShorthandProperty() {
    test("export var a = {b};", "var a = {b: b}; export {a as a};");
  }

  @Test
  public void testSplitExportDeclarationWithDestructuring() {
    test("export var {} = {};", "var {} = {}; export {};");
    test(lines(
        "let obj = {a: 3, b: 2};",
        "export var {a, b: d, e: f = 2} = obj;"),
        lines(
            "let obj = {a: 3, b: 2};",
            "var {a: a, b: d, e: f = 2} = obj;",
            "export {a as a, d as d, f as f};"));
  }

  @Test
  public void testSplitExportDeclarationWithLet() {
    test("export let a;", "let a; export {a as a};");
  }

  @Test
  public void testSplitExportDeclarationWithConst() {
    test("export const a = 17;", "const a = 17; export {a as a};");
  }

  @Test
  public void testSplitExportDeclarationOfFunction() {
    test("export function bar() {};",
        lines(
            "function bar() {}",
            "export {bar as bar};"
        ));

    // Don't need to split declarations in default exports since they are either unnamed, or the
    // name is declared in the module scope only.
    testSame("export default function() {};");
    testSame("export default function foo() {};");
  }

  @Test
  public void testSplitExportDeclarationOfClass() {
    test("export class Foo {};",
        lines("class Foo {}",
            "export {Foo as Foo};"));
    testSame("export default class Bar {}");
    testSame("export default class {}");
  }
}
tiobe/closure-compiler
test/com/google/javascript/jscomp/NormalizeTest.java
Java
apache-2.0
35,046