package de.mpii.rdf3x; import java.io.InputStream; import java.io.Reader; import java.sql.Array; import java.sql.Blob; import java.sql.Clob; import java.sql.Date; import java.sql.NClob; import java.sql.Ref; import java.sql.RowId; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLWarning; import java.sql.SQLXML; import java.sql.Time; import java.sql.Timestamp; import java.util.Map; // RDF-3X // (c) 2009 Thomas Neumann. Web site: http://www.mpi-inf.mpg.de/~neumann/rdf3x // // This work is licensed under the Creative Commons // Attribution-Noncommercial-Share Alike 3.0 Unported License. To view a copy // of this license, visit http://creativecommons.org/licenses/by-nc-sa/3.0/ // or send a letter to Creative Commons, 171 Second Street, Suite 300, // San Francisco, California, 94105, USA. public final class ResultSet implements java.sql.ResultSet { // The header private String[] header; // The data private String[][] data; // The current position private int row; // The last column private int lastCol; // Constructor ResultSet(String[] header,String[][] data) { this.header=header; this.data=data; row=-1; } // Move absolutely public boolean absolute(int row) { if (row>0) { if (row>(data.length+1)) return false; this.row=row-1; return true; } else { if ((-row)>data.length) return false; this.row=data.length-row; return true; } } // Move after the last entry public void afterLast() { row=data.length; } // Move before the first entry public void beforeFirst() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Cancel all updates public void cancelRowUpdates() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Clear all warnings public void clearWarnings() {} // Releases resources public void close() { data=null; } // Deletes the current row public void deleteRow() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Find a column public int findColumn(String columnLabel) throws SQLException { for (int index=0;index<header.length;index++) if (header[index].equals(columnLabel)) return index+1; throw new SQLException(); } // Go to the first entry public boolean first() { row=0; return row<data.length; } // Get an entry as array public Array getArray(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as array public Array getArray(String columnLabel) throws SQLException { return getArray(findColumn(columnLabel)); } // Get an entry as ascii stream public InputStream getAsciiStream(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as ascii stream public InputStream getAsciiStream(String columnLabel) throws SQLException { return getAsciiStream(findColumn(columnLabel)); } // Get an entry as big decimal public java.math.BigDecimal getBigDecimal(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } /** * Get an entry as big decimal * @deprecated */ public java.math.BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as big decimal public java.math.BigDecimal getBigDecimal(String columnLabel) throws SQLException { return getBigDecimal(findColumn(columnLabel)); } /** * Get an entry as big decimal. 
* @deprecated */ public java.math.BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { return getBigDecimal(findColumn(columnLabel),scale); } // Get an entry as binary stream public InputStream getBinaryStream(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as binary stream public InputStream getBinaryStream(String columnLabel) throws SQLException { return getBinaryStream(findColumn(columnLabel)); } // Get an entry as blob public Blob getBlob(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as blob public Blob getBlob(String columnLabel) throws SQLException { return getBlob(findColumn(columnLabel)); } // Get an entry as boolean public boolean getBoolean(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as boolean public boolean getBoolean(String columnLabel) throws SQLException { return getBoolean(findColumn(columnLabel)); } // Get an entry as byte public byte getByte(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as byte public byte getByte(String columnLabel) throws SQLException { return getByte(findColumn(columnLabel)); } // Get an entry as bytes public byte[] getBytes(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as bytes public byte[] getBytes(String columnLabel) throws SQLException { return getBytes(findColumn(columnLabel)); } // Get an entry as character stream public Reader getCharacterStream(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as character stream public Reader getCharacterStream(String columnLabel) throws SQLException { return getCharacterStream(findColumn(columnLabel)); } // Get an entry as clob public Clob getClob(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as clob public Clob getClob(String columnLabel) throws SQLException { return getClob(findColumn(columnLabel)); } // Get the concurrency setting public int getConcurrency() { return java.sql.ResultSet.CONCUR_READ_ONLY; } // Get the cursor name public String getCursorName() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as date public Date getDate(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as date public Date getDate(int columnIndex, java.util.Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as date public Date getDate(String columnLabel) throws SQLException { return getDate(findColumn(columnLabel)); } // Get an entry as date public Date getDate(String columnLabel, java.util.Calendar cal) throws SQLException { return getDate(findColumn(columnLabel),cal); } // Get an entry as double public double getDouble(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as double public double getDouble(String columnLabel) throws SQLException { return getDouble(findColumn(columnLabel)); } // Get the fetch direction public int getFetchDirection() { return java.sql.ResultSet.FETCH_FORWARD; } // Get the fetch size public int getFetchSize() { return 0; } // Get an entry as float public float getFloat(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as float public float getFloat(String 
columnLabel) throws SQLException { return getFloat(findColumn(columnLabel)); } // Get the holdability public int getHoldability() { return java.sql.ResultSet.CLOSE_CURSORS_AT_COMMIT; } // Get an entry as int public int getInt(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as int public int getInt(String columnLabel) throws SQLException { return getInt(findColumn(columnLabel)); } // Get an entry as long public long getLong(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as long public long getLong(String columnLabel) throws SQLException { return getLong(findColumn(columnLabel)); } // Get the meta data public java.sql.ResultSetMetaData getMetaData() { return new ResultSetMetaData(header); } // Get an entry as stream public Reader getNCharacterStream(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as stream public Reader getNCharacterStream(String columnLabel) throws SQLException { return getNCharacterStream(findColumn(columnLabel)); } // Get an entry as nclob public NClob getNClob(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as nclob public NClob getNClob(String columnLabel) throws SQLException { return getNClob(findColumn(columnLabel)); } // Get an entry as string public String getNString(int columnIndex) throws SQLException { return getString(columnIndex); } // Get an entry as string public String getNString(String columnLabel) throws SQLException { return getNString(findColumn(columnLabel)); } // Get an entry public Object getObject(int columnIndex) throws SQLException { return getString(columnIndex); } // Get an entry public Object getObject(int columnIndex, Map<String,Class<?>> map) throws SQLException { return getString(columnIndex); } // Get an entry public Object getObject(String columnLabel) throws SQLException { return getObject(findColumn(columnLabel)); } // Get an entry public Object getObject(String columnLabel, Map<String,Class<?>> map) throws SQLException { return getObject(findColumn(columnLabel),map); } // Get an entry as ref public Ref getRef(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as ref public Ref getRef(String columnLabel) throws SQLException { return getRef(findColumn(columnLabel)); } // Get the current row number public int getRow() { return row+1; } // Get an entry as rowid public RowId getRowId(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as rowid public RowId getRowId(String columnLabel) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as short public short getShort(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as short public short getShort(String columnLabel) throws SQLException { return getShort(findColumn(columnLabel)); } // Get an entry as SQL public SQLXML getSQLXML(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as SQL public SQLXML getSQLXML(String columnLabel) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get the corresponding statement public Statement getStatement() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as string public String getString(int columnIndex) throws SQLException { if 
((row>=data.length)||(columnIndex<1)||(columnIndex>data[row].length)) throw new SQLException(); String s=data[row][columnIndex-1]; lastCol=columnIndex; if ("NULL".equals(s)) return null; else return s; } // Get an entry as string public String getString(String columnLabel) throws SQLException { return getString(findColumn(columnLabel)); } // Get an entry as time public Time getTime(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as time public Time getTime(int columnIndex, java.util.Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as time public Time getTime(String columnLabel) throws SQLException { return getTime(findColumn(columnLabel)); } // Get an entry as tme public Time getTime(String columnLabel, java.util.Calendar cal) throws SQLException { return getTime(findColumn(columnLabel),cal); } // Get an entry as timestamp public Timestamp getTimestamp(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as timestamp public Timestamp getTimestamp(int columnIndex, java.util.Calendar cal) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as timestamp public Timestamp getTimestamp(String columnLabel) throws SQLException { return getTimestamp(findColumn(columnLabel)); } // Get an entry as timestamp public Timestamp getTimestamp(String columnLabel, java.util.Calendar cal) throws SQLException { return getTimestamp(findColumn(columnLabel),cal); } // Get the type public int getType() { return java.sql.ResultSet.TYPE_FORWARD_ONLY; } /** * Get an entry as unicode stream * @deprecated */ public InputStream getUnicodeStream(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } /** * Get an entry as unicode stream * @deprecated */ public InputStream getUnicodeStream(String columnLabel) throws SQLException { return getUnicodeStream(findColumn(columnLabel)); } // Get an entry as URL public java.net.URL getURL(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Get an entry as URL public java.net.URL getURL(String columnLabel) throws SQLException { return getURL(findColumn(columnLabel)); } // Get warnings public SQLWarning getWarnings() { return null; } // Insert a row public void insertRow() throws SQLException { throw new SQLFeatureNotSupportedException(); } // After the last row public boolean isAfterLast() { return row>=data.length; } // Before the first row public boolean isBeforeFirst() { return false; } // Closed public boolean isClosed() { return data==null; } // At first row public boolean isFirst() { return row==0; } // At last row public boolean isLast() { return row==(data.length-1); } // Go to the last row public boolean last() { if (data.length>0) { row=data.length-1; return true; } else return false; } // Move the cursor public void moveToCurrentRow() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Move the cursor public void moveToInsertRow() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Go to the next row public boolean next() { if (row>=data.length) return false; ++row; return row<data.length; } // Go to the previous row public boolean previous() { if (row==0) return false; --row; return true; } // Refresh the current tow public void refreshRow() {} // Move the cursor relatively public boolean relative(int rows) { if (rows>=0) { if (row+rows>=data.length) { row=data.length; return 
false; } else { row+=rows; return true; } } else { if (row+rows<0) { row=0; return true; } else { row+=rows; return true; } } } // Deleted public boolean rowDeleted() { return false; } // Inserted public boolean rowInserted() { return false; } // Updated public boolean rowUpdated() { return false; } // Fetch direction public void setFetchDirection(int direction) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Fetch size public void setFetchSize(int rows) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateArray(int columnIndex, Array x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateArray(String columnLabel, Array x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBigDecimal(int columnIndex, java.math.BigDecimal x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBigDecimal(String columnLabel, java.math.BigDecimal x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Updare public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(int columnIndex, Blob x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(String columnLabel, Blob x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(String columnLabel, InputStream 
inputStream) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBoolean(int columnIndex, boolean x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBoolean(String columnLabel, boolean x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateByte(int columnIndex, byte x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateByte(String columnLabel, byte x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBytes(int columnIndex, byte[] x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateBytes(String columnLabel, byte[] x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(int columnIndex, Clob x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(int columnIndex, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(String columnLabel, Clob x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(String columnLabel, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateDate(int columnIndex, Date x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateDate(String columnLabel, Date x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateDouble(int columnIndex, double x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateDouble(String columnLabel, double x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateFloat(int columnIndex, float x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateFloat(String columnLabel, float x) throws SQLException { throw new SQLFeatureNotSupportedException(); } 
// Update public void updateInt(int columnIndex, int x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateInt(String columnLabel, int x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateLong(int columnIndex, long x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateLong(String columnLabel, long x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(int columnIndex, NClob nClob) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(int columnIndex, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(String columnLabel, NClob nClob) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(String columnLabel, Reader reader) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNString(int columnIndex, String nString) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNString(String columnLabel, String nString) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNull(int columnIndex) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateNull(String columnLabel) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateObject(int columnIndex, Object x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateObject(String columnLabel, Object x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateRef(int columnIndex, Ref x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateRef(String columnLabel, Ref x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateRow() throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateRowId(int columnIndex, RowId x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void 
updateRowId(String columnLabel, RowId x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateShort(int columnIndex, short x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateShort(String columnLabel, short x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateString(int columnIndex, String x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateString(String columnLabel, String x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateTime(int columnIndex, Time x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateTime(String columnLabel, Time x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Update public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { throw new SQLFeatureNotSupportedException(); } // Was the last column NULL? public boolean wasNull() throws SQLException { return getString(lastCol)==null; } // Wrapper? public boolean isWrapperFor(Class<?> iface) { return false; } // Unwrap public <T> T unwrap(Class<T> iface) throws SQLException { throw new SQLException(); } public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { // TODO Auto-generated method stub return null; } public <T> T getObject(String columnLabel, Class<T> type) throws SQLException { // TODO Auto-generated method stub return null; } }
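
The class above is a read-only, forward-iterable ResultSet backed entirely by strings, where the stored literal "NULL" is surfaced to callers as a Java null. Below is a minimal usage sketch, not part of the driver itself: the helper class name ResultSetPrinter is hypothetical, it uses only the standard java.sql interfaces, and it assumes the companion ResultSetMetaData implementation reports the column count and names.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

// Hypothetical helper: prints every row of a forward-only, string-valued ResultSet.
public final class ResultSetPrinter {
    public static void print(ResultSet rs) throws SQLException {
        ResultSetMetaData meta = rs.getMetaData();
        int columns = meta.getColumnCount();
        while (rs.next()) {                        // cursor starts before the first row
            StringBuilder line = new StringBuilder();
            for (int col = 1; col <= columns; col++) {
                String value = rs.getString(col);  // the driver maps the stored "NULL" to null
                line.append(meta.getColumnName(col))
                    .append('=')
                    .append(value == null ? "<null>" : value)
                    .append(' ');
            }
            System.out.println(line.toString().trim());
        }
    }
}

Because getType() reports TYPE_FORWARD_ONLY and getConcurrency() reports CONCUR_READ_ONLY, callers should stick to next() and the getString/getObject accessors; most of the typed getters and all update methods throw SQLFeatureNotSupportedException.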
/* * Copyright 2011 Greg Haines * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.greghaines.jesque.meta.dao.impl; import static net.greghaines.jesque.utils.ResqueConstants.FAILED; import static net.greghaines.jesque.utils.ResqueConstants.QUEUE; import static net.greghaines.jesque.utils.ResqueConstants.QUEUES; import static net.greghaines.jesque.utils.ResqueConstants.STAT; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; import net.greghaines.jesque.Config; import net.greghaines.jesque.Job; import net.greghaines.jesque.JobFailure; import net.greghaines.jesque.json.ObjectMapperFactory; import net.greghaines.jesque.meta.dao.FailureDAO; import net.greghaines.jesque.utils.JesqueUtils; import net.greghaines.jesque.utils.PoolUtils; import net.greghaines.jesque.utils.PoolUtils.PoolWork; import redis.clients.jedis.Jedis; import redis.clients.jedis.util.Pool; /** * Accesses failure information about Jesque/Resque from Redis. * * @author Greg Haines */ public class FailureDAORedisImpl implements FailureDAO { private final Config config; private final Pool<Jedis> jedisPool; /** * Constructor. * @param config the Jesque configuration * @param jedisPool the connection pool to Redis */ public FailureDAORedisImpl(final Config config, final Pool<Jedis> jedisPool) { if (config == null) { throw new IllegalArgumentException("config must not be null"); } if (jedisPool == null) { throw new IllegalArgumentException("jedisPool must not be null"); } this.config = config; this.jedisPool = jedisPool; } /** * {@inheritDoc} */ @Override public long getCount() { return PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, Long>() { /** * {@inheritDoc} */ @Override public Long doWork(final Jedis jedis) throws Exception { final String failedStr = jedis.get(key(STAT, FAILED)); return (failedStr == null) ? 
0L : Long.parseLong(failedStr); } }); } /** * {@inheritDoc} */ @Override public long getFailQueueJobCount() { return PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, Long>() { /** * {@inheritDoc} */ @Override public Long doWork(final Jedis jedis) throws Exception { return jedis.llen(key(FAILED)); } }); } /** * {@inheritDoc} */ @Override public List<JobFailure> getFailures(final long offset, final long count) { return PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, List<JobFailure>>() { /** * {@inheritDoc} */ @Override public List<JobFailure> doWork(final Jedis jedis) throws Exception { final List<String> payloads = jedis.lrange(key(FAILED), offset, offset + count - 1); final List<JobFailure> failures = new ArrayList<JobFailure>(payloads.size()); for (final String payload : payloads) { if (payload.charAt(0) == '{') { // Ignore non-JSON strings failures.add(ObjectMapperFactory.get().readValue(payload, JobFailure.class)); } } return failures; } }); } /** * {@inheritDoc} */ @Override public void clear() { PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, Void>() { /** * {@inheritDoc} */ @Override public Void doWork(final Jedis jedis) throws Exception { jedis.del(key(FAILED)); return null; } }); } /** * {@inheritDoc} */ @Override public Date requeue(final long index) { Date retryDate = null; final List<JobFailure> failures = getFailures(index, 1); if (!failures.isEmpty()) { retryDate = PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, Date>() { /** * {@inheritDoc} */ @Override public Date doWork(final Jedis jedis) throws Exception { final Date retriedAt = new Date(); final JobFailure failure = failures.get(0); failure.setRetriedAt(retriedAt); jedis.lset(key(FAILED), index, ObjectMapperFactory.get().writeValueAsString(failure)); enqueue(jedis, failure.getQueue(), failure.getPayload()); return retriedAt; } }); } return retryDate; } /** * {@inheritDoc} */ @Override public void remove(final long index) { PoolUtils.doWorkInPoolNicely(this.jedisPool, new PoolWork<Jedis, Void>() { /** * {@inheritDoc} */ @Override public Void doWork(final Jedis jedis) throws Exception { final String failedKey = key(FAILED); final String randId = UUID.randomUUID().toString(); jedis.lset(failedKey, index, randId); jedis.lrem(failedKey, 1, randId); return null; } }); } protected void enqueue(final Jedis jedis, final String queue, final Job job) throws IOException { if (queue == null || "".equals(queue)) { throw new IllegalArgumentException("queue must not be null or empty: " + queue); } if (job == null) { throw new IllegalArgumentException("job must not be null"); } if (!job.isValid()) { throw new IllegalStateException("job is not valid: " + job); } final String msg = ObjectMapperFactory.get().writeValueAsString(job); jedis.sadd(key(QUEUES), queue); jedis.rpush(key(QUEUE, queue), msg); } /** * Builds a namespaced Redis key with the given arguments. * * @param parts * the key parts to be joined * @return an assembled String key */ private String key(final String... parts) { return JesqueUtils.createKey(this.config.getNamespace(), parts); } }
/** * Portions Copyright 2001 Sun Microsystems, Inc. * Portions Copyright 1999-2001 Language Technologies Institute, * Carnegie Mellon University. * All Rights Reserved. Use is subject to license terms. * * See the file "license.terms" for information on usage and * redistribution of this file, and for a DISCLAIMER OF ALL * WARRANTIES. */ package com.sun.speech.freetts; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.io.Reader; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.Text; import com.sun.speech.freetts.audio.AudioPlayer; import com.sun.speech.freetts.lexicon.Lexicon; import com.sun.speech.freetts.relp.LPCResult; import com.sun.speech.freetts.util.BulkTimer; import com.sun.speech.freetts.util.Utilities; /** * Performs text-to-speech using a series of * <code>UtteranceProcessors</code>. It is the main conduit to the FreeTTS * speech synthesizer. It can perform TTS on ASCII text, * a JSML document, an <code>InputStream</code>, or a * <code>FreeTTSSpeakable</code>, by invoking the method <code>speak</code>. * * <p>Before a Voice can perform TTS, it must have a * <code>Lexicon</code>, from which it gets the vocabulary, and * an <code>AudioPlayer</code>, to which it sends the synthesized output. * * <p><b>Example</b> (using the <code>CMUDiphoneVoice</code>, * <code>CMULexicon</code> and <code>JavaClipAudioPlayer</code>): * * <pre> * Voice voice = new CMUDiphoneVoice(); * * // sets the Lexicon * voice.setLexicon(new CMULexicon()); * * // sets the AudioPlayer * voice.setAudioPlayer(new JavaClipAudioPlayer()); * * // loads the Voice * voice.allocate(); * * // start talking * voice.speak("I can talk forever without getting tired!"); * </pre> * * * <p>A user can override the AudioPlayer to use by defining the * "com.sun.speech.freetts.voice.defaultAudioPlayer" system property. * The value of this property must be the name of a class that * implements the AudioPlayer interface, and which also has a no-arg * constructor. * * @see VoiceManager * @see VoiceDirectory */ public abstract class Voice implements UtteranceProcessor, Dumpable { /** Logger instance. */ private static final Logger LOGGER = Logger.getLogger(Voice.class.getName()); /** * Constant that describes the name of the unit database used by * this voice. 
*/ public final static String DATABASE_NAME = "databaseName"; private List utteranceProcessors; private Map featureProcessors; private FeatureSetImpl features; private boolean metrics = false; private boolean detailedMetrics = false; private boolean dumpUtterance = false; private boolean dumpRelations = false; private String runTitle = "unnamed run"; private Lexicon lexicon = null; private AudioPlayer defaultAudioPlayer = null; private AudioPlayer audioPlayer = null; private UtteranceProcessor audioOutput; private OutputQueue outputQueue = null; private String waveDumpFile = null; private BulkTimer runTimer = new BulkTimer(); private BulkTimer threadTimer = new BulkTimer(); private boolean externalOutputQueue = false; private boolean externalAudioPlayer = false; private float nominalRate = 150; // nominal speaking rate for this voice private float pitch = 100; // pitch baseline (hertz) private float range = 10; // pitch range (hertz) private float pitchShift = 1; // F0 Shift private float volume = 0.8f; // the volume (range 0 to 1) private float durationStretch = 1f; // the duration stretch private boolean loaded = false; private String name = "default_name"; private Age age = Age.DONT_CARE; private Gender gender = Gender.DONT_CARE; private String description = "default description"; private Locale locale = Locale.getDefault(); private String domain = "general"; private String style = "standard"; private String organization = "unknown"; /** * Prefix for System property names. */ public final static String PROP_PREFIX = "com.sun.speech.freetts.voice."; /** * Feature name for the silence phone string. */ public final static String FEATURE_SILENCE = "silence"; /** * Feature name for the join type string. */ public final static String FEATURE_JOIN_TYPE = "join_type"; /** * Feature name for the default AudioPlayer class to use. */ public final static String DEFAULT_AUDIO_PLAYER = PROP_PREFIX + "defaultAudioPlayer"; /** * The default class to use for the DEFAULT_AUDIO_PLAYER. */ public final static String DEFAULT_AUDIO_PLAYER_DEFAULT = "com.sun.speech.freetts.audio.JavaStreamingAudioPlayer"; /** * Creates a new Voice. Utterances are sent to an * output queue to be rendered as audio. Utterances are placed * on the queue by an output thread. This * queue is usually created via a call to 'createOutputThread,' * which creates a thread that waits on the queue and sends the * output to the audio player associated with this voice. If * the queue is null, the output is rendered in the calling * thread. * * @see #createOutputThread */ public Voice() { /* Make the utteranceProcessors a synchronized list to avoid * some threading issues. */ utteranceProcessors = Collections.synchronizedList(new ArrayList()); features = new FeatureSetImpl(); featureProcessors = new HashMap(); try { nominalRate = Float.parseFloat( Utilities.getProperty(PROP_PREFIX + "speakingRate","150")); pitch = Float.parseFloat( Utilities.getProperty(PROP_PREFIX + "pitch","100")); range = Float.parseFloat( Utilities.getProperty(PROP_PREFIX + "range","10")); volume = Float.parseFloat( Utilities.getProperty(PROP_PREFIX + "volume","1.0")); } catch (SecurityException se) { // can't get properties, just use defaults } outputQueue = null; audioPlayer = null; defaultAudioPlayer = null; } /** * Creates a new Voice like above, except that it also * stores the properties of the voice. 
* @param name the name of the voice * @param gender the gender of the voice * @param age the age of the voice * @param description a human-readable string providing a * description that can be displayed for the users. * @param locale the locale of the voice * @param domain the domain of this voice. For example, * @param organization the organization which created the voice * &quot;general&quot;, &quot;time&quot;, or * &quot;weather&quot;. * * @see #Voice() */ public Voice(String name, Gender gender, Age age, String description, Locale locale, String domain, String organization) { this(); setName(name); setGender(gender); setAge(age); setDescription(description); setLocale(locale); setDomain(domain); setOrganization(organization); } /** * Speaks the given text. * * @param text the text to speak * * @return <code>true</code> if the given text is spoken properly; * otherwise <code>false</code> */ public boolean speak(String text) { return speak(new FreeTTSSpeakableImpl(text)); } /** * Speaks the given document. * * @param doc the JSML document to speak * * @return <code>true</code> if the given document is spoken properly; * otherwise <code>false</code> */ public boolean speak(Document doc) { return speak(new FreeTTSSpeakableImpl(doc)); } /** * Speaks the input stream. * * @param inputStream the inputStream to speak * * @return <code>true</code> if the given input stream is spoken properly; * otherwise <code>false</code> */ public boolean speak(InputStream inputStream) { return speak(new FreeTTSSpeakableImpl(inputStream)); } /** * Speak the given queue item. This is a synchronous method that * does not return until the speakable is completely * spoken or has been cancelled. * * @param speakable the item to speak * * @return <code>true</code> if the utterance was spoken properly, * <code>false</code> otherwise */ public boolean speak(FreeTTSSpeakable speakable) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("speak(FreeTTSSpeakable) called"); } boolean ok = true; boolean posted = false; getAudioPlayer().startFirstSampleTimer(); for (Iterator i = tokenize(speakable); !speakable.isCompleted() && i.hasNext() ; ) { try { Utterance utterance = (Utterance) i.next(); if (utterance != null) { processUtterance(utterance); posted = true; } } catch (ProcessException pe) { ok = false; } } if (ok && posted) { runTimer.start("WaitAudio"); ok = speakable.waitCompleted(); runTimer.stop("WaitAudio"); } if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("speak(FreeTTSSpeakable) completed"); } return ok; } /** * @deprecated As of FreeTTS 1.2, replaced by {@link #allocate}. */ public void load() { allocate(); } /** * Allocate this Voice. It loads the lexicon and the * audio output handler, and creates an audio output thread by * invoking <code>createOutputThread()</code>, if * one is not already created. It then calls the <code>loader()</code> * method to load Voice-specific data, which include utterance processors. 
*/ public void allocate() { if (isLoaded()) { return; } BulkTimer.LOAD.start(); if (!lexicon.isLoaded()) { try { lexicon.load(); } catch (IOException ioe) { LOGGER.severe("Can't load voice " + ioe); throw new Error(ioe); } } try { audioOutput = getAudioOutput(); } catch (IOException ioe) { LOGGER.severe("Can't load audio output handler for voice " + ioe); throw new Error(ioe); } if (outputQueue == null) { outputQueue = createOutputThread(); } try { loader(); } catch (IOException ioe) { LOGGER.severe("Can't load voice " + ioe); throw new Error(ioe); } BulkTimer.LOAD.stop(); if (isMetrics()) { BulkTimer.LOAD.show("loading " + toString() + " for " + getRunTitle()); } setLoaded(true); } /** * Returns true if this voice is loaded. * * @return <code>true</code> if the voice is loaded; * otherwise <code>false</code> */ public boolean isLoaded() { return loaded; } /** * Sets the loaded state * * @param loaded the new loaded state * otherwise <code>false</code> */ protected void setLoaded(boolean loaded) { this.loaded = loaded; } /** * Processes the given Utterance by passing it to each * UtteranceProcessor managed by this Voice. The * UtteranceProcessors are called in the order they were added to * the Voice. * * @param u the Utterance to process * * @throws ProcessException if an exception occurred while performing * operations on the Utterance */ public void processUtterance(Utterance u) throws ProcessException { UtteranceProcessor[] processors; if (utteranceProcessors == null) { return; } if (u == null) { throw new ProcessException("Utterance is null."); } runTimer.start("processing"); processors = new UtteranceProcessor[utteranceProcessors.size()]; processors = (UtteranceProcessor[]) utteranceProcessors.toArray(processors); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Processing Utterance: " + u.getString("input_text")); } try { for (int i = 0; i < processors.length && !u.getSpeakable().isCompleted(); i++) { runProcessor(processors[i], u, runTimer); } if (!u.getSpeakable().isCompleted()) { if (outputQueue == null) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("To AudioOutput"); } outputUtterance(u, runTimer); } else { runTimer.start("..post"); outputQueue.post(u); runTimer.stop("..post"); } } } catch (ProcessException pe) { System.err.println("Processing Utterance: " + pe); } catch (Exception e) { System.err.println("Trouble while processing utterance " + e); e.printStackTrace(); u.getSpeakable().cancelled(); } if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Done Processing Utterance: " + u.getString("input_text")); } runTimer.stop("processing"); if (dumpUtterance) { u.dump("Utterance"); } if (dumpRelations) { u.dumpRelations("Utterance"); } dumpASCII(u); } /** * Dumps the wave for the given utterance. * * @param utterance the utterance of interest */ private void dumpASCII(Utterance utterance) { if (waveDumpFile != null) { LPCResult lpcResult = (LPCResult) utterance.getObject("target_lpcres"); try { if (waveDumpFile.equals("-")) { lpcResult.dumpASCII(); } else { lpcResult.dumpASCII(waveDumpFile); } } catch (IOException ioe) { LOGGER.severe("Can't dump file to " + waveDumpFile + " " + ioe); throw new Error(ioe); } } } /** * Creates an output thread that will asynchronously * output utterances that are generated by this voice (and other * voices). * * @return the queue where utterances should be placed. 
*/ public static OutputQueue createOutputThread() { final OutputQueue queue = new OutputQueue(); Thread t = new Thread() { public void run() { Utterance utterance = null; do { utterance = queue.pend(); if (utterance != null) { Voice voice = utterance.getVoice(); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("OUT: " + utterance.getString("input_text")); } voice.outputUtterance(utterance, voice.threadTimer); } } while (utterance != null); } }; t.setDaemon(true); t.start(); return queue; } /** * Sends the given utterance to the audio output processor * associated with this voice. If the queue item associated with * this utterance is completed, then this set of utterances has * been cancelled or otherwise aborted and the utterance should * not be output. * * @param utterance the utterance to be output * @param timer the timer for gathering performance metrics * * @return true if the utterance was output properly; otherwise * false */ private boolean outputUtterance(Utterance utterance, BulkTimer timer) { boolean ok = true; FreeTTSSpeakable speakable = utterance.getSpeakable(); if (!speakable.isCompleted()) { if (utterance.isFirst()) { getAudioPlayer().reset(); speakable.started(); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(" --- started ---"); } } // log(" utt: " + utterance.getString("input_text")); try { if (!speakable.isCompleted()) { runProcessor(audioOutput, utterance, timer); } else { ok = false; } } catch (ProcessException pe) { ok = false; } if (ok && utterance.isLast()) { getAudioPlayer().drain(); speakable.completed(); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(" --- completed ---"); } } else if (!ok) { // getAudioPlayer().drain(); speakable.cancelled(); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(" --- cancelled ---"); } } else { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(" --- not last: " + speakable.getText() + " --- "); } } if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Calling speakable.completed() on " + speakable.getText()); } } else { ok = false; if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("STRANGE: speakable already completed: " + speakable.getText()); } } return ok; } /** * Runs the given utterance processor. * * @param processor the processor to run. If the processor * is null, it is ignored * @param utterance the utterance to process * * @throws ProcessException if an exceptin occurs while processing * the utterance */ private void runProcessor(UtteranceProcessor processor, Utterance utterance, BulkTimer timer) throws ProcessException { if (processor != null) { String processorName = ".." + processor.toString(); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(" Running " + processorName); } timer.start(processorName); processor.processUtterance(utterance); timer.stop(processorName); } } /** * Returns the tokenizer associated with this voice. * * @return the tokenizer */ public abstract Tokenizer getTokenizer(); /** * Return the list of UtteranceProcessor instances. Applications * should use this to obtain and modify the contents of the * UtteranceProcessor list. * * @return a List containing UtteranceProcessor instances */ public List getUtteranceProcessors() { return utteranceProcessors; } /** * Returns the feature set associated with this voice. * * @return the feature set. */ public FeatureSet getFeatures() { return features; } /** * Starts a batch of utterances. Utterances are sometimes * batched in groups for timing purposes. 
* * @see #endBatch */ public void startBatch() { runTimer.setVerbose(detailedMetrics); runTimer.start(); } /** * Ends a batch of utterances. * * @see #startBatch */ public void endBatch() { runTimer.stop(); if (metrics) { runTimer.show(getRunTitle() + " run"); threadTimer.show(getRunTitle() + " thread"); getAudioPlayer().showMetrics(); long totalMemory = Runtime.getRuntime().totalMemory(); LOGGER.info ("Memory Use : " + (totalMemory - Runtime.getRuntime().freeMemory()) / 1024 + "k of " + totalMemory / 1024 + "k"); } } /** * Sets the output queue for this voice. If no output queue is set * for the voice when the voice is loaded, a queue and thread will * be created when the voice is loaded. If the outputQueue is set * by an external entity by calling setOutputQueue, the caller is * responsible for shutting down the output thread. That is, if * you call 'setOutputQueue' then you are responsible for shutting * down the output thread on your own. This is necessary since the * output queue may be shared by a number of voices. * * <p>Utterances are placed on the * queue to be output by an output thread. This queue is * usually created via a call to 'createOutputThread' which * creates a thread that waits on the queue and sends the * output to the audio player associated with this voice. If * the queue is null, the output is rendered in the calling * thread. * * @param queue the output queue */ public void setOutputQueue(OutputQueue queue) { externalOutputQueue = true; outputQueue = queue; } /** * Returns the output queue associated with this voice. * * @return the output queue associated with this voice */ public OutputQueue getOutputQueue() { return outputQueue; } /** * Loads voice specific data. Subclasses of voice should * implement this to perform class specific loading. */ protected abstract void loader() throws IOException; /** * tokenizes the given the queue item. * * @return an iterator that will yield a series of utterances */ private Iterator tokenize(FreeTTSSpeakable speakable) { return new FreeTTSSpeakableTokenizer(speakable).iterator(); } /** * Converts the document to a string (a placeholder for more * sophisticated logic to be done). * * @param dom the jsml document * * @return the document as a string. */ private String documentToString(Document dom) { StringBuffer buf = new StringBuffer(); linearize(dom, buf); return buf.toString(); } /** * Appends the text for this node to the given StringBuffer. * * @param n the node to traverse in depth-first order * @param buf the buffer to append text to */ private void linearize(Node n, StringBuffer buf) { StringBuffer endText = processNode(n, buf); for (Node child = n.getFirstChild(); child != null; child = child.getNextSibling()) { linearize(child, buf); } if (endText != null) { buf.append(endText); } } /** * Adds text for just this node and returns any text that might * be needed to undo the effects of this node after it is * processed. * * @param n the node to traverse in depth-first order * @param buf the buffer to append text to * * @return a <code>String</code> containing text to undo the * effects of the node */ protected StringBuffer processNode(Node n, StringBuffer buf) { StringBuffer endText = null; int type = n.getNodeType(); switch (type) { case Node.ATTRIBUTE_NODE: break; case Node.DOCUMENT_NODE: break; case Node.ELEMENT_NODE: // endText = processElement((Element) n, buf); break; case Node.TEXT_NODE: buf.append(((Text) n).getData()); break; // Pass processing instructions (e.g., <?blah?> // right on to the synthesizer. 
These types of things // probably should not be used. Instead the 'engine' // element is probably the best thing to do. // case Node.PROCESSING_INSTRUCTION_NODE: break; // The document type had better be JSML. // case Node.DOCUMENT_TYPE_NODE: break; // I think NOTATION nodes are only DTD's. // case Node.NOTATION_NODE: break; // Should not get COMMENTS because the JSMLParser // ignores them. // case Node.COMMENT_NODE: break; // Should not get CDATA because the JSMLParser is // coalescing. // case Node.CDATA_SECTION_NODE: break; // Should not get ENTITY related notes because // entities are expanded by the JSMLParser // case Node.ENTITY_NODE: case Node.ENTITY_REFERENCE_NODE: break; // Should not get DOCUMENT_FRAGMENT nodes because I // [[[WDW]]] think they are only created via the API's // and cannot be defined via content. // case Node.DOCUMENT_FRAGMENT_NODE: break; default: break; } return endText; } /** * Dumps the voice in textual form. * * @param output where to send the formatted output * @param pad the initial padding * @param title the title to print when dumping out */ public void dump(PrintWriter output, int pad, String title) { Utilities.dump(output, pad, title); features.dump(output, pad + 4, title + " Features"); dumpProcessors(output, pad + 4, title + " Processors"); } /** * Dumps the voice processors. * * @param output where to send the formatted output * @param pad the initial padding * @param title the title to print when dumping out */ public void dumpProcessors(PrintWriter output, int pad, String title) { UtteranceProcessor[] processors; if (utteranceProcessors == null) { return; } processors = new UtteranceProcessor[utteranceProcessors.size()]; processors = (UtteranceProcessor[]) utteranceProcessors.toArray(processors); Utilities.dump(output, pad, title); for (int i = 0; i < processors.length; i++) { Utilities.dump(output, pad + 4, processors[i].toString()); } } /** * Returns a language/voice specific Feature Processor. * * @param name the name of the processor * * @return the processor associated with the name or null if none * could be found */ public FeatureProcessor getFeatureProcessor(String name) { return (FeatureProcessor) featureProcessors.get(name); } /** * Adds a language/voice specific Feature Processor to the set of * FeatureProcessors supported by this voice. * * @param name the name of the processor * @param fp the processor */ public void addFeatureProcessor(String name, FeatureProcessor fp) { featureProcessors.put(name, fp); } /** * Gets the state of the metrics mode. * * @return true if metrics mode is on */ public boolean isMetrics() { return metrics; } /** * Sets the metrics mode. * * @param metrics true if metrics mode should be on */ public void setMetrics(boolean metrics) { this.metrics = metrics; if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Metrics mode is " + metrics); } } /** * Gets the state of the detailedMetrics mode. * * @return true if detailedMetrics mode is on */ public boolean isDetailedMetrics() { return detailedMetrics; } /** * Sets the state of the detailedMetrics mode. * * @param detailedMetrics true if detailedMetrics mode should be on */ public void setDetailedMetrics(boolean detailedMetrics) { this.detailedMetrics = detailedMetrics; if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("DetailedMetrics mode is " + detailedMetrics); } } /** * Gets the state of the dumpUtterance mode. 
* * @return true if dumpUtterance mode is on */ public boolean isDumpUtterance() { return dumpUtterance; } /** * Sets the state of the dumpUtterance mode. * * @param dumpUtterance true if dumpUtterance mode should be on */ public void setDumpUtterance(boolean dumpUtterance) { this.dumpUtterance = dumpUtterance; if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("DumpUtterance mode is " + dumpUtterance); } } /** * Gets the state of the dumpRelations mode. * * @return true if dumpRelations mode is on */ public boolean isDumpRelations() { return dumpRelations; } /** * Sets the state of the dumpRelations mode. * * @param dumpRelations true if dumpRelations mode should be on */ public void setDumpRelations(boolean dumpRelations) { this.dumpRelations = dumpRelations; if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("DumpRelations mode is " + dumpRelations); } } /** * Sets the title for this run. * * @param runTitle the title for the run */ public void setRunTitle(String runTitle) { this.runTitle = runTitle; } /** * Gets the title for this run. * * @return the title for the run */ public String getRunTitle() { return runTitle; } /** * Given a phoneme and a feature name, returns the feature. * * @param phone the phoneme of interest * @param featureName the name of the feature of interest * * @return the feature with the given name */ public String getPhoneFeature(String phone, String featureName) { return null; } /** * Shuts down the voice processing. */ public void deallocate() { setLoaded(false); if (!externalAudioPlayer) { if (audioPlayer != null) { audioPlayer.close(); audioPlayer = null; } } if (!externalOutputQueue) { outputQueue.close(); } } /** * Sets the baseline pitch. * * @param hertz the baseline pitch in hertz */ public void setPitch(float hertz) { this.pitch = hertz; } /** * Retreives the baseline pitch. * * @return the baseline pitch in hertz */ public float getPitch() { return pitch; } /** * Sets the pitch range. * * @param range the range in hertz */ public void setPitchRange(float range) { this.range = range; } /** * Gets the pitch range. * * @return the range in hertz */ public float getPitchRange() { return range; } /** * Sets the pitch shift * * @param shift the pitch shift (1.0 is no shift) */ public void setPitchShift(float shift) { this.pitchShift = shift; } /** * Gets the pitch shift. * * @return the pitch shift */ public float getPitchShift() { return pitchShift; } /** * Sets the duration stretch * * @param stretch the duration stretch (1.0 is no stretch) */ public void setDurationStretch(float stretch) { this.durationStretch = stretch; } /** * Gets the duration Stretch * * @return the duration stretch */ public float getDurationStretch() { return durationStretch; } /** * Sets the rate of speech. * * @param wpm words per minute */ public void setRate(float wpm) { if (wpm > 0 && wpm < 1000) { setDurationStretch(nominalRate / wpm); } } /** * Gets the rate of speech. * * @return words per minute */ public float getRate() { return durationStretch * nominalRate; } /** * Sets the volume. * * @param vol the volume (0 to 1.0) */ public void setVolume(float vol) { volume = vol; } /** * Gets the volume. * * @return the volume (0 to 1.0) */ public float getVolume() { return volume; } /** * Gets the lexicon for this voice. * * @return the lexicon (or null if there is no lexicon) */ public Lexicon getLexicon() { return lexicon; } /** * Sets the lexicon to be used by this voice. 
* * @param lexicon the lexicon to use */ public void setLexicon(Lexicon lexicon) { this.lexicon = lexicon; } /** * Sets the dumpfile for this voice. * * @param waveDumpFile the dumpfile */ public void setWaveDumpFile(String waveDumpFile) { this.waveDumpFile = waveDumpFile; } /** * Gets the dumpfile for this voice. * * @return the dumpfile */ public String getWaveDumpFile() { return waveDumpFile; } /** * Sets the audio player associated with this voice. The caller is * responsible for closing this player. * * @param player the audio player */ public void setAudioPlayer(AudioPlayer player) { audioPlayer = player; externalAudioPlayer = true; } /** * Gets the default audio player for this voice. The return * value will be non-null only if the DEFAULT_AUDIO_PLAYER * system property has been set to the name of an AudioPlayer * class, and that class is able to be instantiated via a * no arg constructor. getAudioPlayer will automatically set * the audio player for this voice to the default audio player * if the audio player has not yet been set. * * @see #DEFAULT_AUDIO_PLAYER * @see #getAudioPlayer * @return the default AudioPlayer */ public AudioPlayer getDefaultAudioPlayer() throws InstantiationException { if (defaultAudioPlayer != null) { return defaultAudioPlayer; } String className = Utilities.getProperty( DEFAULT_AUDIO_PLAYER, DEFAULT_AUDIO_PLAYER_DEFAULT); try { Class cls = Class.forName(className); defaultAudioPlayer = (AudioPlayer) cls.newInstance(); return defaultAudioPlayer; } catch (ClassNotFoundException e) { throw new InstantiationException("Can't find class " + className); } catch (IllegalAccessException e) { throw new InstantiationException("Can't find class " + className); } catch (ClassCastException e) { throw new InstantiationException(className + " cannot be cast " + "to AudioPlayer"); } } /** * Gets the audio player associated with this voice. If the * audio player has not yet been set, the value will default * to the return value of getDefaultAudioPlayer. * * @see #getDefaultAudioPlayer * @return the audio player */ public AudioPlayer getAudioPlayer() { if (audioPlayer == null) { try { audioPlayer = getDefaultAudioPlayer(); } catch (InstantiationException e) { e.printStackTrace(); } } return audioPlayer; } /** * Get a resource for this voice. * By default, the voice is searched for in the package * to which the voice class belongs. Subclasses are free to * override this behaviour. */ protected URL getResource(String resource) { return this.getClass().getResource(resource); } /** * Set the name of this voice. * [[[TODO: any standard format to the name?]]] * * @param name the name to assign this voice */ protected void setName(String name) { this.name = name; } /** * Get the name of this voice. * * @return the name */ public String getName() { return name; } /** * Returns the name of this Voice. * * @return the name of this Voice */ public String toString() { return getName(); } /** * Set the gender of this voice. * * @param gender the gender to assign */ protected void setGender(Gender gender) { this.gender = gender; } /** * Get the gender of this voice. * * @return the gender of this voice */ public Gender getGender() { return gender; } /** * Set the age of this voice. * * @param age the age to assign */ protected void setAge(Age age) { this.age = age; } /** * Get the age of this voice. * * @return the age of this voice */ public Age getAge() { return age; } /** * Set the description of this voice. 
* * @param description the human readable description to assign */ protected void setDescription(String description) { this.description = description; } /** * Get the description of this voice. * * @return the human readable description of this voice */ public String getDescription() { return description; } /** * Set the locale of this voice. * * @param locale the locale of this voice. */ protected void setLocale(Locale locale) { this.locale = locale; } /** * Get the locale of this voice. * * @return the locale of this voice. */ public Locale getLocale() { return locale; } /** * Set the domain of this voice. * * @param domain the domain of this voice. For example, * &quot;general&quot;, &quot;time&quot;, or * &quot;weather&quot;. */ protected void setDomain(String domain) { this.domain = domain; } /** * Get the domain of this voice. * * @return the domain of this voice. For example, * &quot;general&quot;, &quot;time&quot;, or * &quot;weather&quot;. */ public String getDomain() { return domain; } /** * Sets the voice style. This parameter is designed for human * interpretation. Values might include "business", "casual", * "robotic", "breathy". * * @param style the style of this voice. */ public void setStyle(String style) { this.style = style; } /** * Gets the voice style. This parameter is designed for human * interpretation. Values might include "business", "casual", * "robotic", "breathy". * * @return the style of this voice. */ public String getStyle() { return style; } /** * Sets the organization which created this voice. For example * "cmu", "sun", ... * * @param organization the name of the organization */ protected void setOrganization(String organization) { this.organization = organization; } /** * Gets the organization which created this voice. For example * "cmu", "sun", ... * * @return the name of the organization */ public String getOrganization() { return organization; } /** * Returns the AudioOutput processor to be used by this voice. * Derived voices typically override this to customize behaviors. * * @return the audio output processor * * @throws IOException if an IO error occurs while getting * processor */ protected abstract UtteranceProcessor getAudioOutput() throws IOException; /** * Tokenizes a FreeTTSSpeakable. */ private class FreeTTSSpeakableTokenizer { FreeTTSSpeakable speakable; Tokenizer tok = getTokenizer(); /** * Constructor. * * @param speakable the queue item to be pretokenized */ public FreeTTSSpeakableTokenizer(FreeTTSSpeakable speakable) { this.speakable = speakable; if (speakable.isPlainText()) { tok.setInputText(speakable.getText()); } else if (speakable.isStream()) { Reader reader = new BufferedReader( new InputStreamReader(speakable.getInputStream())); tok.setInputReader(reader); } else if (speakable.isDocument()) { tok.setInputText(documentToString(speakable.getDocument())); } } /** * Returns an iterator for this text item. */ public Iterator iterator() { return new Iterator() { boolean first = true; Token savedToken = null; /** * Determines if there are more utterances. * * @return true if there are more tokens */ public boolean hasNext() { return savedToken != null || tok.hasMoreTokens(); } /** * Returns the next utterance. 
* * @return the next utterance (as an object) or * null if there are no utterances left */ public Object next() { ArrayList tokenList = new ArrayList(); Utterance utterance = null; if (savedToken != null) { tokenList.add(savedToken); savedToken = null; } while (tok.hasMoreTokens()) { Token token = tok.getNextToken(); if ((token.getWord().length() == 0) || (tokenList.size() > 500) || tok.isBreak()) { savedToken = token; break; } tokenList.add(token); } utterance = new Utterance(Voice.this, tokenList); utterance.setSpeakable(speakable); utterance.setFirst(first); first = false; boolean isLast = (!tok.hasMoreTokens() && (savedToken == null || savedToken.getWord().length() == 0)); utterance.setLast(isLast); return utterance; } public void remove() { throw new UnsupportedOperationException("remove"); } }; } } }
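// Usage sketch (not part of the original file): a minimal illustration of the prosody setters
// defined above. It assumes the caller already has an allocated Voice instance (for example one
// obtained from a VoiceManager, which is outside the scope of this snippet).
import com.sun.speech.freetts.Voice;

final class VoiceProsodyExample {
    /** Applies a conservative prosody profile to an already-allocated voice. */
    static void configure(Voice voice) {
        voice.setRate(150.0f);       // words per minute; converted internally to nominalRate / wpm
        voice.setPitch(120.0f);      // baseline pitch in hertz
        voice.setPitchRange(30.0f);  // pitch range in hertz
        voice.setVolume(0.8f);       // volume on a 0.0 .. 1.0 scale
    }
}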
/* * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.http.impl.cookie; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.http.annotation.NotThreadSafe; import org.apache.http.Header; import org.apache.http.HeaderElement; import org.apache.http.NameValuePair; import org.apache.http.cookie.ClientCookie; import org.apache.http.cookie.Cookie; import org.apache.http.cookie.CookieAttributeHandler; import org.apache.http.cookie.CookieOrigin; import org.apache.http.cookie.CookieSpec; import org.apache.http.cookie.MalformedCookieException; import org.apache.http.cookie.SM; import org.apache.http.message.BufferedHeader; import org.apache.http.util.CharArrayBuffer; /** * RFC 2965 compliant {@link CookieSpec} implementation. 
* * @since 4.0 */ @NotThreadSafe // superclass is @NotThreadSafe public class RFC2965Spec extends RFC2109Spec { /** * Default constructor * */ public RFC2965Spec() { this(null, false); } public RFC2965Spec(final String[] datepatterns, boolean oneHeader) { super(datepatterns, oneHeader); registerAttribHandler(ClientCookie.DOMAIN_ATTR, new RFC2965DomainAttributeHandler()); registerAttribHandler(ClientCookie.PORT_ATTR, new RFC2965PortAttributeHandler()); registerAttribHandler(ClientCookie.COMMENTURL_ATTR, new RFC2965CommentUrlAttributeHandler()); registerAttribHandler(ClientCookie.DISCARD_ATTR, new RFC2965DiscardAttributeHandler()); registerAttribHandler(ClientCookie.VERSION_ATTR, new RFC2965VersionAttributeHandler()); } @Override public List<Cookie> parse( final Header header, CookieOrigin origin) throws MalformedCookieException { if (header == null) { throw new IllegalArgumentException("Header may not be null"); } if (origin == null) { throw new IllegalArgumentException("Cookie origin may not be null"); } if (!header.getName().equalsIgnoreCase(SM.SET_COOKIE2)) { throw new MalformedCookieException("Unrecognized cookie header '" + header.toString() + "'"); } origin = adjustEffectiveHost(origin); HeaderElement[] elems = header.getElements(); return createCookies(elems, origin); } @Override protected List<Cookie> parse( final HeaderElement[] elems, CookieOrigin origin) throws MalformedCookieException { origin = adjustEffectiveHost(origin); return createCookies(elems, origin); } private List<Cookie> createCookies( final HeaderElement[] elems, final CookieOrigin origin) throws MalformedCookieException { List<Cookie> cookies = new ArrayList<Cookie>(elems.length); for (HeaderElement headerelement : elems) { String name = headerelement.getName(); String value = headerelement.getValue(); if (name == null || name.length() == 0) { throw new MalformedCookieException("Cookie name may not be empty"); } BasicClientCookie2 cookie = new BasicClientCookie2(name, value); cookie.setPath(getDefaultPath(origin)); cookie.setDomain(getDefaultDomain(origin)); cookie.setPorts(new int [] { origin.getPort() }); // cycle through the parameters NameValuePair[] attribs = headerelement.getParameters(); // Eliminate duplicate attributes. 
The first occurrence takes precedence // See RFC2965: 3.2 Origin Server Role Map<String, NameValuePair> attribmap = new HashMap<String, NameValuePair>(attribs.length); for (int j = attribs.length - 1; j >= 0; j--) { NameValuePair param = attribs[j]; attribmap.put(param.getName().toLowerCase(Locale.ENGLISH), param); } for (Map.Entry<String, NameValuePair> entry : attribmap.entrySet()) { NameValuePair attrib = entry.getValue(); String s = attrib.getName().toLowerCase(Locale.ENGLISH); cookie.setAttribute(s, attrib.getValue()); CookieAttributeHandler handler = findAttribHandler(s); if (handler != null) { handler.parse(cookie, attrib.getValue()); } } cookies.add(cookie); } return cookies; } @Override public void validate(final Cookie cookie, CookieOrigin origin) throws MalformedCookieException { if (cookie == null) { throw new IllegalArgumentException("Cookie may not be null"); } if (origin == null) { throw new IllegalArgumentException("Cookie origin may not be null"); } origin = adjustEffectiveHost(origin); super.validate(cookie, origin); } @Override public boolean match(final Cookie cookie, CookieOrigin origin) { if (cookie == null) { throw new IllegalArgumentException("Cookie may not be null"); } if (origin == null) { throw new IllegalArgumentException("Cookie origin may not be null"); } origin = adjustEffectiveHost(origin); return super.match(cookie, origin); } /** * Adds valid Port attribute value, e.g. "8000,8001,8002" */ @Override protected void formatCookieAsVer(final CharArrayBuffer buffer, final Cookie cookie, int version) { super.formatCookieAsVer(buffer, cookie, version); // format port attribute if (cookie instanceof ClientCookie) { // Test if the port attribute as set by the origin server is not blank String s = ((ClientCookie) cookie).getAttribute(ClientCookie.PORT_ATTR); if (s != null) { buffer.append("; $Port"); buffer.append("=\""); if (s.trim().length() > 0) { int[] ports = cookie.getPorts(); if (ports != null) { for (int i = 0, len = ports.length; i < len; i++) { if (i > 0) { buffer.append(","); } buffer.append(Integer.toString(ports[i])); } } } buffer.append("\""); } } } /** * Set 'effective host name' as defined in RFC 2965. * <p> * If a host name contains no dots, the effective host name is * that name with the string .local appended to it. Otherwise * the effective host name is the same as the host name. Note * that all effective host names contain at least one dot. * * @param origin origin where cookie is received from or being sent to. */ private static CookieOrigin adjustEffectiveHost(final CookieOrigin origin) { String host = origin.getHost(); // Test if the host name appears to be a fully qualified DNS name, // IPv4 address or IPv6 address boolean isLocalHost = true; for (int i = 0; i < host.length(); i++) { char ch = host.charAt(i); if (ch == '.' || ch == ':') { isLocalHost = false; break; } } if (isLocalHost) { host += ".local"; return new CookieOrigin( host, origin.getPort(), origin.getPath(), origin.isSecure()); } else { return origin; } } @Override public int getVersion() { return 1; } @Override public Header getVersionHeader() { CharArrayBuffer buffer = new CharArrayBuffer(40); buffer.append(SM.COOKIE2); buffer.append(": "); buffer.append("$Version="); buffer.append(Integer.toString(getVersion())); return new BufferedHeader(buffer); } @Override public String toString() { return "rfc2965"; } }
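// Usage sketch (not part of the original file): parsing and validating a Set-Cookie2 header with
// the spec defined above. BasicHeader and the header value are illustrative assumptions; the
// RFC2965Spec API itself is exactly the one shown.
import java.util.List;
import org.apache.http.Header;
import org.apache.http.cookie.Cookie;
import org.apache.http.cookie.CookieOrigin;
import org.apache.http.cookie.MalformedCookieException;
import org.apache.http.impl.cookie.RFC2965Spec;
import org.apache.http.message.BasicHeader;

final class Rfc2965ParseExample {
    static List<Cookie> parseAndValidate() throws MalformedCookieException {
        RFC2965Spec spec = new RFC2965Spec();
        Header header = new BasicHeader("Set-Cookie2",
                "name=value; Version=1; Domain=.example.org; Port=\"80\"");
        // A host name without a dot would get ".local" appended by adjustEffectiveHost().
        CookieOrigin origin = new CookieOrigin("www.example.org", 80, "/", false);
        List<Cookie> cookies = spec.parse(header, origin);
        for (Cookie cookie : cookies) {
            spec.validate(cookie, origin);
        }
        return cookies;
    }
}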
/*************************************************************************** * Copyright 2017 Kieker Project (http://kieker-monitoring.net) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.common.record.misc; import java.nio.BufferOverflowException; import kieker.common.record.AbstractMonitoringRecord; import kieker.common.record.IMonitoringRecord; import kieker.common.record.io.IValueDeserializer; import kieker.common.record.io.IValueSerializer; import kieker.common.util.registry.IRegistry; /** * @author Jan Waller * API compatibility: Kieker 1.13.0 * * @since 1.7 */ public class KiekerMetadataRecord extends AbstractMonitoringRecord implements IMonitoringRecord.Factory, IMonitoringRecord.BinaryFactory { private static final long serialVersionUID = 8241152536143822747L; /** Descriptive definition of the serialization size of the record. */ public static final int SIZE = TYPE_SIZE_STRING // KiekerMetadataRecord.version + TYPE_SIZE_STRING // KiekerMetadataRecord.controllerName + TYPE_SIZE_STRING // KiekerMetadataRecord.hostname + TYPE_SIZE_INT // KiekerMetadataRecord.experimentId + TYPE_SIZE_BOOLEAN // KiekerMetadataRecord.debugMode + TYPE_SIZE_LONG // KiekerMetadataRecord.timeOffset + TYPE_SIZE_STRING // KiekerMetadataRecord.timeUnit + TYPE_SIZE_LONG // KiekerMetadataRecord.numberOfRecords ; public static final Class<?>[] TYPES = { String.class, // KiekerMetadataRecord.version String.class, // KiekerMetadataRecord.controllerName String.class, // KiekerMetadataRecord.hostname int.class, // KiekerMetadataRecord.experimentId boolean.class, // KiekerMetadataRecord.debugMode long.class, // KiekerMetadataRecord.timeOffset String.class, // KiekerMetadataRecord.timeUnit long.class, // KiekerMetadataRecord.numberOfRecords }; /** user-defined constants. */ public static final String NO_CONTROLLERNAME = "<no-controller-name>"; public static final String NO_HOSTNAME = "<no-hostname>"; public static final String NO_TIMESOURCE = "<no-timesource>"; public static final String NO_TIMEUNIT = "NANOSECONDS"; /** default constants. */ public static final String VERSION = kieker.common.util.Version.getVERSION(); public static final String CONTROLLER_NAME = NO_CONTROLLERNAME; public static final String HOSTNAME = NO_HOSTNAME; public static final int EXPERIMENT_ID = 0; public static final boolean DEBUG_MODE = false; public static final long TIME_OFFSET = 0L; public static final String TIME_UNIT = NO_TIMEUNIT; public static final long NUMBER_OF_RECORDS = 0L; /** property name array. */ private static final String[] PROPERTY_NAMES = { "version", "controllerName", "hostname", "experimentId", "debugMode", "timeOffset", "timeUnit", "numberOfRecords", }; /** property declarations. 
*/ private final String version; private final String controllerName; private final String hostname; private final int experimentId; private final boolean debugMode; private final long timeOffset; private final String timeUnit; private final long numberOfRecords; /** * Creates a new instance of this class using the given parameters. * * @param version * version * @param controllerName * controllerName * @param hostname * hostname * @param experimentId * experimentId * @param debugMode * debugMode * @param timeOffset * timeOffset * @param timeUnit * timeUnit * @param numberOfRecords * numberOfRecords */ public KiekerMetadataRecord(final String version, final String controllerName, final String hostname, final int experimentId, final boolean debugMode, final long timeOffset, final String timeUnit, final long numberOfRecords) { this.version = version == null ? VERSION : version; this.controllerName = controllerName == null ? NO_CONTROLLERNAME : controllerName; this.hostname = hostname == null ? NO_HOSTNAME : hostname; this.experimentId = experimentId; this.debugMode = debugMode; this.timeOffset = timeOffset; this.timeUnit = timeUnit == null ? NO_TIMEUNIT : timeUnit; this.numberOfRecords = numberOfRecords; } /** * This constructor converts the given array into a record. * It is recommended to use the array which is the result of a call to {@link #toArray()}. * * @param values * The values for the record. * * @deprecated since 1.13. Use {@link #KiekerMetadataRecord(IValueDeserializer)} instead. */ @Deprecated public KiekerMetadataRecord(final Object[] values) { // NOPMD (direct store of values) AbstractMonitoringRecord.checkArray(values, TYPES); this.version = (String) values[0]; this.controllerName = (String) values[1]; this.hostname = (String) values[2]; this.experimentId = (Integer) values[3]; this.debugMode = (Boolean) values[4]; this.timeOffset = (Long) values[5]; this.timeUnit = (String) values[6]; this.numberOfRecords = (Long) values[7]; } /** * This constructor uses the given array to initialize the fields of this record. * * @param values * The values for the record. * @param valueTypes * The types of the elements in the first array. * * @deprecated since 1.13. Use {@link #KiekerMetadataRecord(IValueDeserializer)} instead. */ @Deprecated protected KiekerMetadataRecord(final Object[] values, final Class<?>[] valueTypes) { // NOPMD (values stored directly) AbstractMonitoringRecord.checkArray(values, valueTypes); this.version = (String) values[0]; this.controllerName = (String) values[1]; this.hostname = (String) values[2]; this.experimentId = (Integer) values[3]; this.debugMode = (Boolean) values[4]; this.timeOffset = (Long) values[5]; this.timeUnit = (String) values[6]; this.numberOfRecords = (Long) values[7]; } /** * @param deserializer * The deserializer to use */ public KiekerMetadataRecord(final IValueDeserializer deserializer) { this.version = deserializer.getString(); this.controllerName = deserializer.getString(); this.hostname = deserializer.getString(); this.experimentId = deserializer.getInt(); this.debugMode = deserializer.getBoolean(); this.timeOffset = deserializer.getLong(); this.timeUnit = deserializer.getString(); this.numberOfRecords = deserializer.getLong(); } /** * {@inheritDoc} * * @deprecated since 1.13. Use {@link #serialize(IValueSerializer)} with an array serializer instead. 
*/ @Override @Deprecated public Object[] toArray() { return new Object[] { this.getVersion(), this.getControllerName(), this.getHostname(), this.getExperimentId(), this.isDebugMode(), this.getTimeOffset(), this.getTimeUnit(), this.getNumberOfRecords() }; } /** * {@inheritDoc} */ @Override public void registerStrings(final IRegistry<String> stringRegistry) { // NOPMD (generated code) stringRegistry.get(this.getVersion()); stringRegistry.get(this.getControllerName()); stringRegistry.get(this.getHostname()); stringRegistry.get(this.getTimeUnit()); } /** * {@inheritDoc} */ @Override public void serialize(final IValueSerializer serializer) throws BufferOverflowException { //super.serialize(serializer); serializer.putString(this.getVersion()); serializer.putString(this.getControllerName()); serializer.putString(this.getHostname()); serializer.putInt(this.getExperimentId()); serializer.putBoolean(this.isDebugMode()); serializer.putLong(this.getTimeOffset()); serializer.putString(this.getTimeUnit()); serializer.putLong(this.getNumberOfRecords()); } /** * {@inheritDoc} */ @Override public Class<?>[] getValueTypes() { return TYPES; // NOPMD } /** * {@inheritDoc} */ @Override public String[] getValueNames() { return PROPERTY_NAMES; // NOPMD } /** * {@inheritDoc} */ @Override public int getSize() { return SIZE; } /** * {@inheritDoc} * * @deprecated This record uses the {@link kieker.common.record.IMonitoringRecord.Factory} mechanism. Hence, this method is not implemented. */ @Override @Deprecated public void initFromArray(final Object[] values) { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public boolean equals(final Object obj) { if (obj == null) return false; if (obj == this) return true; if (obj.getClass() != this.getClass()) return false; final KiekerMetadataRecord castedRecord = (KiekerMetadataRecord) obj; if (this.getLoggingTimestamp() != castedRecord.getLoggingTimestamp()) return false; if (!this.getVersion().equals(castedRecord.getVersion())) return false; if (!this.getControllerName().equals(castedRecord.getControllerName())) return false; if (!this.getHostname().equals(castedRecord.getHostname())) return false; if (this.getExperimentId() != castedRecord.getExperimentId()) return false; if (this.isDebugMode() != castedRecord.isDebugMode()) return false; if (this.getTimeOffset() != castedRecord.getTimeOffset()) return false; if (!this.getTimeUnit().equals(castedRecord.getTimeUnit())) return false; if (this.getNumberOfRecords() != castedRecord.getNumberOfRecords()) return false; return true; } public final String getVersion() { return this.version; } public final String getControllerName() { return this.controllerName; } public final String getHostname() { return this.hostname; } public final int getExperimentId() { return this.experimentId; } public final boolean isDebugMode() { return this.debugMode; } public final long getTimeOffset() { return this.timeOffset; } public final String getTimeUnit() { return this.timeUnit; } public final long getNumberOfRecords() { return this.numberOfRecords; } }
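// Usage sketch (not part of the original file): shows how null string arguments fall back to the
// default constants declared above. Serializer/deserializer wiring is omitted because it depends
// on the concrete Kieker I/O backend in use.
import kieker.common.record.misc.KiekerMetadataRecord;

final class KiekerMetadataRecordExample {
    static KiekerMetadataRecord create() {
        KiekerMetadataRecord record = new KiekerMetadataRecord(
                null,            // version         -> KiekerMetadataRecord.VERSION
                "controller-1",  // controllerName
                null,            // hostname        -> "<no-hostname>"
                1,               // experimentId
                false,           // debugMode
                0L,              // timeOffset
                "NANOSECONDS",   // timeUnit
                0L);             // numberOfRecords
        // record.getHostname() now returns the NO_HOSTNAME constant "<no-hostname>".
        return record;
    }
}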
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a; import javax.annotation.Nullable; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; import com.amazonaws.services.s3.model.CompleteMultipartUploadResult; import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; import com.amazonaws.services.s3.model.MultipartUpload; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PartETag; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.services.s3.model.SelectObjectContentRequest; import com.amazonaws.services.s3.model.SelectObjectContentResult; import com.amazonaws.services.s3.model.UploadPartRequest; import com.amazonaws.services.s3.model.UploadPartResult; import com.amazonaws.services.s3.transfer.model.UploadResult; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.fs.s3a.api.RequestFactory; import org.apache.hadoop.fs.s3a.impl.StoreContext; import org.apache.hadoop.fs.s3a.statistics.S3AStatisticsContext; import org.apache.hadoop.fs.s3a.select.SelectBinding; import org.apache.hadoop.fs.store.audit.AuditSpan; import org.apache.hadoop.fs.store.audit.AuditSpanSource; import org.apache.hadoop.util.DurationInfo; import org.apache.hadoop.util.functional.CallableRaisingIOE; import static org.apache.hadoop.util.Preconditions.checkNotNull; import static org.apache.hadoop.fs.s3a.Invoker.*; import static org.apache.hadoop.fs.store.audit.AuditingFunctions.withinAuditSpan; /** * Helper for low-level operations against an S3 Bucket for writing data, * creating and committing pending writes, and other S3-layer operations. * <p> * It hides direct access to the S3 API * and is a location where the object operations can be evolved/enhanced. 
* <p> * Features * <ul> * <li>Methods to create and submit requests to S3, so avoiding * all direct interaction with the AWS APIs.</li> * <li>Some extra preflight checks of arguments, so failing fast on * errors.</li> * <li>Callbacks to let the FS know of events in the output stream * upload process.</li> * <li>Other low-level access to S3 functions, for private use.</li> * <li>Failure handling, including converting exceptions to IOEs.</li> * <li>Integration with instrumentation.</li> * <li>Evolution to add more low-level operations, such as S3 select.</li> * </ul> * * This API is for internal use only. * Span scoping: This helper is instantiated with span; it will be used * before operations which query/update S3 * * History * <pre> * - A nested class in S3AFileSystem * - Single shared instance created and reused. * - [HADOOP-13786] A separate class, single instance in S3AFS * - [HDFS-13934] Split into interface and implementation * - [HADOOP-15711] Adds audit tracking; one instance per use. * </pre> */ @InterfaceAudience.Private @InterfaceStability.Unstable public class WriteOperationHelper implements WriteOperations { private static final Logger LOG = LoggerFactory.getLogger(WriteOperationHelper.class); /** * Owning filesystem. */ private final S3AFileSystem owner; /** * Invoker for operations; uses the S3A retry policy and calls int * {@link #operationRetried(String, Exception, int, boolean)} on retries. */ private final Invoker invoker; /** Configuration of the owner. This is a reference, not a copy. */ private final Configuration conf; /** Bucket of the owner FS. */ private final String bucket; /** * statistics context. */ private final S3AStatisticsContext statisticsContext; /** * Store Context; extracted from owner. */ private final StoreContext storeContext; /** * Source of Audit spans. */ private final AuditSpanSource auditSpanSource; /** * Audit Span. */ private AuditSpan auditSpan; /** * Factory for AWS requests. */ private final RequestFactory requestFactory; /** * Constructor. * @param owner owner FS creating the helper * @param conf Configuration object * @param statisticsContext statistics context * @param auditSpanSource source of spans * @param auditSpan span to activate * */ protected WriteOperationHelper(S3AFileSystem owner, Configuration conf, S3AStatisticsContext statisticsContext, final AuditSpanSource auditSpanSource, final AuditSpan auditSpan) { this.owner = owner; this.invoker = new Invoker(new S3ARetryPolicy(conf), this::operationRetried); this.conf = conf; this.statisticsContext = statisticsContext; this.storeContext = owner.createStoreContext(); this.bucket = owner.getBucket(); this.auditSpanSource = auditSpanSource; this.auditSpan = checkNotNull(auditSpan); this.requestFactory = owner.getRequestFactory(); } /** * Callback from {@link Invoker} when an operation is retried. * @param text text of the operation * @param ex exception * @param retries number of retries * @param idempotent is the method idempotent */ void operationRetried(String text, Exception ex, int retries, boolean idempotent) { LOG.info("{}: Retried {}: {}", text, retries, ex.toString()); LOG.debug("Stack", ex); owner.operationRetried(text, ex, retries, idempotent); } /** * Execute a function with retry processing. * Also activates the current span. 
* @param <T> type of return value * @param action action to execute (used in error messages) * @param path path of work (used in error messages) * @param idempotent does the operation have semantics * which mean that it can be retried even if it was already executed? * @param operation operation to execute * @return the result of the call * @throws IOException any IOE raised, or translated exception */ public <T> T retry(String action, String path, boolean idempotent, CallableRaisingIOE<T> operation) throws IOException { activateAuditSpan(); return invoker.retry(action, path, idempotent, operation); } /** * Get the audit span this object was created with. * @return the audit span */ public AuditSpan getAuditSpan() { return auditSpan; } /** * Activate the audit span. * @return the span */ private AuditSpan activateAuditSpan() { return auditSpan.activate(); } /** * Deactivate the audit span. */ private void deactivateAuditSpan() { auditSpan.deactivate(); } /** * Create a {@link PutObjectRequest} request against the specific key. * @param destKey destination key * @param inputStream source data. * @param length size, if known. Use -1 for not known * @param headers optional map of custom headers. * @return the request */ @Retries.OnceRaw public PutObjectRequest createPutObjectRequest(String destKey, InputStream inputStream, long length, final Map<String, String> headers) { activateAuditSpan(); ObjectMetadata objectMetadata = newObjectMetadata(length); if (headers != null) { objectMetadata.setUserMetadata(headers); } return getRequestFactory().newPutObjectRequest( destKey, objectMetadata, inputStream); } /** * Create a {@link PutObjectRequest} request to upload a file. * @param dest key to PUT to. * @param sourceFile source file * @return the request */ @Retries.OnceRaw public PutObjectRequest createPutObjectRequest(String dest, File sourceFile) { Preconditions.checkState(sourceFile.length() < Integer.MAX_VALUE, "File length is too big for a single PUT upload"); activateAuditSpan(); return getRequestFactory(). newPutObjectRequest(dest, newObjectMetadata((int) sourceFile.length()), sourceFile); } /** * Callback on a successful write. * @param length length of the write */ public void writeSuccessful(long length) { } /** * Callback on a write failure. * @param ex Any exception raised which triggered the failure. */ public void writeFailed(Exception ex) { LOG.debug("Write to {} failed", this, ex); } /** * Create a new object metadata instance. * Any standard metadata headers are added here, for example: * encryption. * @param length size, if known. Use -1 for not known * @return a new metadata instance */ public ObjectMetadata newObjectMetadata(long length) { return getRequestFactory().newObjectMetadata(length); } /** * Start the multipart upload process. * Retry policy: retrying, translated. * @param destKey destination of upload * @return the upload result containing the ID * @throws IOException IO problem */ @Retries.RetryTranslated public String initiateMultiPartUpload(String destKey) throws IOException { LOG.debug("Initiating Multipart upload to {}", destKey); try (AuditSpan span = activateAuditSpan()) { return retry("initiate MultiPartUpload", destKey, true, () -> { final InitiateMultipartUploadRequest initiateMPURequest = getRequestFactory().newMultipartUploadRequest( destKey); return owner.initiateMultipartUpload(initiateMPURequest) .getUploadId(); }); } } /** * Finalize a multipart PUT operation. 
* This completes the upload, and, if that works, calls * {@link S3AFileSystem#finishedWrite(String, long, String, String)} * to update the filesystem. * Retry policy: retrying, translated. * @param destKey destination of the commit * @param uploadId multipart operation Id * @param partETags list of partial uploads * @param length length of the upload * @param retrying retrying callback * @return the result of the operation. * @throws IOException on problems. */ @Retries.RetryTranslated private CompleteMultipartUploadResult finalizeMultipartUpload( String destKey, String uploadId, List<PartETag> partETags, long length, Retried retrying) throws IOException { if (partETags.isEmpty()) { throw new PathIOException(destKey, "No upload parts in multipart upload"); } try (AuditSpan span = activateAuditSpan()) { CompleteMultipartUploadResult uploadResult; uploadResult = invoker.retry("Completing multipart upload", destKey, true, retrying, () -> { final CompleteMultipartUploadRequest request = getRequestFactory().newCompleteMultipartUploadRequest( destKey, uploadId, partETags); return owner.getAmazonS3Client().completeMultipartUpload( request); }); owner.finishedWrite(destKey, length, uploadResult.getETag(), uploadResult.getVersionId()); return uploadResult; } } /** * This completes a multipart upload to the destination key via * {@code finalizeMultipartUpload()}. * Retry policy: retrying, translated. * Retries increment the {@code errorCount} counter. * @param destKey destination * @param uploadId multipart operation Id * @param partETags list of partial uploads * @param length length of the upload * @param errorCount a counter incremented by 1 on every error; for * use in statistics * @return the result of the operation. * @throws IOException if problems arose which could not be retried, or * the retry count was exceeded */ @Retries.RetryTranslated public CompleteMultipartUploadResult completeMPUwithRetries( String destKey, String uploadId, List<PartETag> partETags, long length, AtomicInteger errorCount) throws IOException { checkNotNull(uploadId); checkNotNull(partETags); LOG.debug("Completing multipart upload {} with {} parts", uploadId, partETags.size()); return finalizeMultipartUpload(destKey, uploadId, partETags, length, (text, e, r, i) -> errorCount.incrementAndGet() ); } /** * Abort a multipart upload operation. * @param destKey destination key of the upload * @param uploadId multipart operation Id * @param shouldRetry should failures trigger a retry? * @param retrying callback invoked on every retry * @throws IOException failure to abort * @throws FileNotFoundException if the abort ID is unknown */ @Retries.RetryTranslated public void abortMultipartUpload(String destKey, String uploadId, boolean shouldRetry, Retried retrying) throws IOException { if (shouldRetry) { // retrying option invoker.retry("Aborting multipart upload ID " + uploadId, destKey, true, retrying, withinAuditSpan(getAuditSpan(), () -> owner.abortMultipartUpload( destKey, uploadId))); } else { // single pass attempt. once("Aborting multipart upload ID " + uploadId, destKey, withinAuditSpan(getAuditSpan(), () -> owner.abortMultipartUpload( destKey, uploadId))); } } /** * Abort a multipart commit operation. * @param upload upload to abort. * @throws IOException on problems. 
*/ @Retries.RetryTranslated public void abortMultipartUpload(MultipartUpload upload) throws IOException { invoker.retry("Aborting multipart commit", upload.getKey(), true, withinAuditSpan(getAuditSpan(), () -> owner.abortMultipartUpload(upload))); } /** * Abort multipart uploads under a path: limited to the first * few hundred. * @param prefix prefix for uploads to abort * @return a count of aborts * @throws IOException trouble; FileNotFoundExceptions are swallowed. */ @Retries.RetryTranslated public int abortMultipartUploadsUnderPath(String prefix) throws IOException { LOG.debug("Aborting multipart uploads under {}", prefix); int count = 0; List<MultipartUpload> multipartUploads = listMultipartUploads(prefix); LOG.debug("Number of outstanding uploads: {}", multipartUploads.size()); for (MultipartUpload upload: multipartUploads) { try { abortMultipartUpload(upload); count++; } catch (FileNotFoundException e) { LOG.debug("Already aborted: {}", upload.getKey(), e); } } return count; } @Override @Retries.RetryTranslated public List<MultipartUpload> listMultipartUploads(final String prefix) throws IOException { activateAuditSpan(); return owner.listMultipartUploads(prefix); } /** * Abort a multipart commit operation. * @param destKey destination key of ongoing operation * @param uploadId multipart operation Id * @throws IOException on problems. * @throws FileNotFoundException if the abort ID is unknown */ @Override @Retries.RetryTranslated public void abortMultipartCommit(String destKey, String uploadId) throws IOException { abortMultipartUpload(destKey, uploadId, true, invoker.getRetryCallback()); } /** * Create and initialize a part request of a multipart upload. * Exactly one of: {@code uploadStream} or {@code sourceFile} * must be specified. * A subset of the file may be posted, by providing the starting point * in {@code offset} and a length of block in {@code size} equal to * or less than the remaining bytes. * The part number must be less than 10000. * Retry policy is once-translated; retrying would be too much effort. * @param destKey destination key of ongoing operation * @param uploadId ID of ongoing upload * @param partNumber current part number of the upload * @param size amount of data * @param uploadStream source of data to upload * @param sourceFile optional source file. * @param offset offset in file to start reading. * @return the request. * @throws IllegalArgumentException if the parameters are invalid. * @throws PathIOException if the part number is out of range. */ @Override @Retries.OnceTranslated public UploadPartRequest newUploadPartRequest( String destKey, String uploadId, int partNumber, int size, InputStream uploadStream, File sourceFile, Long offset) throws IOException { return once("upload part request", destKey, withinAuditSpan(getAuditSpan(), () -> getRequestFactory().newUploadPartRequest( destKey, uploadId, partNumber, size, uploadStream, sourceFile, offset))); } /** * The toString method is intended to be used in logging/toString calls. * @return a string description. */ @Override public String toString() { final StringBuilder sb = new StringBuilder( "WriteOperationHelper {bucket=").append(bucket); sb.append('}'); return sb.toString(); } /** * PUT an object directly (i.e. not via the transfer manager). * Byte length is calculated from the file length, or, if there is no * file, from the content length of the header. 
* @param putObjectRequest the request * @return the upload initiated * @throws IOException on problems */ @Retries.RetryTranslated public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws IOException { return retry("Writing Object", putObjectRequest.getKey(), true, withinAuditSpan(getAuditSpan(), () -> owner.putObjectDirect(putObjectRequest))); } /** * PUT an object via the transfer manager. * @param putObjectRequest the request * @return the result of the operation * @throws IOException on problems */ @Retries.RetryTranslated public UploadResult uploadObject(PutObjectRequest putObjectRequest) throws IOException { // no retry; rely on xfer manager logic return retry("Writing Object", putObjectRequest.getKey(), true, withinAuditSpan(getAuditSpan(), () -> owner.executePut(putObjectRequest, null))); } /** * Revert a commit by deleting the file. * Relies on retry code in filesystem * @throws IOException on problems * @param destKey destination key */ @Retries.OnceTranslated public void revertCommit(String destKey) throws IOException { once("revert commit", destKey, withinAuditSpan(getAuditSpan(), () -> { Path destPath = owner.keyToQualifiedPath(destKey); owner.deleteObjectAtPath(destPath, destKey, true); owner.maybeCreateFakeParentDirectory(destPath); })); } /** * This completes a multipart upload to the destination key via * {@code finalizeMultipartUpload()}. * Retry policy: retrying, translated. * Retries increment the {@code errorCount} counter. * @param destKey destination * @param uploadId multipart operation Id * @param partETags list of partial uploads * @param length length of the upload * @return the result of the operation. * @throws IOException if problems arose which could not be retried, or * the retry count was exceeded */ @Retries.RetryTranslated public CompleteMultipartUploadResult commitUpload( String destKey, String uploadId, List<PartETag> partETags, long length) throws IOException { checkNotNull(uploadId); checkNotNull(partETags); LOG.debug("Completing multipart upload {} with {} parts", uploadId, partETags.size()); return finalizeMultipartUpload(destKey, uploadId, partETags, length, Invoker.NO_OP ); } /** * Upload part of a multi-partition file. * @param request request * @return the result of the operation. * @throws IOException on problems */ @Retries.RetryTranslated public UploadPartResult uploadPart(UploadPartRequest request) throws IOException { return retry("upload part #" + request.getPartNumber() + " upload ID " + request.getUploadId(), request.getKey(), true, withinAuditSpan(getAuditSpan(), () -> owner.uploadPart(request))); } /** * Get the configuration of this instance; essentially the owning * filesystem configuration. * @return the configuration. */ public Configuration getConf() { return conf; } /** * Create a S3 Select request for the destination path. * This does not build the query. * @param path pre-qualified path for query * @return the request */ public SelectObjectContentRequest newSelectRequest(Path path) { try (AuditSpan span = getAuditSpan()) { return getRequestFactory().newSelectRequest( storeContext.pathToKey(path)); } } /** * Execute an S3 Select operation. * On a failure, the request is only logged at debug to avoid the * select exception being printed. * @param source source for selection * @param request Select request to issue. 
* @param action the action for use in exception creation * @return response * @throws IOException failure */ @Retries.RetryTranslated public SelectObjectContentResult select( final Path source, final SelectObjectContentRequest request, final String action) throws IOException { // no setting of span here as the select binding is (statically) created // without any span. String bucketName = request.getBucketName(); Preconditions.checkArgument(bucket.equals(bucketName), "wrong bucket: %s", bucketName); if (LOG.isDebugEnabled()) { LOG.debug("Initiating select call {} {}", source, request.getExpression()); LOG.debug(SelectBinding.toString(request)); } return invoker.retry( action, source.toString(), true, withinAuditSpan(getAuditSpan(), () -> { try (DurationInfo ignored = new DurationInfo(LOG, "S3 Select operation")) { try { return owner.getAmazonS3Client().selectObjectContent(request); } catch (AmazonS3Exception e) { LOG.error("Failure of S3 Select request against {}", source); LOG.debug("S3 Select request against {}:\n{}", source, SelectBinding.toString(request), e); throw e; } } })); } @Override public AuditSpan createSpan(final String operation, @Nullable final String path1, @Nullable final String path2) throws IOException { return auditSpanSource.createSpan(operation, path1, path2); } @Override public void incrementWriteOperations() { owner.incrementWriteOperations(); } /** * Deactivate the audit span. */ @Override public void close() throws IOException { deactivateAuditSpan(); } /** * Get the request factory which uses this store's audit span. * @return the request factory. */ public RequestFactory getRequestFactory() { return requestFactory; } }
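// Usage sketch (not part of the original file): the single-part multipart lifecycle using only the
// methods defined above. It assumes the caller supplies a WriteOperationHelper and a source file
// small enough to be sent as one part; real code would split large files into several parts.
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.UploadPartRequest;

import org.apache.hadoop.fs.s3a.WriteOperationHelper;

final class MultipartUploadSketch {
    static void uploadAsSinglePart(WriteOperationHelper helper, String destKey, File source)
            throws IOException {
        String uploadId = helper.initiateMultiPartUpload(destKey);
        try {
            // Exactly one of uploadStream or sourceFile may be given; here the file is used.
            UploadPartRequest part = helper.newUploadPartRequest(
                    destKey, uploadId, 1, (int) source.length(), null, source, 0L);
            List<PartETag> partETags = new ArrayList<>();
            partETags.add(helper.uploadPart(part).getPartETag());
            helper.commitUpload(destKey, uploadId, partETags, source.length());
        } catch (IOException e) {
            // Best-effort cleanup; abortMultipartCommit retries internally.
            helper.abortMultipartCommit(destKey, uploadId);
            throw e;
        }
    }
}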
/* * Copyright 2016 DiffPlug * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.diffplug.gradle.spotless; import java.io.File; import java.io.Serializable; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Random; import java.util.stream.Stream; import javax.annotation.Nullable; import org.gradle.api.GradleException; import org.gradle.api.Project; import org.gradle.api.file.FileCollection; import org.gradle.api.internal.file.UnionFileCollection; import com.diffplug.spotless.FormatExceptionPolicyStrict; import com.diffplug.spotless.FormatterFunc; import com.diffplug.spotless.FormatterStep; import com.diffplug.spotless.LazyForwardingEquality; import com.diffplug.spotless.LineEnding; import com.diffplug.spotless.ThrowingEx; import com.diffplug.spotless.generic.EndWithNewlineStep; import com.diffplug.spotless.generic.IndentStep; import com.diffplug.spotless.generic.LicenseHeaderStep; import com.diffplug.spotless.generic.ReplaceRegexStep; import com.diffplug.spotless.generic.ReplaceStep; import com.diffplug.spotless.generic.TrimTrailingWhitespaceStep; import groovy.lang.Closure; /** Adds a `spotless{Name}Check` and `spotless{Name}Apply` task. */ public class FormatExtension { final SpotlessExtension root; public FormatExtension(SpotlessExtension root) { this.root = root; } private String formatName() { for (Map.Entry<String, FormatExtension> entry : root.formats.entrySet()) { if (entry.getValue() == this) { return entry.getKey(); } } throw new IllegalStateException("This format is not contained by any SpotlessExtension."); } boolean paddedCell = false; /** Enables paddedCell mode. @see <a href="https://github.com/diffplug/spotless/blob/master/PADDEDCELL.md">Padded cell</a> */ public void paddedCell() { paddedCell(true); } /** Enables paddedCell mode. @see <a href="https://github.com/diffplug/spotless/blob/master/PADDEDCELL.md">Padded cell</a> */ public void paddedCell(boolean paddedCell) { this.paddedCell = paddedCell; } LineEnding lineEndings; /** Returns the line endings to use (defaults to {@link SpotlessExtension#getLineEndings()}. */ public LineEnding getLineEndings() { return lineEndings == null ? root.getLineEndings() : lineEndings; } /** Sets the line endings to use (defaults to {@link SpotlessExtension#getLineEndings()}. */ public void setLineEndings(LineEnding lineEndings) { this.lineEndings = lineEndings; } Charset encoding; /** Returns the encoding to use (defaults to {@link SpotlessExtension#getEncoding()}. */ public Charset getEncoding() { return encoding == null ? root.getEncoding() : encoding; } /** Sets the encoding to use (defaults to {@link SpotlessExtension#getEncoding()}. */ public void setEncoding(String name) { setEncoding(Charset.forName(name)); } /** Sets the encoding to use (defaults to {@link SpotlessExtension#getEncoding()}. 
*/ public void setEncoding(Charset charset) { encoding = Objects.requireNonNull(charset); } FormatExceptionPolicyStrict exceptionPolicy = new FormatExceptionPolicyStrict(); /** Ignores errors in the given step. */ public void ignoreErrorForStep(String stepName) { exceptionPolicy.excludeStep(stepName); } /** Ignores errors for the given relative path. */ public void ignoreErrorForPath(String relativePath) { exceptionPolicy.excludePath(relativePath); } /** Sets encoding to use (defaults to {@link SpotlessExtension#getEncoding()}). */ public void encoding(String charset) { setEncoding(charset); } /** The files that need to be formatted. */ protected FileCollection target; /** * FileCollections pass through raw. * Strings are treated as the 'include' arg to fileTree, with project.rootDir as the dir. * List<String> are treated as the 'includes' arg to fileTree, with project.rootDir as the dir. * Anything else gets passed to getProject().files(). */ public void target(Object... targets) { if (targets.length == 0) { this.target = getProject().files(); } else if (targets.length == 1) { this.target = parseTarget(targets[0]); } else { if (Stream.of(targets).allMatch(o -> o instanceof String)) { this.target = parseTarget(Arrays.asList(targets)); } else { UnionFileCollection union = new UnionFileCollection(); for (Object target : targets) { union.add(parseTarget(target)); } this.target = union; } } } @SuppressWarnings("unchecked") protected FileCollection parseTarget(Object target) { if (target instanceof FileCollection) { return (FileCollection) target; } else if (target instanceof String || (target instanceof List && ((List<?>) target).stream().allMatch(o -> o instanceof String))) { // since people are likely to do '**/*.md', we want to make sure to exclude folders // they don't want to format which will slow down the operation greatly File dir = getProject().getProjectDir(); List<String> excludes = new ArrayList<>(); // no git excludes.add(".git"); // no .gradle if (getProject() == getProject().getRootProject()) { excludes.add(".gradle"); } // no build folders excludes.add(relativize(dir, getProject().getBuildDir())); for (Project subproject : getProject().getSubprojects()) { excludes.add(relativize(dir, subproject.getBuildDir())); } if (target instanceof String) { return (FileCollection) getProject().fileTree(dir).include((String) target).exclude(excludes); } else { // target can only be a List<String> at this point return (FileCollection) getProject().fileTree(dir).include((List<String>) target).exclude(excludes); } } else { return getProject().files(target); } } static String relativize(File root, File dest) { String rootPath = root.getAbsolutePath(); String destPath = dest.getAbsolutePath(); if (!destPath.startsWith(rootPath)) { throw new IllegalArgumentException(dest + " is not a child of " + root); } else { return destPath.substring(rootPath.length()); } } /** The steps that need to be added. */ protected List<FormatterStep> steps = new ArrayList<>(); /** Adds a new step. */ public void addStep(FormatterStep newStep) { FormatterStep existing = getExistingStep(newStep.getName()); if (existing != null) { throw new GradleException("Multiple steps with name '" + newStep.getName() + "' for spotless format '" + formatName() + "'"); } steps.add(newStep); } /** Returns the existing step with the given name, if any. 
*/ @Nullable protected FormatterStep getExistingStep(String stepName) { for (FormatterStep step : steps) { if (stepName.equals(step.getName())) { return step; } } return null; } /** Replaces the given step. */ protected void replaceStep(FormatterStep replacementStep) { FormatterStep existing = getExistingStep(replacementStep.getName()); if (existing == null) { throw new GradleException("Cannot replace step '" + replacementStep.getName() + "' for spotless format '" + formatName() + "' because it hasn't been added yet."); } int index = steps.indexOf(existing); steps.set(index, replacementStep); } /** Clears all of the existing steps. */ public void clearSteps() { steps.clear(); } /** * An optional performance optimization if you are using any of the `custom` or `customLazy` * methods. If you aren't explicitly calling `custom` or `customLazy`, then this method * has no effect. * * Spotless tracks what files have changed from run to run, so that it can run faster * by only checking files which have changed, or whose formatting steps have changed. * If you use either the `custom` or `customLazy` methods, then gradle can never mark * your files as `up-to-date`, because it can't know if perhaps the behavior of your * custom function has changed. * * If you set `bumpThisNumberIfACustomStepChanges( <some number> )`, then spotless will * assume that the custom rules have not changed if the number has not changed. If a * custom rule does change, then you must bump the number so that spotless will know * that it must recheck the files it has already checked. */ public void bumpThisNumberIfACustomStepChanges(int number) { globalState = number; } private Serializable globalState = new NeverUpToDateBetweenRuns(); static class NeverUpToDateBetweenRuns extends LazyForwardingEquality<Integer> { private static final long serialVersionUID = 1L; private static final Random RANDOM = new Random(); @Override protected Integer calculateState() throws Exception { return RANDOM.nextInt(); } } /** * Adds the given custom step, which is constructed lazily for performance reasons. * * The resulting function will receive a string with unix-newlines, and it must return a string with unix newlines. * * If you're getting errors about `closure cannot be cast to com.diffplug.common.base.Throwing$Function`, then use * {@link #customLazyGroovy(String, ThrowingEx.Supplier)}. */ public void customLazy(String name, ThrowingEx.Supplier<FormatterFunc> formatterSupplier) { addStep(FormatterStep.createLazy(name, () -> globalState, unusedState -> formatterSupplier.get())); } /** Same as {@link #customLazy(String, ThrowingEx.Supplier)}, but for Groovy closures. */ public void customLazyGroovy(String name, ThrowingEx.Supplier<Closure<String>> formatterSupplier) { customLazy(name, () -> formatterSupplier.get()::call); } /** Adds a custom step. Receives a string with unix-newlines, must return a string with unix newlines. */ public void custom(String name, Closure<String> formatter) { custom(name, formatter::call); } /** Adds a custom step. Receives a string with unix-newlines, must return a string with unix newlines. */ public void custom(String name, FormatterFunc formatter) { customLazy(name, () -> formatter); } /** Highly efficient find-replace char sequence. */ public void replace(String name, CharSequence original, CharSequence after) { addStep(ReplaceStep.create(name, original, after)); } /** Highly efficient find-replace regex. 
*/ public void replaceRegex(String name, String regex, String replacement) { addStep(ReplaceRegexStep.create(name, regex, replacement)); } /** Removes trailing whitespace. */ public void trimTrailingWhitespace() { addStep(TrimTrailingWhitespaceStep.create()); } /** Ensures that files end with a single newline. */ public void endWithNewline() { addStep(EndWithNewlineStep.create()); } /** Ensures that the files are indented using spaces. */ public void indentWithSpaces(int numSpacesPerTab) { addStep(IndentStep.Type.SPACE.create(numSpacesPerTab)); } /** Ensures that the files are indented using spaces. */ public void indentWithSpaces() { indentWithSpaces(4); } /** Ensures that the files are indented using tabs. */ public void indentWithTabs(int tabToSpaces) { addStep(IndentStep.Type.TAB.create(tabToSpaces)); } /** Ensures that the files are indented using tabs. */ public void indentWithTabs() { indentWithTabs(4); } /** * @param licenseHeader * Content that should be at the top of every file * @param delimiter * Spotless will look for a line that starts with this to know what the "top" is. */ public void licenseHeader(String licenseHeader, String delimiter) { addStep(LicenseHeaderStep.createFromHeader(licenseHeader, delimiter)); } /** * @param licenseHeaderFile * Content that should be at the top of every file * @param delimiter * Spotless will look for a line that starts with this to know what the "top" is. */ public void licenseHeaderFile(Object licenseHeaderFile, String delimiter) { addStep(LicenseHeaderStep.createFromFile(getProject().file(licenseHeaderFile), getEncoding(), delimiter)); } /** Sets up a format task according to the values in this extension. */ protected void setupTask(SpotlessTask task) { task.setPaddedCell(paddedCell); task.setEncoding(getEncoding().name()); task.setExceptionPolicy(exceptionPolicy); task.setTarget(target); task.setSteps(steps); task.setLineEndingsPolicy(getLineEndings().createPolicy(getProject().getProjectDir(), () -> task.target)); } /** Returns the project that this extension is attached to. */ protected Project getProject() { return root.project; } }
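// Usage sketch (not part of the original file): in a real build these methods are invoked from the
// Gradle DSL; here an already-created FormatExtension is configured directly from Java to show how
// the generic steps above compose. The step names, target pattern, and the lambda-as-FormatterFunc
// assumption are illustrative only.
import com.diffplug.gradle.spotless.FormatExtension;

final class SpotlessFormatExample {
    static void configure(FormatExtension format) {
        format.target("src/**/*.md");   // treated as a fileTree include under the project dir
        format.trimTrailingWhitespace();
        format.endWithNewline();
        format.indentWithSpaces(2);
        format.replaceRegex("collapse blank lines", "\\n{3,}", "\n\n");
        // A custom step defeats Gradle's up-to-date checks unless a bump number is supplied.
        format.custom("no tabs", raw -> raw.replace("\t", "  "));
        format.bumpThisNumberIfACustomStepChanges(1);
    }
}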
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.athena.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * A workgroup, which contains a name, description, creation time, state, and other configuration, listed under * <a>WorkGroup$Configuration</a>. Each workgroup enables you to isolate queries for you or your group of users from * other queries in the same account, to configure the query results location and the encryption configuration (known as * workgroup settings), to enable sending query metrics to Amazon CloudWatch, and to establish per-query data usage * control limits for all queries in a workgroup. The workgroup settings override is specified in * EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/WorkGroup" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class WorkGroup implements Serializable, Cloneable, StructuredPojo { /** * <p> * The workgroup name. * </p> */ private String name; /** * <p> * The state of the workgroup: ENABLED or DISABLED. * </p> */ private String state; /** * <p> * The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the * encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for * the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount * of data scanned per query or per workgroup. The workgroup settings override is specified in * EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * </p> */ private WorkGroupConfiguration configuration; /** * <p> * The workgroup description. * </p> */ private String description; /** * <p> * The date and time the workgroup was created. * </p> */ private java.util.Date creationTime; /** * <p> * The workgroup name. * </p> * * @param name * The workgroup name. */ public void setName(String name) { this.name = name; } /** * <p> * The workgroup name. * </p> * * @return The workgroup name. */ public String getName() { return this.name; } /** * <p> * The workgroup name. * </p> * * @param name * The workgroup name. * @return Returns a reference to this object so that method calls can be chained together. */ public WorkGroup withName(String name) { setName(name); return this; } /** * <p> * The state of the workgroup: ENABLED or DISABLED. * </p> * * @param state * The state of the workgroup: ENABLED or DISABLED. * @see WorkGroupState */ public void setState(String state) { this.state = state; } /** * <p> * The state of the workgroup: ENABLED or DISABLED. 
* </p> * * @return The state of the workgroup: ENABLED or DISABLED. * @see WorkGroupState */ public String getState() { return this.state; } /** * <p> * The state of the workgroup: ENABLED or DISABLED. * </p> * * @param state * The state of the workgroup: ENABLED or DISABLED. * @return Returns a reference to this object so that method calls can be chained together. * @see WorkGroupState */ public WorkGroup withState(String state) { setState(state); return this; } /** * <p> * The state of the workgroup: ENABLED or DISABLED. * </p> * * @param state * The state of the workgroup: ENABLED or DISABLED. * @return Returns a reference to this object so that method calls can be chained together. * @see WorkGroupState */ public WorkGroup withState(WorkGroupState state) { this.state = state.toString(); return this; } /** * <p> * The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the * encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for * the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount * of data scanned per query or per workgroup. The workgroup settings override is specified in * EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * </p> * * @param configuration * The configuration of the workgroup, which includes the location in Amazon S3 where query results are * stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch * Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the * data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings * override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. */ public void setConfiguration(WorkGroupConfiguration configuration) { this.configuration = configuration; } /** * <p> * The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the * encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for * the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount * of data scanned per query or per workgroup. The workgroup settings override is specified in * EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * </p> * * @return The configuration of the workgroup, which includes the location in Amazon S3 where query results are * stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch * Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the * data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings * override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. 
*/ public WorkGroupConfiguration getConfiguration() { return this.configuration; } /** * <p> * The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the * encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for * the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount * of data scanned per query or per workgroup. The workgroup settings override is specified in * EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * </p> * * @param configuration * The configuration of the workgroup, which includes the location in Amazon S3 where query results are * stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch * Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the * data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings * override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See * <a>WorkGroupConfiguration$EnforceWorkGroupConfiguration</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public WorkGroup withConfiguration(WorkGroupConfiguration configuration) { setConfiguration(configuration); return this; } /** * <p> * The workgroup description. * </p> * * @param description * The workgroup description. */ public void setDescription(String description) { this.description = description; } /** * <p> * The workgroup description. * </p> * * @return The workgroup description. */ public String getDescription() { return this.description; } /** * <p> * The workgroup description. * </p> * * @param description * The workgroup description. * @return Returns a reference to this object so that method calls can be chained together. */ public WorkGroup withDescription(String description) { setDescription(description); return this; } /** * <p> * The date and time the workgroup was created. * </p> * * @param creationTime * The date and time the workgroup was created. */ public void setCreationTime(java.util.Date creationTime) { this.creationTime = creationTime; } /** * <p> * The date and time the workgroup was created. * </p> * * @return The date and time the workgroup was created. */ public java.util.Date getCreationTime() { return this.creationTime; } /** * <p> * The date and time the workgroup was created. * </p> * * @param creationTime * The date and time the workgroup was created. * @return Returns a reference to this object so that method calls can be chained together. */ public WorkGroup withCreationTime(java.util.Date creationTime) { setCreationTime(creationTime); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getState() != null) sb.append("State: ").append(getState()).append(","); if (getConfiguration() != null) sb.append("Configuration: ").append(getConfiguration()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getCreationTime() != null) sb.append("CreationTime: ").append(getCreationTime()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof WorkGroup == false) return false; WorkGroup other = (WorkGroup) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getState() == null ^ this.getState() == null) return false; if (other.getState() != null && other.getState().equals(this.getState()) == false) return false; if (other.getConfiguration() == null ^ this.getConfiguration() == null) return false; if (other.getConfiguration() != null && other.getConfiguration().equals(this.getConfiguration()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getCreationTime() == null ^ this.getCreationTime() == null) return false; if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode()); hashCode = prime * hashCode + ((getConfiguration() == null) ? 0 : getConfiguration().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode()); return hashCode; } @Override public WorkGroup clone() { try { return (WorkGroup) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.athena.model.transform.WorkGroupMarshaller.getInstance().marshall(this, protocolMarshaller); } }
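/*
 * Editor's note: an illustrative sketch, not part of the generated SDK source. It shows the
 * fluent withX(...) mutators defined above, each of which returns `this` so calls can be
 * chained. The name, description and state values are placeholders.
 */
class WorkGroupUsageSketch {
    public static void main(String[] args) {
        WorkGroup workGroup = new WorkGroup()
                .withName("analytics-team")
                .withDescription("Example workgroup for the analytics team")
                .withState(WorkGroupState.ENABLED)
                .withCreationTime(new java.util.Date());
        // prints the string representation produced by the toString() defined above
        System.out.println(workGroup);
    }
}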
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.reactivex.flowable; import static org.junit.Assert.assertEquals; import java.util.*; import org.junit.Test; import org.reactivestreams.Publisher; import io.reactivex.Flowable; import io.reactivex.FlowableTransformer; import io.reactivex.flowables.GroupedFlowable; import io.reactivex.functions.*; import io.reactivex.subscribers.TestSubscriber; /** * Test super/extends of generics. * * See https://github.com/Netflix/RxJava/pull/331 */ public class FlowableCovarianceTest { /** * This won't compile if super/extends isn't done correctly on generics. */ @Test public void testCovarianceOfFrom() { Flowable.<Movie> just(new HorrorMovie()); Flowable.<Movie> fromIterable(new ArrayList<HorrorMovie>()); // Observable.<HorrorMovie>from(new Movie()); // may not compile } @Test public void testSortedList() { Comparator<Media> sortFunction = new Comparator<Media>() { @Override public int compare(Media t1, Media t2) { return 1; } }; // this one would work without the covariance generics Flowable<Media> f = Flowable.just(new Movie(), new TVSeason(), new Album()); f.toSortedList(sortFunction); // this one would NOT work without the covariance generics Flowable<Movie> f2 = Flowable.just(new Movie(), new ActionMovie(), new HorrorMovie()); f2.toSortedList(sortFunction); } @Test public void testGroupByCompose() { Flowable<Movie> movies = Flowable.just(new HorrorMovie(), new ActionMovie(), new Movie()); TestSubscriber<String> ts = new TestSubscriber<String>(); movies .groupBy(new Function<Movie, Object>() { @Override public Object apply(Movie v) { return v.getClass(); } }) .doOnNext(new Consumer<GroupedFlowable<Object, Movie>>() { @Override public void accept(GroupedFlowable<Object, Movie> g) { System.out.println(g.getKey()); } }) .flatMap(new Function<GroupedFlowable<Object, Movie>, Publisher<String>>() { @Override public Publisher<String> apply(GroupedFlowable<Object, Movie> g) { return g .doOnNext(new Consumer<Movie>() { @Override public void accept(Movie v) { System.out.println(v); } }) .compose(new FlowableTransformer<Movie, Movie>() { @Override public Publisher<Movie> apply(Flowable<Movie> m) { return m.concatWith(Flowable.just(new ActionMovie())); } } ) .map(new Function<Object, String>() { @Override public String apply(Object v) { return v.toString(); } }); } }) .subscribe(ts); ts.assertTerminated(); ts.assertNoErrors(); // System.out.println(ts.getOnNextEvents()); assertEquals(6, ts.valueCount()); } @SuppressWarnings("unused") @Test public void testCovarianceOfCompose() { Flowable<HorrorMovie> movie = Flowable.just(new HorrorMovie()); Flowable<Movie> movie2 = movie.compose(new FlowableTransformer<HorrorMovie, Movie>() { @Override public Publisher<Movie> apply(Flowable<HorrorMovie> t) { return Flowable.just(new Movie()); } }); } @SuppressWarnings("unused") @Test public void testCovarianceOfCompose2() { Flowable<Movie> movie = Flowable.<Movie> just(new HorrorMovie()); Flowable<HorrorMovie> movie2 = 
movie.compose(new FlowableTransformer<Movie, HorrorMovie>() { @Override public Publisher<HorrorMovie> apply(Flowable<Movie> t) { return Flowable.just(new HorrorMovie()); } }); } @SuppressWarnings("unused") @Test public void testCovarianceOfCompose3() { Flowable<Movie> movie = Flowable.<Movie>just(new HorrorMovie()); Flowable<HorrorMovie> movie2 = movie.compose(new FlowableTransformer<Movie, HorrorMovie>() { @Override public Publisher<HorrorMovie> apply(Flowable<Movie> t) { return Flowable.just(new HorrorMovie()).map(new Function<HorrorMovie, HorrorMovie>() { @Override public HorrorMovie apply(HorrorMovie v) { return v; } }); } } ); } @SuppressWarnings("unused") @Test public void testCovarianceOfCompose4() { Flowable<HorrorMovie> movie = Flowable.just(new HorrorMovie()); Flowable<HorrorMovie> movie2 = movie.compose(new FlowableTransformer<HorrorMovie, HorrorMovie>() { @Override public Publisher<HorrorMovie> apply(Flowable<HorrorMovie> t1) { return t1.map(new Function<HorrorMovie, HorrorMovie>() { @Override public HorrorMovie apply(HorrorMovie v) { return v; } }); } }); } @Test public void testComposeWithDeltaLogic() { List<Movie> list1 = Arrays.asList(new Movie(), new HorrorMovie(), new ActionMovie()); List<Movie> list2 = Arrays.asList(new ActionMovie(), new Movie(), new HorrorMovie(), new ActionMovie()); Flowable<List<Movie>> movies = Flowable.just(list1, list2); movies.compose(deltaTransformer); } static Function<List<List<Movie>>, Flowable<Movie>> calculateDelta = new Function<List<List<Movie>>, Flowable<Movie>>() { @Override public Flowable<Movie> apply(List<List<Movie>> listOfLists) { if (listOfLists.size() == 1) { return Flowable.fromIterable(listOfLists.get(0)); } else { // diff the two List<Movie> newList = listOfLists.get(1); List<Movie> oldList = new ArrayList<Movie>(listOfLists.get(0)); Set<Movie> delta = new LinkedHashSet<Movie>(); delta.addAll(newList); // remove all that match in old delta.removeAll(oldList); // filter oldList to those that aren't in the newList oldList.removeAll(newList); // for all left in the oldList we'll create DROP events for (@SuppressWarnings("unused") Movie old : oldList) { delta.add(new Movie()); } return Flowable.fromIterable(delta); } } }; static FlowableTransformer<List<Movie>, Movie> deltaTransformer = new FlowableTransformer<List<Movie>, Movie>() { @Override public Publisher<Movie> apply(Flowable<List<Movie>> movieList) { return movieList .startWith(new ArrayList<Movie>()) .buffer(2, 1) .skip(1) .flatMap(calculateDelta); } }; /* * Most tests are moved into their applicable classes such as [Operator]Tests.java */ static class Media { } static class Movie extends Media { } static class HorrorMovie extends Movie { } static class ActionMovie extends Movie { } static class Album extends Media { } static class TVSeason extends Media { } static class Rating { } static class CoolRating extends Rating { } static class Result { } static class ExtendedResult extends Result { } }
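/*
 * Editor's note: a compact illustrative sketch, not part of the original test, of the covariance
 * idiom exercised above: an explicit <Movie> type witness lets Flowable.just accept any Movie
 * subtype, and the resulting Flowable<Movie> can be mapped and subscribed as usual. It reuses the
 * static inner classes declared in FlowableCovarianceTest.
 */
class CovarianceUsageSketch {
    public static void main(String[] args) {
        Flowable<FlowableCovarianceTest.Movie> movies = Flowable.<FlowableCovarianceTest.Movie>just(
                new FlowableCovarianceTest.HorrorMovie(),
                new FlowableCovarianceTest.ActionMovie());
        movies.map(movie -> movie.getClass().getSimpleName())
                .subscribe(System.out::println); // prints HorrorMovie, then ActionMovie
    }
}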
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.server.zookeeper; import static java.nio.charset.StandardCharsets.UTF_8; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicInteger; import org.apache.accumulo.core.conf.AccumuloConfiguration; import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeExistsPolicy; import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeMissingPolicy; import org.apache.accumulo.server.util.time.SimpleTimer; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NodeExistsException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Provides a way to push work out to tablet servers via zookeeper and wait for that work to be done. Any tablet server can pick up a work item and process it. * * Worker processes watch a zookeeper node for tasks to be performed. After getting an exclusive lock on the node, the worker will perform the task. */ public class DistributedWorkQueue { private static final String LOCKS_NODE = "locks"; private static final Logger log = LoggerFactory.getLogger(DistributedWorkQueue.class); private ThreadPoolExecutor threadPool; private ZooReaderWriter zoo = ZooReaderWriter.getInstance(); private String path; private AccumuloConfiguration config; private long timerInitialDelay, timerPeriod; private AtomicInteger numTask = new AtomicInteger(0); private void lookForWork(final Processor processor, List<String> children) { if (children.size() == 0) return; if (numTask.get() >= threadPool.getCorePoolSize()) return; Random random = new Random(); Collections.shuffle(children, random); try { for (final String child : children) { if (child.equals(LOCKS_NODE)) continue; final String lockPath = path + "/locks/" + child; try { // no need to use zoolock, because a queue (ephemeral sequential) is not needed // if can not get the lock right now then do not want to wait zoo.putEphemeralData(lockPath, new byte[0]); } catch (NodeExistsException nee) { // someone else has reserved it continue; } final String childPath = path + "/" + child; // check to see if another node processed it already if (!zoo.exists(childPath)) { zoo.recursiveDelete(lockPath, NodeMissingPolicy.SKIP); continue; } // Great... 
we got the lock, but maybe we're too busy if (numTask.get() >= threadPool.getCorePoolSize()) { zoo.recursiveDelete(lockPath, NodeMissingPolicy.SKIP); break; } log.debug("got lock for " + child); Runnable task = new Runnable() { @Override public void run() { try { try { processor.newProcessor().process(child, zoo.getData(childPath, null)); // if the task fails, then its entry in the Q is not deleted... so it will be retried try { zoo.recursiveDelete(childPath, NodeMissingPolicy.SKIP); } catch (Exception e) { log.error("Error received when trying to delete entry in zookeeper " + childPath, e); } } catch (Exception e) { log.warn("Failed to process work " + child, e); } try { zoo.recursiveDelete(lockPath, NodeMissingPolicy.SKIP); } catch (Exception e) { log.error("Error received when trying to delete entry in zookeeper " + childPath, e); } } finally { numTask.decrementAndGet(); } try { // its important that this is called after numTask is decremented lookForWork(processor, zoo.getChildren(path)); } catch (KeeperException e) { log.error("Failed to look for work", e); } catch (InterruptedException e) { log.info("Interrupted looking for work", e); } } }; numTask.incrementAndGet(); threadPool.execute(task); } } catch (Throwable t) { log.error("Unexpected error", t); } } public interface Processor { Processor newProcessor(); void process(String workID, byte[] data); } public DistributedWorkQueue(String path, AccumuloConfiguration config) { // Preserve the old delay and period this(path, config, new Random().nextInt(60 * 1000), 60 * 1000); } public DistributedWorkQueue(String path, AccumuloConfiguration config, long timerInitialDelay, long timerPeriod) { this.path = path; this.config = config; this.timerInitialDelay = timerInitialDelay; this.timerPeriod = timerPeriod; } public void startProcessing(final Processor processor, ThreadPoolExecutor executorService) throws KeeperException, InterruptedException { threadPool = executorService; zoo.mkdirs(path); zoo.mkdirs(path + "/" + LOCKS_NODE); List<String> children = zoo.getChildren(path, new Watcher() { @Override public void process(WatchedEvent event) { switch (event.getType()) { case NodeChildrenChanged: if (event.getPath().equals(path)) try { lookForWork(processor, zoo.getChildren(path, this)); } catch (KeeperException e) { log.error("Failed to look for work", e); } catch (InterruptedException e) { log.info("Interrupted looking for work", e); } else log.info("Unexpected path for NodeChildrenChanged event " + event.getPath()); break; case NodeCreated: case NodeDataChanged: case NodeDeleted: case None: log.info("Got unexpected zookeeper event: " + event.getType() + " for " + path); break; } } }); lookForWork(processor, children); // Add a little jitter to avoid all the tservers slamming zookeeper at once SimpleTimer.getInstance(config).schedule(new Runnable() { @Override public void run() { log.debug("Looking for work in " + path); try { lookForWork(processor, zoo.getChildren(path)); } catch (KeeperException e) { log.error("Failed to look for work", e); } catch (InterruptedException e) { log.info("Interrupted looking for work", e); } } }, timerInitialDelay, timerPeriod); } /** * Adds work to the queue, automatically converting the String to bytes using UTF-8 */ public void addWork(String workId, String data) throws KeeperException, InterruptedException { addWork(workId, data.getBytes(UTF_8)); } public void addWork(String workId, byte[] data) throws KeeperException, InterruptedException { if (workId.equalsIgnoreCase(LOCKS_NODE)) throw new 
IllegalArgumentException("locks is reserved work id"); zoo.mkdirs(path); zoo.putPersistentData(path + "/" + workId, data, NodeExistsPolicy.SKIP); } public List<String> getWorkQueued() throws KeeperException, InterruptedException { ArrayList<String> children = new ArrayList<String>(zoo.getChildren(path)); children.remove(LOCKS_NODE); return children; } public void waitUntilDone(Set<String> workIDs) throws KeeperException, InterruptedException { final Object condVar = new Object(); Watcher watcher = new Watcher() { @Override public void process(WatchedEvent event) { switch (event.getType()) { case NodeChildrenChanged: synchronized (condVar) { condVar.notify(); } break; case NodeCreated: case NodeDataChanged: case NodeDeleted: case None: log.info("Got unexpected zookeeper event: " + event.getType() + " for " + path); break; } } }; List<String> children = zoo.getChildren(path, watcher); while (!Collections.disjoint(children, workIDs)) { synchronized (condVar) { condVar.wait(10000); } children = zoo.getChildren(path, watcher); } } }
package com.fincatto.documentofiscal.nfe310.classes.inutilizacao; import org.junit.Assert; import org.junit.Test; import com.fincatto.documentofiscal.DFAmbiente; import com.fincatto.documentofiscal.DFUnidadeFederativa; import com.fincatto.documentofiscal.nfe310.FabricaDeObjetosFake; import com.fincatto.documentofiscal.nfe310.classes.evento.inutilizacao.NFEventoInutilizacaoDados; public class NFEventoCancelamentoDadosTest { @Test(expected = IllegalStateException.class) public void naoDevePermitirModeloInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setModeloDocumentoFiscal("75"); } @Test public void devePermitirAmbosModelosDeNFe() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setModeloDocumentoFiscal("55"); dados.setModeloDocumentoFiscal("65"); } @Test(expected = IllegalStateException.class) public void naoDevePermitirJustificativaInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); try { dados.setJustificativa("rcAYGVaFoYcW8q"); } catch (final IllegalStateException e) { dados.setJustificativa("WDou2V29BncPEppZRB7XnD7BAQPYFgewTmEu2kCCRbESq01soGjLJVxhJmcYMxAY3t0nXCXmWJh8suPIikxWuUxaJCAMBKUiMMm04AyySjtjSrNqThH0W14IpNWM5bCkKOqyoV58HFVxfZLfZOYmn7SCUW3QTOoaos09TFbMMIccnW2kfVMrb8T419Mpy60IIjo6hqORvMPZiDKjSrmpWiYLCIGLLBpqjbO9XmSHryazw2XoT2yJMpfE9N53GCRh"); } } @Test(expected = IllegalStateException.class) public void naoDevePermitirCNPJInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setCnpj("1234567890123"); } @Test(expected = NumberFormatException.class) public void naoDevePermitirAnoDiferenteDeDuasCasas() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); try { dados.setAno(9); } catch (final NumberFormatException e) { dados.setAno(100); } } @Test(expected = IllegalStateException.class) public void naoDevePermitirNumeroNFInicialInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); try { dados.setNumeroNFInicial(""); } catch (final IllegalStateException e) { dados.setNumeroNFInicial("1000000000"); } } @Test(expected = IllegalStateException.class) public void naoDevePermitirNumeroNFFinalInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); try { dados.setNumeroNFFinal(""); } catch (final IllegalStateException e) { dados.setNumeroNFFinal("1000000000"); } } @Test(expected = IllegalStateException.class) public void naoDevePermitirSerieInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); try { dados.setSerie(""); } catch (final IllegalStateException e) { dados.setSerie("1000"); } } @Test(expected = IllegalStateException.class) public void naoDevePermitirServicoInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setNomeServico("inutiliza"); } @Test(expected = IllegalStateException.class) public void naoDevePermitirIDInvalido() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setIdentificador("IDw6cRIPJzP4sv6gBWQFCNcFSITQK7rOxjmBFcW2Mzf"); } @Test(expected = IllegalStateException.class) public void naoDevePermitirIdentificadorNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); 
dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirAmbienteNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirServicoNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirUFNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirAnoNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); 
dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirCNPJNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirModeloNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirSerieNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirNumeroNotaInicialNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirNumeroNotaFinalNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); 
dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setJustificativa("u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test(expected = IllegalStateException.class) public void naoDevePermitirJustificativaNulo() { final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados(); dados.setAmbiente(DFAmbiente.HOMOLOGACAO); dados.setAno(15); dados.setCnpj("12345678901234"); dados.setIdentificador("ID55605654557305333405403926218856863798956"); dados.setModeloDocumentoFiscal("55"); dados.setNomeServico("INUTILIZAR"); dados.setNumeroNFInicial("1"); dados.setNumeroNFFinal("999999999"); dados.setSerie("999"); dados.setUf(DFUnidadeFederativa.SC); dados.toString(); } @Test public void deveGerarXMLDeAcordoComOPadraoEstabelecido() { final String xmlEsperado = "<infInut Id=\"ID42161234567890123455123123456789987654321\"><tpAmb>2</tpAmb><xServ>INUTILIZAR</xServ><cUF>42</cUF><ano>16</ano><CNPJ>12345678901234</CNPJ><mod>55</mod><serie>123</serie><nNFIni>123456789</nNFIni><nNFFin>987654321</nNFFin><xJust>u2MGhwXFQDFtSuKsLkmgowBZNNhOWBL4JKIqYnIj5iDPTAUqHSwKL1O2olgmZwigRS1P58Zoc1qDxzqmvv3hBE1LYuLHNPbFXuLwM5ZxvH7xfSpnkX5VBGjrkR3cuiXLr1uz3chFb9JrNY5xU3X0eF9Byc2Q9TkPbFyPj7iRwwQVMNt6FGvpUyRMHGmhSDYhFRD2Dst0UaauvA4V0breWHyN4WUSEm9z377jXHNwtVLQQCxB2wcEIZGWVIT4CF5</xJust></infInut>"; Assert.assertEquals(xmlEsperado, FabricaDeObjetosFake.getNFEventoInutilizacaoDados().toString()); } }
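/*
 * Editor's note: an illustrative sketch, not part of the original test, showing a fully populated
 * NFEventoInutilizacaoDados that satisfies the validations exercised above (two-digit year,
 * 14-digit CNPJ, model 55/65, service name "INUTILIZAR", 1-9 digit note numbers, 1-3 character
 * series, justification of 15-255 characters). The concrete values are placeholders; the
 * identifier is copied from the valid value used in the tests.
 */
class NFEventoInutilizacaoDadosUsageSketch {
    public static void main(String[] args) {
        final NFEventoInutilizacaoDados dados = new NFEventoInutilizacaoDados();
        dados.setAmbiente(DFAmbiente.HOMOLOGACAO);
        dados.setAno(16);
        dados.setCnpj("12345678901234");
        dados.setIdentificador("ID55605654557305333405403926218856863798956");
        dados.setJustificativa("Justificativa de inutilizacao com mais de quinze caracteres");
        dados.setModeloDocumentoFiscal("55");
        dados.setNomeServico("INUTILIZAR");
        dados.setNumeroNFInicial("1");
        dados.setNumeroNFFinal("999999999");
        dados.setSerie("999");
        dados.setUf(DFUnidadeFederativa.SC);
        // toString() renders the XML representation checked by the last test above
        System.out.println(dados);
    }
}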
/////////////////////////////////////////////////////////////////////////// // __ _ _ ________ // // / / ____ ____ _(_)____/ | / / ____/ // // / / / __ \/ __ `/ / ___/ |/ / / __ // // / /___/ /_/ / /_/ / / /__/ /| / /_/ / // // /_____/\____/\__, /_/\___/_/ |_/\____/ // // /____/ // // // // The Next Generation Logic Library // // // /////////////////////////////////////////////////////////////////////////// // // // Copyright 2015-20xx Christoph Zengler // // // // Licensed under the Apache License, Version 2.0 (the "License"); // // you may not use this file except in compliance with the License. // // You may obtain a copy of the License at // // // // http://www.apache.org/licenses/LICENSE-2.0 // // // // Unless required by applicable law or agreed to in writing, software // // distributed under the License is distributed on an "AS IS" BASIS, // // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // // implied. See the License for the specific language governing // // permissions and limitations under the License. // // // /////////////////////////////////////////////////////////////////////////// package org.logicng.transformations.cnf; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import org.junit.jupiter.api.Test; import org.logicng.datastructures.Assignment; import org.logicng.formulas.Formula; import org.logicng.formulas.FormulaFactory; import org.logicng.formulas.Variable; import org.logicng.io.parsers.ParserException; import org.logicng.io.parsers.PropositionalParser; import org.logicng.predicates.CNFPredicate; import org.logicng.solvers.MiniSat; import org.logicng.solvers.SATSolver; import java.util.List; import java.util.SortedSet; /** * Unit tests for the class {@link CNFEncoder}. 
* @version 2.0.0 * @since 1.1 */ public class CNFEncoderTest { private static final CNFPredicate cnfPredicate = CNFPredicate.get(); private static final String p1 = "(x1 | x2) & x3 & x4 & ((x1 & x5 & ~(x6 | x7) | x8) | x9)"; private static final String p2 = "(y1 | y2) & y3 & y4 & ((y1 & y5 & ~(y6 | y7) | y8) | y9)"; private static final String p3 = "(z1 | z2) & z3 & z4 & ((z1 & z5 & ~(z6 | z7) | z8) | z9)"; @Test public void testFactorization() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); assertThat(phi1.numberOfAtoms()).isEqualTo(10); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().build()); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.FACTORIZATION).build()); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); final CNFEncoder encoder = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.FACTORIZATION).build()); assertThat(encoder.encode(phi1)).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); } @Test public void testTseitin() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).build()); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).atomBoundary(8).build()); assertThat(phi1.cnf()).isEqualTo(p.parse("(@RESERVED_CNF_0 | ~x1) & (@RESERVED_CNF_0 | ~x2) & (~@RESERVED_CNF_0 | x1 | x2) & (~@RESERVED_CNF_1 | x1) & (~@RESERVED_CNF_1 | x5) & (~@RESERVED_CNF_1 | ~x6) & (~@RESERVED_CNF_1 | ~x7) & (@RESERVED_CNF_1 | ~x1 | ~x5 | x6 | x7) & (@RESERVED_CNF_2 | ~@RESERVED_CNF_1) & (@RESERVED_CNF_2 | ~x8) & (@RESERVED_CNF_2 | ~x9) & (~@RESERVED_CNF_2 | @RESERVED_CNF_1 | x8 | x9) & @RESERVED_CNF_0 & x3 & x4 & @RESERVED_CNF_2")); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).atomBoundary(11).build()); assertThat(phi2.cnf()).isEqualTo(p.parse("(y1 | y2) & y3 & y4 & (y1 | y8 | y9) & (y5 | y8 | y9) & (~y6 | y8 | y9) & (~y7 | y8 | y9)")); } @Test public void testPG() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.PLAISTED_GREENBAUM).build()); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.PLAISTED_GREENBAUM).atomBoundary(8).build()); assertThat(phi1.cnf()).isEqualTo(p.parse("@RESERVED_CNF_1 & x3 & x4 & @RESERVED_CNF_2 & (~@RESERVED_CNF_1 | x1 | x2) & (~@RESERVED_CNF_2 | @RESERVED_CNF_3 | x8 | x9) & (~@RESERVED_CNF_3 | x1) & (~@RESERVED_CNF_3 | x5) & (~@RESERVED_CNF_3 
| ~x6) & (~@RESERVED_CNF_3 | ~x7)")); f.putConfiguration(CNFConfig.builder().algorithm(CNFConfig.Algorithm.PLAISTED_GREENBAUM).atomBoundary(11).build()); assertThat(phi2.cnf()).isEqualTo(p.parse("(y1 | y2) & y3 & y4 & (y1 | y8 | y9) & (y5 | y8 | y9) & (~y6 | y8 | y9) & (~y7 | y8 | y9)")); } @Test public void testAdvanced() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); final Formula phi3 = p.parse(p3); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().createdClauseBoundary(5).atomBoundary(3).build()); assertThat(phi2.cnf()).isEqualTo(p.parse("(y1 | y2) & y3 & y4 & (~@RESERVED_CNF_0 | y1) & (~@RESERVED_CNF_0 | y5) & (~@RESERVED_CNF_0 | ~y6) & (~@RESERVED_CNF_0 | ~y7) & (@RESERVED_CNF_0 | ~y1 | ~y5 | y6 | y7) & (@RESERVED_CNF_0 | y8 | y9)")); f.putConfiguration(CNFConfig.builder().createdClauseBoundary(-1).distributionBoundary(5).atomBoundary(3).build()); assertThat(phi3.cnf()).isEqualTo(p.parse("(z1 | z2) & z3 & z4 & (~@RESERVED_CNF_2 | z1) & (~@RESERVED_CNF_2 | z5) & (~@RESERVED_CNF_2 | ~z6) & (~@RESERVED_CNF_2 | ~z7) & (@RESERVED_CNF_2 | ~z1 | ~z5 | z6 | z7) & (@RESERVED_CNF_2 | z8 | z9)")); } @Test public void testAdvancedWithPGFallback() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); final Formula phi3 = p.parse(p3); assertThat(phi1.cnf()).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); f.putConfiguration(CNFConfig.builder().createdClauseBoundary(5).atomBoundary(3).fallbackAlgorithmForAdvancedEncoding(CNFConfig.Algorithm.PLAISTED_GREENBAUM).build()); assertThat(phi2.cnf()).isEqualTo(p.parse("(y1 | y2) & y3 & y4 & (@RESERVED_CNF_1 | y8 | y9) & (~@RESERVED_CNF_1 | y1) & (~@RESERVED_CNF_1 | y5) & (~@RESERVED_CNF_1 | ~y6) & (~@RESERVED_CNF_1 | ~y7)")); f.putConfiguration(CNFConfig.builder().createdClauseBoundary(-1).distributionBoundary(5).atomBoundary(3).fallbackAlgorithmForAdvancedEncoding(CNFConfig.Algorithm.PLAISTED_GREENBAUM).build()); assertThat(phi3.cnf()).isEqualTo(p.parse("(z1 | z2) & z3 & z4 & (@RESERVED_CNF_3 | z8 | z9) & (~@RESERVED_CNF_3 | z1) & (~@RESERVED_CNF_3 | z5) & (~@RESERVED_CNF_3 | ~z6) & (~@RESERVED_CNF_3 | ~z7)")); } @Test public void testTseitinEncoder() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final CNFEncoder encoder1 = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).build()); assertThat(encoder1.encode(phi1)).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); final CNFEncoder encoder2 = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).atomBoundary(8).build()); assertThat(encoder2.encode(phi1)).isEqualTo(p.parse("(@RESERVED_CNF_0 | ~x1) & (@RESERVED_CNF_0 | ~x2) & (~@RESERVED_CNF_0 | x1 | x2) & (~@RESERVED_CNF_1 | x1) & (~@RESERVED_CNF_1 | x5) & (~@RESERVED_CNF_1 | ~x6) & (~@RESERVED_CNF_1 | ~x7) & (@RESERVED_CNF_1 | ~x1 | ~x5 | x6 | x7) & (@RESERVED_CNF_2 | ~@RESERVED_CNF_1) & (@RESERVED_CNF_2 | ~x8) & (@RESERVED_CNF_2 | ~x9) & 
(~@RESERVED_CNF_2 | @RESERVED_CNF_1 | x8 | x9) & @RESERVED_CNF_0 & x3 & x4 & @RESERVED_CNF_2")); } @Test public void testPGEncoder() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final CNFEncoder encoder1 = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.PLAISTED_GREENBAUM).build()); assertThat(encoder1.encode(phi1)).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); final CNFEncoder encoder2 = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.PLAISTED_GREENBAUM).atomBoundary(8).build()); assertThat(encoder2.encode(phi1)).isEqualTo(p.parse("@RESERVED_CNF_1 & x3 & x4 & @RESERVED_CNF_2 & (~@RESERVED_CNF_1 | x1 | x2) & (~@RESERVED_CNF_2 | @RESERVED_CNF_3 | x8 | x9) & (~@RESERVED_CNF_3 | x1) & (~@RESERVED_CNF_3 | x5) & (~@RESERVED_CNF_3 | ~x6) & (~@RESERVED_CNF_3 | ~x7)")); } @Test public void testBDDEncoder() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); final Formula phi3 = p.parse(p3); final CNFEncoder encoder = new CNFEncoder(f, CNFConfig.builder().algorithm(CNFConfig.Algorithm.BDD).build()); final Formula phi1CNF = encoder.encode(phi1); assertThat(phi1CNF.holds(cnfPredicate)).isTrue(); assertThat(equivalentModels(phi1, phi1CNF, phi1.variables())).isTrue(); final Formula phi2CNF = encoder.encode(phi2); assertThat(phi2CNF.holds(cnfPredicate)).isTrue(); assertThat(equivalentModels(phi2, phi2CNF, phi2.variables())).isTrue(); final Formula phi3CNF = encoder.encode(phi3); assertThat(phi3CNF.holds(cnfPredicate)).isTrue(); assertThat(equivalentModels(phi3, phi3CNF, phi3.variables())).isTrue(); } @Test public void testAdvancedEncoder() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser p = new PropositionalParser(f); final Formula phi1 = p.parse(p1); final Formula phi2 = p.parse(p2); final Formula phi3 = p.parse(p3); final CNFEncoder encoder1 = new CNFEncoder(f, CNFConfig.builder().build()); assertThat(encoder1.encode(phi1)).isEqualTo(p.parse("(x1 | x2) & x3 & x4 & (x1 | x8 | x9) & (x5 | x8 | x9) & (~x6 | x8 | x9) & (~x7 | x8 | x9)")); final CNFEncoder encoder2 = new CNFEncoder(f, CNFConfig.builder().createdClauseBoundary(5).atomBoundary(3).build()); assertThat(encoder2.encode(phi2)).isEqualTo(p.parse("(y1 | y2) & y3 & y4 & (~@RESERVED_CNF_0 | y1) & (~@RESERVED_CNF_0 | y5) & (~@RESERVED_CNF_0 | ~y6) & (~@RESERVED_CNF_0 | ~y7) & (@RESERVED_CNF_0 | ~y1 | ~y5 | y6 | y7) & (@RESERVED_CNF_0 | y8 | y9)")); final CNFEncoder encoder3 = new CNFEncoder(f, CNFConfig.builder().createdClauseBoundary(-1).distributionBoundary(5).atomBoundary(3).build()); assertThat(encoder3.encode(phi3)).isEqualTo(p.parse("(z1 | z2) & z3 & z4 & (~@RESERVED_CNF_2 | z1) & (~@RESERVED_CNF_2 | z5) & (~@RESERVED_CNF_2 | ~z6) & (~@RESERVED_CNF_2 | ~z7) & (@RESERVED_CNF_2 | ~z1 | ~z5 | z6 | z7) & (@RESERVED_CNF_2 | z8 | z9)")); } @Test public void testStrings() { final String expected = String.format("CNFConfig{%n" + "algorithm=TSEITIN%n" + "fallbackAlgorithmForAdvancedEncoding=PLAISTED_GREENBAUM%n" + "distributedBoundary=-1%n" + "createdClauseBoundary=1000%n" + "atomBoundary=12%n" + "}%n"); final FormulaFactory f = new FormulaFactory(); final CNFConfig config = 
CNFConfig.builder().algorithm(CNFConfig.Algorithm.TSEITIN).fallbackAlgorithmForAdvancedEncoding(CNFConfig.Algorithm.PLAISTED_GREENBAUM).build(); final CNFEncoder encoder = new CNFEncoder(f, config); assertThat(config.toString()).isEqualTo(expected); assertThat(encoder.toString()).isEqualTo(expected); assertThat(CNFConfig.Algorithm.valueOf("TSEITIN")).isEqualTo(CNFConfig.Algorithm.TSEITIN); } @Test public void testBugIssueNo4() throws ParserException { final FormulaFactory f = new FormulaFactory(); final PropositionalParser parser = new PropositionalParser(f); final Formula f1 = parser.parse("(x10 & x9 & x3 & x12 | x10 & x9 & x8 | x9 & x8 & x12) & ~x5 & ~x7 & x1 | (x10 & x9 & x3 & x12 | x10 & x9 & x8 | x9 & x8 & x12) & ~(x11 & x3) & ~(x11 & x8) & ~x5 & ~x7 & x0"); final Formula f2 = parser.parse("x1 & x3 & x4"); final Formula f3 = parser.parse("(x10 & x9 & x3 & x12 | x10 & x9 & x8 | x9 & x8 & x12) & ~(x11 & x3) & ~(x11 & x8 & x12) & ~x5 & ~x7 & x1 | (x10 & x9 & x3 & x12 | x10 & x9 & x8 | x9 & x8 & x12) & ~(x11 & x3) & ~(x11 & x8) & ~x5 & ~x7 & x0 | x3 & x4 & ~x5 & ~x7 & x1 | x3 & x4 & ~x5 & ~x7 & x0 | x2 & x6 & ~x5 & ~x7 & x0"); final Formula f4 = parser.parse("(x1 & x3 & x4 | x0 & (x2 & x6 | x3 & x4) | x9 & (x1 & x10 & x8 & ~x12 & x3 | (x1 | x0) & (x12 & (x10 & x3 | x8) | x10 & x8) & ~x11)) & ~x5 & ~x7"); assertThat(f.not(f.equivalence(f1, f2)).cnf()).isNotEqualTo(null); assertThat(f.not(f.equivalence(f3, f4)).cnf()).isNotEqualTo(null); } @Test public void testWrongFallbackForConfig() { assertThatThrownBy(() -> CNFConfig.builder().fallbackAlgorithmForAdvancedEncoding(CNFConfig.Algorithm.FACTORIZATION).build()).isInstanceOf(IllegalArgumentException.class); } private boolean equivalentModels(final Formula f1, final Formula f2, final SortedSet<Variable> vars) { final SATSolver s = MiniSat.miniSat(f1.factory()); s.add(f1); final List<Assignment> models1 = s.enumerateAllModels(vars); s.reset(); s.add(f2); final List<Assignment> models2 = s.enumerateAllModels(vars); if (models1.size() != models2.size()) { return false; } for (final Assignment model : models1) { if (!models2.contains(model)) { return false; } } return true; } }
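/*
 * Editor's note: a minimal usage sketch, not part of the original test, showing the
 * encoder/config combination exercised above on a small formula; the formula string and
 * boundary value are placeholders.
 */
class CNFEncoderUsageSketch {
    public static void main(String[] args) throws ParserException {
        final FormulaFactory f = new FormulaFactory();
        final Formula phi = new PropositionalParser(f).parse("(a | b) & c & ((a & d & ~(e | g)) | h)");
        final CNFEncoder encoder = new CNFEncoder(f, CNFConfig.builder()
                .algorithm(CNFConfig.Algorithm.TSEITIN)
                .atomBoundary(3)
                .build());
        // prints a CNF of phi; depending on the boundaries, auxiliary @RESERVED_CNF_* variables may appear
        System.out.println(encoder.encode(phi));
    }
}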
package org.metacsp.utility.UI; /* * JFLAP - Formal Languages and Automata Package * * * Susan H. Rodger * Computer Science Department * Duke University * August 27, 2009 * Copyright (c) 2002-2009 * All rights reserved. * JFLAP is open source software. Please see the LICENSE for terms. * */ import java.awt.FontMetrics; import java.awt.Graphics2D; import java.awt.Graphics; import java.awt.Point; import java.awt.RenderingHints; import java.awt.geom.*; import java.awt.image.BufferedImage; /** * This is a simple class for storing and drawing a curved line with possible * arrow heads on it. * * @author Thomas Finley */ public class CurvedArrow { /** * Instantiates a <CODE>CurvedArrow</CODE> object. * * @param x1 * the x coordinate of the start point * @param y1 * the y coordinate of the start point * @param x2 * the x coordinate of the end point * @param y2 * the y coordinate of the end point * @param curvy * the curvi-ness factor; 0 will create a straight line; 1 and -1 * are rather curvy */ public CurvedArrow(int x1, int y1, int x2, int y2, float curvy) { curve = new QuadCurve2D.Float(); start = new Point(); end = new Point(); control = new Point(); setStart(x1, y1); setEnd(x2, y2); setCurvy(curvy); refreshCurve(); } /** * Instantiates a <CODE>CurvedArrow</CODE> object. * * @param start * the start point * @param end * the end point * @param curvy * the curvi-ness factor; 0 will create a straight line; 1 and -1 * are rather curvy */ public CurvedArrow(Point start, Point end, float curvy) { curve = new QuadCurve2D.Float(); setStart(start); setEnd(end); control = new Point(); setCurvy(curvy); refreshCurve(); } /** * Sets the start point. * * @param x1 * the x coordinate of the start point * @param y1 * the y coordinate of the start point */ public void setStart(int x1, int y1) { start.x = x1; start.y = y1; needsRefresh = true; } /** * Sets the start point. * * @param start * the new start point */ public void setStart(Point start) { this.start = start; needsRefresh = true; } /** * Sets the end point. * * @param x2 * the x coordinate of the end point * @param y2 * the y coordinate of the end point */ public void setEnd(int x2, int y2) { end.x = x2; end.y = y2; needsRefresh = true; } /** * Sets the end point. * * @param end * the new end point */ public void setEnd(Point end) { this.end = end; needsRefresh = true; } /** * Sets the "curvy-ness" factor. * * @param curvy * the new curvy factor */ public void setCurvy(float curvy) { this.curvy = curvy; needsRefresh = true; } /** * Draws the arrow on the indicated graphics environment. * * @param g * the graphics to draw this arrow upon */ public void draw(Graphics2D g) { if (needsRefresh) refreshCurve(); g.draw(curve); // Draws the main part of the arrow. drawArrow(g, end, control); // Draws the arrow head. drawText(g); } public void drawControlPoint(Graphics2D g){ //adjust later to center of circle = focus point g.drawOval((int)curve.getCtrlX() - 5, (int)curve.getCtrlY() - 5, 10,10); } /** * Draws a highlight of the curve. * * @param g * the graphics to draw the highlight of the curve upon */ public void drawHighlight(Graphics2D g) { if (needsRefresh) refreshCurve(); Graphics2D g2 = (Graphics2D) g.create(); g2.setStroke(new java.awt.BasicStroke(6.0f)); g2.setColor(HIGHLIGHT_COLOR); g2.draw(curve); g2.transform(affineToText); g2.fill(bounds); g2.dispose(); } /** * Draws the text on the high point of the arc. The text drawn is none other * than the label for this object, as retrieved from <CODE>getLabel</CODE>. 
* * @param g * the graphics object to draw the text upon */ public void drawText(Graphics2D g) { // We don't want to corrupt the graphics environs with our // affine transforms! Graphics2D g2 = (Graphics2D) g.create(); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2.transform(affineToText); // What about the text label? FontMetrics metrics = g2.getFontMetrics(); bounds = metrics.getStringBounds(getLabel(), g2); // Will the label appear to be upside down? boolean upsideDown = end.x < start.x; float dx = (float) bounds.getWidth() / 2.0f; float dy = (curvy < 0.0f) ^ upsideDown ? metrics.getAscent() : -metrics .getDescent(); bounds.setRect(bounds.getX() - dx, bounds.getY() + dy, bounds .getWidth(), bounds.getHeight()); for (int i = 0; i < label.length(); i += CHARS_PER_STEP) { String sublabel = label.substring(i, Math.min(i + CHARS_PER_STEP, label.length())); g2.drawString(sublabel, -dx, dy); dx -= metrics.getStringBounds(sublabel, g2).getWidth(); } // g2.drawString(label, -dx, dy); g2.dispose(); /* * if (GRAPHICS == null) { GRAPHICS = g.create(); METRICS = * GRAPHICS.getFontMetrics(); } */ } /** * Sets the label that will be drawn on the high arc point. * * @param label * the new label for the arrow */ public void setLabel(String label) { this.label = label; // if (GRAPHICS == null) return; bounds = METRICS.getStringBounds(getLabel(), GRAPHICS); boolean upsideDown = end.x < start.x; float dx = (float) bounds.getWidth() / 2.0f; float dy = (curvy < 0.0f) ^ upsideDown ? METRICS.getAscent() : -METRICS .getDescent(); bounds.setRect(bounds.getX() - dx, bounds.getY() + dy, bounds .getWidth(), bounds.getHeight()); //System.out.println("Setting label" + label); } /** * Returns the label for this arrow. * * @return the label for this arrow */ public String getLabel() { return this.label; } /** * Draws an arrow head on the graphics object. The arrow geometry is based * on the point of its head as well as another point, which the arrow is * defined as facing away from. This arrow head has no body. * * @param g * the graphics object to draw upon * @param head * the point that is the point of the head of the arrow * @param away * the point opposite from where the arrow is pointing, a point * along the line segment extending from the head backwards from * the head if this were an arrow with a line trailing the head */ private void drawArrow(Graphics g, Point head, Point away) { int endX, endY; double angle = Math.atan2((double) (away.x - head.x), (double) (away.y - head.y)); angle += ARROW_ANGLE; endX = ((int) (Math.sin(angle) * ARROW_LENGTH)) + head.x; endY = ((int) (Math.cos(angle) * ARROW_LENGTH)) + head.y; g.drawLine(head.x, head.y, endX, endY); angle -= 2 * ARROW_ANGLE; endX = ((int) (Math.sin(angle) * ARROW_LENGTH)) + head.x; endY = ((int) (Math.cos(angle) * ARROW_LENGTH)) + head.y; g.drawLine(head.x, head.y, endX, endY); } /** * Refreshes the curve object. */ public void refreshCurve() { // System.out.println("Curve refreshing"); needsRefresh = false; double lengthx = end.x - start.x; double lengthy = end.y - start.y; double centerx = ((double) (start.x + end.x)) / 2.0; double centery = ((double) (start.y + end.y)) / 2.0; double length = Math.sqrt(lengthx * lengthx + lengthy * lengthy); double factorx = length == 0.0 ? 0.0 : lengthx / length; double factory = length == 0.0 ? 
0.0 : lengthy / length; control.x = (int) (centerx + curvy * HEIGHT * factory); control.y = (int) (centery - curvy * HEIGHT * factorx); high.x = (int) (centerx + curvy * HEIGHT * factory / 2.0); high.y = (int) (centery - curvy * HEIGHT * factorx / 2.0); curve.setCurve((float) start.x, (float) start.y, (float) control.x, (float) control.y, (float) end.x, (float) end.y); affineToText = new AffineTransform(); affineToText.translate(high.x, high.y); affineToText.rotate(Math.atan2(lengthy, lengthx)); if (end.x < start.x) affineToText.rotate(Math.PI); } /** * Returns the bounds. * * @return the rectangular bounds for this curved arrow */ public Rectangle2D getBounds() { if (needsRefresh) refreshCurve(); Rectangle2D b = curve.getBounds(); Area area = new Area(bounds); area.transform(affineToText); b.add(area.getBounds()); return b; } /** * Determines if a point is on/near the curved arrow. * * @param point * the point to check * @param fudge * the radius around the point that should be checked for the * presence of the curve * @return <TT>true</TT> if the point is on the curve within a certain * fudge factor, <TT>false</TT> otherwise */ public boolean isNear(Point point, int fudge) { if (needsRefresh) refreshCurve(); try { if (bounds.contains(affineToText.inverseTransform(point, null))) return true; } catch (java.awt.geom.NoninvertibleTransformException e) { } catch (NullPointerException e) { System.err.println(e + " : " + bounds + " : " + affineToText); return false; } return intersects(point, fudge, curve); } /** * Checks if something is on the line. If it appears to be, then it * subdivides the curve into halves and tries again recursively until the * flatness of the curve is less than the fudge factor, so the check stays * accurate without ever flattening the whole curve up front. * * @param point * the point to check intersection * @param fudge * the "fudge" factor * @param c * the curve we're checking for intersection with * @return <TT>true</TT> if the point is on the curve within a certain * fudge factor, <TT>false</TT> otherwise */ private boolean intersects(Point point, int fudge, QuadCurve2D.Float c) { if (!c.intersects(point.x - fudge, point.y - fudge, fudge << 1, fudge << 1)) return false; if (c.getFlatness() < fudge) return true; QuadCurve2D.Float f1 = new QuadCurve2D.Float(), f2 = new QuadCurve2D.Float(); c.subdivide(f1, f2); return intersects(point, fudge, f1) || intersects(point, fudge, f2); } public QuadCurve2D getCurve(){ return curve; } /** The start, end, and single control points. */ protected Point start, end, control; /** The high point of the arc. */ private Point high = new Point(); /** The "curvy-ness" factor. */ protected float curvy; /** * The quad-curve that controls the shape of the long part of the arrow. */ protected QuadCurve2D.Float curve; /** * <CODE>true</CODE> if the curve needs to be refreshed, <CODE>false</CODE> * otherwise. */ protected boolean needsRefresh = true; /** Arrow flags. */ protected boolean startArrow = false, endArrow = false; /** The label for this arrow. */ protected String label = ""; /** The angle for the arrow heads. */ private static double ARROW_ANGLE = Math.PI / 10; /** The length of the arrow head edges. */ private static double ARROW_LENGTH = 15; /** The affine transform for "turning" text. */ private static AffineTransform AFFINE_TURN_180; /** The stored bounds. */ protected java.awt.geom.Rectangle2D bounds = new java.awt.Rectangle(0, 0); /** The affine transform for getting us to the text space.
*/ protected AffineTransform affineToText; /** The number of characters to draw in each step. */ private static final int CHARS_PER_STEP = 4; /** A graphics object. */ protected static Graphics GRAPHICS = null; /** A font metrics object. */ protected static FontMetrics METRICS; static { AFFINE_TURN_180 = new AffineTransform(); AFFINE_TURN_180.rotate(Math.PI); BufferedImage image = new BufferedImage(1, 1, BufferedImage.TYPE_INT_RGB); GRAPHICS = image.getGraphics(); METRICS = GRAPHICS.getFontMetrics(); } /** The high factor of a control point. */ private static double HEIGHT = 30.0; public static java.awt.Color HIGHLIGHT_COLOR = new java.awt.Color(255, 0, 0, 128); }
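// A minimal sketch of driving CurvedArrow from a Swing component, using only the constructor,
// setLabel and draw(Graphics2D) defined above; the demo panel, its geometry and the label text
// are illustrative assumptions.
package org.metacsp.utility.UI;

import java.awt.Graphics;
import java.awt.Graphics2D;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;

class CurvedArrowSketchPanel extends JPanel {
    // A gently curved, labelled arrow between two fixed points.
    private final CurvedArrow arrow = new CurvedArrow(40, 80, 260, 80, 0.5f);

    CurvedArrowSketchPanel() {
        arrow.setLabel("q0 to q1");
    }

    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        // draw() lazily refreshes the quad curve before painting the body, head and label.
        arrow.draw((Graphics2D) g);
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("CurvedArrow sketch");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(new CurvedArrowSketchPanel());
            frame.setSize(320, 180);
            frame.setVisible(true);
        });
    }
}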
package net.eleritec.fractalui; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import net.eleritec.fractalui.events.AbstractUITopic; import net.eleritec.fractalui.validation.UIValidationHelper; import net.eleritec.fractalui.validation.Validator; /** * Created by IntelliJ IDEA. * User: cbutler * Date: Mar 31, 2008 * Time: 12:58:03 PM * To change this template use File | Settings | File Templates. */ public abstract class AbstractViewController<T> { private ViewSnapshot<T> snapshot; private AbstractViewController<?> parent; private List<AbstractViewController<?>> children; private boolean ready; private ControllerLibrary library; private T dataModel; private ArrayList<AbstractViewPresenter<T>> presenters; private Set<AbstractUITopic> topics; private Validator<T> validator; private UIValidationHelper<T> validationHelper; protected abstract void initializeTopics(); protected abstract void initializeListeners(); protected AbstractViewController() { setValidationHelper(null); children = new ArrayList<AbstractViewController<?>>(0); presenters = new ArrayList<AbstractViewPresenter<T>>(); topics = new HashSet<AbstractUITopic>(); initializeTopics(); } public boolean isReady() { return ready; } public void setReady(boolean ready) { if(this.ready!=ready) { this.ready = ready; if(ready) { initializeListeners(); } } } public Set<AbstractUITopic> getTopics() { return topics; } public void setTopics(Set<AbstractUITopic> topics) { this.topics = topics; } public ControllerLibrary getLibrary() { return library; } public void setLibrary(ControllerLibrary library) { this.library = library; } public T getDataModel() { return dataModel; } public boolean isValidModel(T model) { return true; } public void setDataModel(T dataModel) { if(dataModel==null || isValidModel(dataModel)) { if(dataModel!=this.dataModel) { notifyModelChangePending(); } this.dataModel = dataModel; dataModelUpdated(); fireModelChanged(this); } } protected void notifyModelChangePending() { if(dataModel!=null) { cleanupDataModel(); } } protected void cleanupDataModel() { } protected void dataModelUpdated() { if(getDataModel()!=null) { setupDataModel(); } } protected void setupDataModel() { } protected void addTopics(AbstractUITopic...topics) { for(AbstractUITopic topic: topics) { addTopic(topic); } } protected void addTopic(AbstractUITopic topic) { if(topic!=null) { topics.add(topic); } } public void addView(AbstractView<T> view) { AbstractViewPresenter<T> presenter = view.getViewPresenter(); if(presenter.setController(this)) { presenters.add(presenter); } } public void removeView(AbstractView<T> view) { AbstractViewPresenter<T> presenter = view.getViewPresenter(); presenters.remove(presenter); } public List<AbstractViewPresenter<T>> getViewPresenters() { return presenters; } public void repaint() { for(AbstractViewPresenter<T> presenter: presenters) { presenter.repaint(); } } public void refreshView() { for(AbstractViewPresenter<T> presenter: presenters) { presenter.refreshView(); } } public AbstractUITopic getTopic(Class<?>...interfaces) { Set<AbstractUITopic> topics = getTopics(interfaces); return topics.size()==0? 
null: topics.iterator().next(); } public Set<AbstractUITopic> getTopics(Class<?>...interfaces) { Set<AbstractUITopic> matches = new HashSet<AbstractUITopic>(); for(AbstractUITopic topic: topics) { if(topic.isImplementor(interfaces)) { matches.add(topic); } } return matches; } public void fireModelChanged(Object source) { List<AbstractViewPresenter<T>> presenters = getViewPresenters(); for(AbstractViewPresenter<T> presenter: presenters) { presenter.onModelChanged(source); } } public void addChild(AbstractViewController<?> controller) { if(controller!=null && !children.contains(controller)) { children.add(controller); controller.parent = this; } } public void removeChild(AbstractViewController<?> controller) { if(controller!=null && children.contains(controller)) { children.remove(controller); controller.parent = null; } } public List<AbstractViewController<?>> getChildren() { return new ArrayList<AbstractViewController<?>>(children); } public AbstractViewController<?> getParent() { return parent; } public ViewSnapshot<T> getSnapshot() { if(snapshot==null) { snapshot = new ViewSnapshot<T>(this); } return snapshot; } protected List<? extends AbstractViewPresenter<T>> getViewPresenters(Class<?>...types) { return getSnapshot().getPresenters(types); } protected AbstractViewController<?> getSubcontroller(String name) { return library==null? null: library.getController(name); } protected Object getPresenterDispatcher(Class<?>...types) { List<? extends AbstractViewPresenter<T>> presenters = getViewPresenters(types); // if there's only one presenter, then we don't need to hide behind // a proxy if(presenters.size()==1) { return presenters.get(0); } // weed out the non-interface classes before we create our proxy List<Class<?>> interfaces = new ArrayList<Class<?>>(); for(Class<?> type: types) { if(type.isInterface()) { interfaces.add(type); } } Class<?>[] listenerTypes = interfaces.toArray(new Class[0]); return AbstractUITopic.createTopic(presenters, listenerTypes); } public Validator<T> getValidator() { return validator; } public void setValidator(Validator<T> validator) { this.validator = validator; } public void reportMessages(List<Object> messages) { AbstractView<T> view = getSnapshot().getView(); if(view!=null) { view.reportMessages(messages); } } public boolean validate() { return validationHelper.validate(); } public UIValidationHelper<T> getValidationHelper() { return validationHelper; } public void setValidationHelper(UIValidationHelper<T> validationHelper) { this.validationHelper = validationHelper==null? createDefaultValidationHelper(): validationHelper; this.validationHelper.setController(this); } protected UIValidationHelper<T> createDefaultValidationHelper() { return new UIValidationHelper<T>(this); } }
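// A minimal sketch of a concrete controller on top of AbstractViewController, assuming a plain
// String data model; the class name and the empty topic/listener wiring are illustrative, since
// the concrete AbstractUITopic implementations are not shown here.
package net.eleritec.fractalui;

class NoteViewController extends AbstractViewController<String> {

    @Override
    protected void initializeTopics() {
        // A real controller would register its UI topics here via addTopic(...)/addTopics(...).
    }

    @Override
    protected void initializeListeners() {
        // Called once setReady(true) is invoked; subscribe to the registered topics here.
    }

    @Override
    public boolean isValidModel(String model) {
        // setDataModel(...) only accepts null or models passing this check; reject blank notes.
        return model != null && !model.trim().isEmpty();
    }
}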
/* * Copyright 2017 Data Minded * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package be.dataminded.nifi.plugins.util; import org.apache.avro.Schema; import org.apache.avro.SchemaBuilder; import org.apache.avro.SchemaBuilder.FieldAssembler; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumWriter; import org.apache.commons.lang3.StringUtils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; import java.sql.*; import static java.sql.Types.*; /** * JDBC / SQL common functions. */ public class JdbcCommon { private static final int MAX_DIGITS_IN_BIGINT = 19; public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, boolean convertNames) throws SQLException, IOException { return convertToAvroStream(rs, outStream, null, null, convertNames); } public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, boolean convertNames) throws SQLException, IOException { return convertToAvroStream(rs, outStream, recordName, null, convertNames); } public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, boolean convertNames) throws IOException, SQLException { return convertToAvroStream(rs, outStream, recordName, callback, 0, convertNames); } public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException { final Schema schema = createSchema(rs, recordName, convertNames); final GenericRecord rec = new GenericData.Record(schema); final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema); try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(schema, outStream); final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); long nrOfRows = 0; while (rs.next()) { if (callback != null) { callback.processRow(rs); } for (int i = 1; i <= nrOfColumns; i++) { final int javaSqlType = meta.getColumnType(i); // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement if (javaSqlType == CLOB) { Clob clob = rs.getClob(i); if (clob != null) { long numChars = clob.length(); char[] buffer = new char[(int) numChars]; InputStream is = clob.getAsciiStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (char) c; c = is.read(); } rec.put(i - 1, new String(buffer)); clob.free(); } else { rec.put(i - 1, null); } continue; } if (javaSqlType == BLOB) { Blob blob = rs.getBlob(i); if (blob != null) { long numChars = blob.length(); byte[] buffer = new byte[(int) numChars]; 
InputStream is = blob.getBinaryStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (byte) c; c = is.read(); } ByteBuffer bb = ByteBuffer.wrap(buffer); rec.put(i - 1, bb); blob.free(); } else { rec.put(i - 1, null); } continue; } final Object value = rs.getObject(i); if (value == null) { rec.put(i - 1, null); } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) { // bytes require slightly different handling byte[] bytes = rs.getBytes(i); ByteBuffer bb = ByteBuffer.wrap(bytes); rec.put(i - 1, bb); } else if (value instanceof Byte) { // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT // But value is returned by JDBC as java.lang.Byte // (at least H2 JDBC works this way) // direct put to avro record results: // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte rec.put(i - 1, ((Byte) value).intValue()); } else if(value instanceof Short) { //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand. rec.put(i - 1, ((Short) value).intValue()); } else if (value instanceof BigDecimal) { // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38" try { int scale = meta.getScale(i); BigDecimal bigDecimal = ((BigDecimal) value); if (scale == 0) { if (meta.getPrecision(i) < 10) { rec.put(i - 1, bigDecimal.intValue()); } else { rec.put(i - 1, bigDecimal.longValue()); } } else { rec.put(i - 1, bigDecimal.doubleValue()); } } catch (Exception e) { rec.put(i - 1, value.toString()); } } else if (value instanceof BigInteger) { // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // If the SQL type is BIGINT and the precision is between 0 and 19 (inclusive), the BigInteger is likely a // long (and the schema says it will be), so try to get its value as a long. // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string. if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { try { rec.put(i - 1, ((BigInteger) value).longValueExact()); } catch (ArithmeticException ae) { // Since the value won't fit in a long, convert it to a string rec.put(i - 1, value.toString()); } } } else { rec.put(i - 1, value.toString()); } } else if (value instanceof Number || value instanceof Boolean) { if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { rec.put(i - 1, value); } } else { rec.put(i - 1, value); } } else { // The different types that we support are numbers (int, long, double, float), // as well as boolean values and Strings. Since Avro doesn't provide // timestamp types, we want to convert those to Strings. So we will cast anything other // than numbers or booleans to strings by using the toString() method. rec.put(i - 1, value.toString()); } } dataFileWriter.append(rec); nrOfRows += 1; if (maxRows > 0 && nrOfRows == maxRows) break; } return nrOfRows; } } public static Schema createSchema(final ResultSet rs) throws SQLException { return createSchema(rs, null, false); } /** * Creates an Avro schema from a result set.
If the table/record name is known a priori and provided, use that as a * fallback for the record name if it cannot be retrieved from the result set, and finally fall back to a default value. * * @param rs The result set to convert to Avro * @param recordName The a priori record name to use if it cannot be determined from the result set. * @return A Schema object representing the result set converted to an Avro record * @throws SQLException if any error occurs during conversion */ public static Schema createSchema(final ResultSet rs, String recordName, boolean convertNames) throws SQLException { final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); String tableName = StringUtils.isEmpty(recordName) ? "NiFi_ExecuteSQL_Record" : recordName; if (nrOfColumns > 0) { String tableNameFromMeta = meta.getTableName(1); if (!StringUtils.isBlank(tableNameFromMeta)) { tableName = tableNameFromMeta; } } if (convertNames) { tableName = normalizeNameForAvro(tableName); } final FieldAssembler<Schema> builder = SchemaBuilder.record(tableName).namespace("any.data").fields(); /** * Some missing Avro types - Decimal, Date types. May need some additional work. */ for (int i = 1; i <= nrOfColumns; i++) { /** * As per the JDBC 4 specs, getColumnLabel will have the alias for the column; if not, it will have the column name. * So it is a better option to check for the column label first and, in case it is null in some implementation, * fall back to the column name. Postgres is the one that has null column names for calculated fields. */ String nameOrLabel = StringUtils.isNotEmpty(meta.getColumnLabel(i)) ? meta.getColumnLabel(i) : meta.getColumnName(i); String columnName = convertNames ? normalizeNameForAvro(nameOrLabel) : nameOrLabel; switch (meta.getColumnType(i)) { case CHAR: case LONGNVARCHAR: case LONGVARCHAR: case NCHAR: case NVARCHAR: case VARCHAR: case CLOB: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion().noDefault(); break; case BIT: case BOOLEAN: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().booleanType().endUnion().noDefault(); break; case INTEGER: if (meta.isSigned(i)) { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion().noDefault(); } else { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion().noDefault(); } break; case SMALLINT: case TINYINT: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion().noDefault(); break; case BIGINT: // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // If the precision > 19 (or is negative), use a string for the type, otherwise use a long.
The object(s) will be converted // to strings as necessary int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion().noDefault(); } else { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion().noDefault(); } break; // java.sql.RowId is interface, is seems to be database // implementation specific, let's convert to String case ROWID: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion().noDefault(); break; case FLOAT: case REAL: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().floatType().endUnion().noDefault(); break; case DOUBLE: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion().noDefault(); break; // Did not find direct suitable type, need to be clarified!!!! case DECIMAL: case NUMERIC: int scale = meta.getScale(i); if (scale == 0) { if (meta.getPrecision(i) < 10) { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().intType().endUnion().noDefault(); } else { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().longType().endUnion().noDefault(); } } else { builder.name(columnName).type().unionOf().nullBuilder().endNull().and().doubleType().endUnion().noDefault(); } break; // Did not find direct suitable type, need to be clarified!!!! case DATE: case TIME: case TIMESTAMP: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().stringType().endUnion().noDefault(); break; case BINARY: case VARBINARY: case LONGVARBINARY: case ARRAY: case BLOB: builder.name(columnName).type().unionOf().nullBuilder().endNull().and().bytesType().endUnion().noDefault(); break; default: throw new IllegalArgumentException("createSchema: Unknown SQL type " + meta.getColumnType(i) + " cannot be converted to Avro type"); } } return builder.endRecord(); } private static String normalizeNameForAvro(String inputName) { String normalizedName = inputName.replaceAll("[^A-Za-z0-9_]", "_"); if (Character.isDigit(normalizedName.charAt(0))) { normalizedName = "_" + normalizedName; } return normalizedName; } /** * An interface for callback methods which allows processing of a row during the convertToAvroStream() processing. * <b>IMPORTANT:</b> This method should only work on the row pointed at by the current ResultSet reference. * Advancing the cursor (e.g.) can cause rows to be skipped during Avro transformation. */ public interface ResultSetRowCallback { void processRow(ResultSet resultSet) throws IOException; } }
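// A minimal end-to-end sketch for JdbcCommon: query a table and stream the ResultSet into an
// Avro container file with name normalization enabled. The in-memory H2 URL, the table and the
// output path are illustrative assumptions; only the convertToAvroStream(rs, out, recordName,
// convertNames) signature comes from the class above.
package be.dataminded.nifi.plugins.util;

import java.io.FileOutputStream;
import java.io.OutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

final class JdbcCommonSketch {
    public static void main(String[] args) throws Exception {
        try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement statement = connection.createStatement()) {
            statement.execute("CREATE TABLE person (id BIGINT, name VARCHAR(64))");
            statement.execute("INSERT INTO person VALUES (1, 'Ada'), (2, 'Grace')");

            try (ResultSet rs = statement.executeQuery("SELECT id, name FROM person");
                 OutputStream out = new FileOutputStream("person.avro")) {
                // convertNames = true sanitizes table and column names into valid Avro identifiers.
                long rows = JdbcCommon.convertToAvroStream(rs, out, "person_record", true);
                System.out.println("Wrote " + rows + " rows");
            }
        }
    }
}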
package com.rbmhtechnology.vind.solr.backend; import com.google.common.collect.Sets; import com.rbmhtechnology.vind.api.query.filter.FieldBasedFilter; import com.rbmhtechnology.vind.api.query.filter.Filter; import com.rbmhtechnology.vind.model.DocumentFactory; import com.rbmhtechnology.vind.model.FieldDescriptor; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.stream.Collectors; import static com.rbmhtechnology.vind.api.query.filter.Filter.*; import static com.rbmhtechnology.vind.solr.backend.SolrUtils.Fieldname.TYPE; /** * Created on 10.08.18. */ public class ChildrenFilterSerializer { private static Logger log = LoggerFactory.getLogger(SolrFilterSerializer.class); private final DocumentFactory parentFactory; private final boolean strict; private final String TYPE_FILTER = "%s:%s"; private final String CHILD_QUERY_TEMPLATE = "{!parent which='%s:%s' v='%s AND %s'}"; private String searchContext; private final DocumentFactory childFactory; private final boolean childrenSearch; public ChildrenFilterSerializer(DocumentFactory parentFactory, DocumentFactory childFactory, String searchContext, boolean strict, boolean childrenSearch){ this.parentFactory = parentFactory; this.childFactory = childFactory; this.searchContext = searchContext; this.strict = strict; this.childrenSearch = childrenSearch; } public String serialize(Filter filter){ final Filter normalizedFilter = normalize(filter); if (AndFilter.class.isAssignableFrom(normalizedFilter.getClass())) return serialize((AndFilter)normalizedFilter); else if (OrFilter.class.isAssignableFrom(normalizedFilter.getClass())) return serialize((OrFilter)normalizedFilter); else if (isHierarchicalFilter(normalizedFilter)) { final String parentFilter = new SolrFilterSerializer(parentFactory, strict).serialize(normalizedFilter,searchContext); return parentFilter; } else { final String childFilter = new SolrFilterSerializer(childFactory, strict).serialize(normalizedFilter,searchContext); return String.format(CHILD_QUERY_TEMPLATE, TYPE, parentFactory.getType(), String.format(TYPE_FILTER, TYPE, childFactory.getType()), childFilter); } } private String serialize(AndFilter filter){ //Get the filters which apply to the parent final String parentFilters = filter.getChildren().stream() .filter(f -> isHierarchicalFilter(f)) .map( f -> new SolrFilterSerializer(parentFactory, strict).serialize(f,searchContext)) .filter(Objects::nonNull) .collect(Collectors.joining(" AND ")); //Get the filter which apply to the children final String childrenFilters = filter.getChildren().stream() .filter(f -> !isHierarchicalFilter(f)) .map( f -> new SolrFilterSerializer(childFactory, strict).serialize(f,searchContext)) .filter(Objects::nonNull) .collect(Collectors.joining(" AND ")); if (StringUtils.isNotBlank(childrenFilters) && StringUtils.isNotBlank(parentFilters)){ return String.join(" AND ", String.format(TYPE_FILTER, TYPE, parentFactory.getType()), parentFilters, String.format(CHILD_QUERY_TEMPLATE, TYPE, parentFactory.getType(), String.format(TYPE_FILTER, TYPE, childFactory.getType()), childrenFilters)); } if ( StringUtils.isNotBlank(parentFilters)){ return String.join(" AND ", String.format(TYPE_FILTER, TYPE, parentFactory.getType()), parentFilters); } return String.format(CHILD_QUERY_TEMPLATE, TYPE, parentFactory.getType(), String.format(TYPE_FILTER, TYPE, childFactory.getType()), childrenFilters); } private String serialize(OrFilter 
filter) { final String andFilters = filter.getChildren().stream() .filter( f-> AndFilter.class.isAssignableFrom(f.getClass())) .filter(Objects::nonNull) .map(this::serialize) .map( andF -> "(" + andF +" )") .collect(Collectors.joining(" OR ")); final String basicFilters = filter.getChildren().stream() .filter( f-> !AndFilter.class.isAssignableFrom(f.getClass())) .filter(Objects::nonNull) .map(this::serialize) .collect(Collectors.joining(" OR ")); if (StringUtils.isNotBlank(basicFilters) && StringUtils.isNotBlank(andFilters)){ return String.join(" OR ", basicFilters, andFilters); } if ( StringUtils.isNotBlank(andFilters)){ return andFilters; } return basicFilters; } private Filter normalize(AndFilter filter){ final Set<Filter> normalizedFilters = filter.getChildren().stream() .map(this::normalize) .collect(Collectors.toSet()); //Get the basic filters already normalized final Set<Filter> normalizedChildren = normalizedFilters.stream() .filter(f -> !f.getType().equals(filter.getType()) && !f.getType().equals("OrFilter")) .collect(Collectors.toSet()); //Get the And filters and promote the children to this level normalizedFilters.stream(). filter(f -> f.getType().equals(filter.getType())) .forEach( af -> normalizedChildren.addAll(((AndFilter)af).getChildren())); final Set<Filter> orChildren = normalizedFilters.stream(). filter(f -> f.getType().equals("OrFilter")) .map( of -> normalize((OrFilter) of)) .collect(Collectors.toSet()); final Filter orFilterPivot = orChildren.stream() .findFirst() .orElse(null); if(Objects.nonNull(orFilterPivot)) { orChildren.remove(orFilterPivot); final Set<Filter> andResultFilters= ((OrFilter) orFilterPivot).getChildren().stream() .map( f-> AndFilter.fromSet(Sets.union( normalizedChildren, Sets.newHashSet(f)))) .map( af -> AndFilter.fromSet(Sets.union( orChildren, Sets.newHashSet(af)))) .collect(Collectors.toSet()); final Set<Filter> andResultNormalizedFilters = andResultFilters.stream() .map(f -> normalize(f)) .collect(Collectors.toSet()); if(CollectionUtils.isNotEmpty(andResultNormalizedFilters)) { return OrFilter.fromSet(andResultNormalizedFilters); } } if(CollectionUtils.isNotEmpty(orChildren)) { return OrFilter.fromSet(orChildren); } return AndFilter.fromSet(normalizedChildren); } private Filter normalize(OrFilter filter){ final List<Filter> normalizedFilters = filter.getChildren().stream() .map(this::normalize) .collect(Collectors.toList()); //Get the basic filters already normalized final Set<Filter> normalizedChildren = normalizedFilters.stream() .filter(f -> !f.getType().equals(filter.getType()) && !f.getType().equals("AndFilter")) .collect(Collectors.toSet()); normalizedFilters.stream(). filter(f -> f.getType().equals(filter.getType())) .forEach( of -> normalizedChildren.addAll(((OrFilter)of).getChildren())); normalizedFilters.stream(). 
filter( f -> f.getType().equals("AndFilter")) .forEach( f -> normalizedChildren.add(f)); return OrFilter.fromSet(normalizedChildren); } private Filter normalize(NotFilter filter){ final Filter normalizedDelegate = normalize(filter.getDelegate()); if(AndFilter.class.isAssignableFrom(normalizedDelegate.getClass())) { return OrFilter.fromSet(((AndFilter)normalizedDelegate).getChildren().stream() .map(f -> new NotFilter(f)) .collect(Collectors.toSet())); } else if(OrFilter.class.isAssignableFrom(normalizedDelegate.getClass())) { return AndFilter.fromSet(((OrFilter)normalizedDelegate).getChildren().stream() .map(f -> new NotFilter(f)) .collect(Collectors.toSet())); } else if(NotFilter.class.isAssignableFrom(normalizedDelegate.getClass())) { return ((NotFilter)normalizedDelegate).getDelegate(); } return filter; } protected Filter normalize(Filter filter){ if (AndFilter.class.isAssignableFrom(filter.getClass())) return normalize((AndFilter)filter); else if (OrFilter.class.isAssignableFrom(filter.getClass())) return normalize((OrFilter)filter); else if (NotFilter.class.isAssignableFrom(filter.getClass())) return normalize((NotFilter) filter); else return filter; } private boolean isHierarchicalFilter(Filter filter) { if(filter instanceof FieldBasedFilter) return isHierarchicalField(((FieldBasedFilter) filter).getField()); if(filter instanceof Filter.NotFilter) return isHierarchicalFilter((((NotFilter) filter).getDelegate())); if(filter instanceof Filter.ChildrenDocumentFilter) return true; throw new RuntimeException("Error parsing filter: Filter '" + filter.getClass() + "' not supported!"); } private boolean isHierarchicalField(String fieldName) { if(Objects.nonNull(this.childFactory)){ final FieldDescriptor parentDescriptor = this.parentFactory.getField(fieldName); final FieldDescriptor childDescriptor = this.childFactory.getField(fieldName); //Check if the field descriptor belongs to the parent and not to the children in a children search if(Objects.nonNull(parentDescriptor) && Objects.isNull(childDescriptor) && childrenSearch){ log.debug("The field [{}] is a parent property", fieldName); return true; } //Check if the field belongs to the parent in a parent search if(Objects.nonNull(parentDescriptor) && !childrenSearch) { log.debug("The field [{}] is a parent property", fieldName); return true; } return false; } log.debug("There is no children factory defined, therefore field [{}] is a parent property", fieldName); return true; } }
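// A minimal sketch of how ChildrenFilterSerializer splits a mixed AND filter into a parent
// clause plus a nested child query: "category" is modelled only on the parent factory and
// "language" only on the child factory. The DocumentFactoryBuilder, FieldDescriptorBuilder and
// Filter.eq/Filter.and calls are assumed from the public Vind API and are illustrative only.
package com.rbmhtechnology.vind.solr.backend;

import com.rbmhtechnology.vind.api.query.filter.Filter;
import com.rbmhtechnology.vind.model.DocumentFactory;
import com.rbmhtechnology.vind.model.DocumentFactoryBuilder;
import com.rbmhtechnology.vind.model.FieldDescriptorBuilder;
import com.rbmhtechnology.vind.model.SingleValueFieldDescriptor;

final class ChildrenFilterSerializerSketch {
    public static void main(String[] args) {
        final SingleValueFieldDescriptor.TextFieldDescriptor<String> category =
                new FieldDescriptorBuilder().buildTextField("category");
        final SingleValueFieldDescriptor.TextFieldDescriptor<String> language =
                new FieldDescriptorBuilder().buildTextField("language");

        final DocumentFactory parent = new DocumentFactoryBuilder("asset").addField(category).build();
        final DocumentFactory child = new DocumentFactoryBuilder("annotation").addField(language).build();

        // The category clause stays on the parent; the language clause ends up inside the
        // {!parent ...} child query template defined above.
        final Filter filter = Filter.and(Filter.eq("category", "news"), Filter.eq("language", "en"));

        final ChildrenFilterSerializer serializer =
                new ChildrenFilterSerializer(parent, child, null, false, true);
        System.out.println(serializer.serialize(filter));
    }
}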
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.vcs.log.data.index; import consulo.disposer.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.progress.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Condition; import consulo.disposer.Disposer; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.EmptyConsumer; import com.intellij.util.Processor; import com.intellij.util.ThrowableRunnable; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.indexing.StorageException; import com.intellij.util.io.*; import com.intellij.vcs.log.*; import com.intellij.vcs.log.data.*; import com.intellij.vcs.log.impl.FatalErrorHandler; import com.intellij.vcs.log.ui.filter.VcsLogTextFilterImpl; import com.intellij.vcs.log.util.PersistentSet; import com.intellij.vcs.log.util.PersistentSetImpl; import com.intellij.vcs.log.util.StopWatch; import com.intellij.vcs.log.util.TroveUtil; import consulo.logging.Logger; import consulo.util.collection.primitive.ints.IntSet; import consulo.util.collection.primitive.ints.IntSets; import gnu.trove.TIntHashSet; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.IntStream; import static com.intellij.vcs.log.data.index.VcsLogFullDetailsIndex.INDEX; import static com.intellij.vcs.log.util.PersistentUtil.*; public class VcsLogPersistentIndex implements VcsLogIndex, Disposable { private static final Logger LOG = Logger.getInstance(VcsLogPersistentIndex.class); private static final int VERSION = 0; @Nonnull private final Project myProject; @Nonnull private final FatalErrorHandler myFatalErrorsConsumer; @Nonnull private final VcsLogProgress myProgress; @Nonnull private final Map<VirtualFile, VcsLogProvider> myProviders; @Nonnull private final VcsLogStorage myHashMap; @Nonnull private final VcsUserRegistryImpl myUserRegistry; @Nonnull private final Set<VirtualFile> myRoots; @Nullable private final MyIndexStorage myIndexStorage; @Nonnull private final SingleTaskController<IndexingRequest, Void> mySingleTaskController = new MySingleTaskController(); @Nonnull private final Map<VirtualFile, AtomicInteger> myNumberOfTasks = ContainerUtil.newHashMap(); @Nonnull private Map<VirtualFile, TIntHashSet> myCommitsToIndex = new HashMap<>(); public VcsLogPersistentIndex(@Nonnull Project project, @Nonnull VcsLogStorage hashMap, @Nonnull VcsLogProgress progress, @Nonnull Map<VirtualFile, VcsLogProvider> providers, @Nonnull FatalErrorHandler fatalErrorsConsumer, @Nonnull Disposable disposableParent) { myHashMap = hashMap; myProject = project; myProgress = progress; myProviders = providers; myFatalErrorsConsumer = fatalErrorsConsumer; 
myRoots = ContainerUtil.newLinkedHashSet(); for (Map.Entry<VirtualFile, VcsLogProvider> entry : providers.entrySet()) { if (VcsLogProperties.get(entry.getValue(), VcsLogProperties.SUPPORTS_INDEXING)) { myRoots.add(entry.getKey()); } } myUserRegistry = (VcsUserRegistryImpl)ServiceManager.getService(myProject, VcsUserRegistry.class); myIndexStorage = createIndexStorage(fatalErrorsConsumer, calcLogId(myProject, providers)); for (VirtualFile root : myRoots) { myNumberOfTasks.put(root, new AtomicInteger()); } Disposer.register(disposableParent, this); } protected MyIndexStorage createIndexStorage(@Nonnull FatalErrorHandler fatalErrorHandler, @Nonnull String logId) { try { return IOUtil.openCleanOrResetBroken(() -> new MyIndexStorage(logId, myUserRegistry, myRoots, fatalErrorHandler, this), () -> MyIndexStorage.cleanup(logId)); } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); } return null; } public static int getVersion() { return VcsLogStorageImpl.VERSION + VERSION; } @Override public synchronized void scheduleIndex(boolean full) { if (myCommitsToIndex.isEmpty()) return; Map<VirtualFile, TIntHashSet> commitsToIndex = myCommitsToIndex; for (VirtualFile root : commitsToIndex.keySet()) { myNumberOfTasks.get(root).incrementAndGet(); } myCommitsToIndex = ContainerUtil.newHashMap(); mySingleTaskController.request(new IndexingRequest(commitsToIndex, full)); } private void storeDetail(@Nonnull VcsFullCommitDetails detail) { if (myIndexStorage == null) return; try { int index = myHashMap.getCommitIndex(detail.getId(), detail.getRoot()); myIndexStorage.messages.put(index, detail.getFullMessage()); myIndexStorage.trigrams.update(index, detail); myIndexStorage.users.update(index, detail); myIndexStorage.paths.update(index, detail); myIndexStorage.commits.put(index); } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); } } private void flush() { try { if (myIndexStorage != null) { myIndexStorage.messages.force(); myIndexStorage.trigrams.flush(); myIndexStorage.users.flush(); myIndexStorage.paths.flush(); myIndexStorage.commits.flush(); } } catch (StorageException e) { myFatalErrorsConsumer.consume(this, e); } } public void markCorrupted() { if (myIndexStorage != null) myIndexStorage.commits.markCorrupted(); } @Override public boolean isIndexed(int commit) { try { return myIndexStorage == null || myIndexStorage.commits.contains(commit); } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); } return false; } @Override public synchronized boolean isIndexed(@Nonnull VirtualFile root) { return myRoots.contains(root) && (!myCommitsToIndex.containsKey(root) && myNumberOfTasks.get(root).get() == 0); } @Override public synchronized void markForIndexing(int index, @Nonnull VirtualFile root) { if (isIndexed(index) || !myRoots.contains(root)) return; TIntHashSet set = myCommitsToIndex.get(root); if (set == null) { set = new TIntHashSet(); myCommitsToIndex.put(root, set); } set.add(index); } @Nonnull private <T> IntSet filter(@Nonnull PersistentMap<Integer, T> map, @Nonnull Condition<T> condition) { IntSet result = IntSets.newHashSet(); if (myIndexStorage == null) return result; try { Processor<Integer> processor = integer -> { try { T value = map.get(integer); if (value != null) { if (condition.value(value)) { result.add(integer); } } } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); return false; } return true; }; if (myIndexStorage.messages instanceof PersistentHashMap) { ((PersistentHashMap<Integer, 
T>)myIndexStorage.messages).processKeysWithExistingMapping(processor); } else { myIndexStorage.messages.processKeys(processor); } } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); } return result; } @Nonnull private IntSet filterUsers(@Nonnull Set<VcsUser> users) { if (myIndexStorage != null) { try { return myIndexStorage.users.getCommitsForUsers(users); } catch (IOException | StorageException e) { myFatalErrorsConsumer.consume(this, e); } catch (RuntimeException e) { processRuntimeException(e); } } return IntSets.newHashSet(); } @Nonnull private IntSet filterPaths(@Nonnull Collection<FilePath> paths) { if (myIndexStorage != null) { try { return myIndexStorage.paths.getCommitsForPaths(paths); } catch (IOException | StorageException e) { myFatalErrorsConsumer.consume(this, e); } catch (RuntimeException e) { processRuntimeException(e); } } return IntSets.newHashSet(); } @Nonnull public IntSet filterMessages(@Nonnull VcsLogTextFilter filter) { if (myIndexStorage != null) { try { if (!filter.isRegex()) { IntSet commitsForSearch = myIndexStorage.trigrams.getCommitsForSubstring(filter.getText()); if (commitsForSearch != null) { IntSet result = IntSets.newHashSet(); PrimitiveIterator.OfInt iterator = commitsForSearch.iterator(); while (iterator.hasNext()) { int commit = iterator.nextInt(); try { String value = myIndexStorage.messages.get(commit); if (value != null) { if (VcsLogTextFilterImpl.matches(filter, value)) { result.add(commit); } } } catch (IOException e) { myFatalErrorsConsumer.consume(this, e); break; } } return result; } } } catch (StorageException e) { myFatalErrorsConsumer.consume(this, e); } catch (RuntimeException e) { processRuntimeException(e); } return filter(myIndexStorage.messages, message -> VcsLogTextFilterImpl.matches(filter, message)); } return IntSet.of(); } private void processRuntimeException(@Nonnull RuntimeException e) { if (myIndexStorage != null) myIndexStorage.markCorrupted(); if (e.getCause() instanceof IOException || e.getCause() instanceof StorageException) { myFatalErrorsConsumer.consume(this, e); } else { throw new RuntimeException(e); } } @Override public boolean canFilter(@Nonnull List<VcsLogDetailsFilter> filters) { if (filters.isEmpty() || myIndexStorage == null) return false; for (VcsLogDetailsFilter filter : filters) { if (filter instanceof VcsLogTextFilter || filter instanceof VcsLogUserFilter || filter instanceof VcsLogStructureFilter) { continue; } return false; } return true; } @Override @Nonnull public Set<Integer> filter(@Nonnull List<VcsLogDetailsFilter> detailsFilters) { VcsLogTextFilter textFilter = ContainerUtil.findInstance(detailsFilters, VcsLogTextFilter.class); VcsLogUserFilter userFilter = ContainerUtil.findInstance(detailsFilters, VcsLogUserFilter.class); VcsLogStructureFilter pathFilter = ContainerUtil.findInstance(detailsFilters, VcsLogStructureFilter.class); IntSet filteredByMessage = null; if (textFilter != null) { filteredByMessage = filterMessages(textFilter); } IntSet filteredByUser = null; if (userFilter != null) { Set<VcsUser> users = ContainerUtil.newHashSet(); for (VirtualFile root : myRoots) { users.addAll(userFilter.getUsers(root)); } filteredByUser = filterUsers(users); } IntSet filteredByPath = null; if (pathFilter != null) { filteredByPath = filterPaths(pathFilter.getFiles()); } return TroveUtil.intersect(filteredByMessage, filteredByPath, filteredByUser); } @Nullable @Override public String getFullMessage(int index) { if (myIndexStorage != null) { try { return myIndexStorage.messages.get(index); } catch 
(IOException e) { myFatalErrorsConsumer.consume(this, e); } } return null; } @Override public void dispose() { } private static class MyIndexStorage { private static final String COMMITS = "commits"; private static final String MESSAGES = "messages"; private static final int MESSAGES_VERSION = 0; @Nonnull private final PersistentSet<Integer> commits; @Nonnull private final PersistentMap<Integer, String> messages; @Nonnull private final VcsLogMessagesTrigramIndex trigrams; @Nonnull private final VcsLogUserIndex users; @Nonnull private final VcsLogPathsIndex paths; private static final String INPUTS = "inputs"; public MyIndexStorage(@Nonnull String logId, @Nonnull VcsUserRegistryImpl userRegistry, @Nonnull Set<VirtualFile> roots, @Nonnull FatalErrorHandler fatalErrorHandler, @Nonnull Disposable parentDisposable) throws IOException { Disposable disposable = Disposable.newDisposable(); Disposer.register(parentDisposable, disposable); try { int version = getVersion(); File commitsStorage = getStorageFile(INDEX, COMMITS, logId, version, true); commits = new PersistentSetImpl<>(commitsStorage, EnumeratorIntegerDescriptor.INSTANCE, Page.PAGE_SIZE, null, version); Disposer.register(disposable, () -> catchAndWarn(commits::close)); File messagesStorage = getStorageFile(INDEX, MESSAGES, logId, VcsLogStorageImpl.VERSION + MESSAGES_VERSION, true); messages = new PersistentHashMap<>(messagesStorage, new IntInlineKeyDescriptor(), EnumeratorStringDescriptor.INSTANCE, Page.PAGE_SIZE); Disposer.register(disposable, () -> catchAndWarn(messages::close)); trigrams = new VcsLogMessagesTrigramIndex(logId, fatalErrorHandler, disposable); users = new VcsLogUserIndex(logId, userRegistry, fatalErrorHandler, disposable); paths = new VcsLogPathsIndex(logId, roots, fatalErrorHandler, disposable); } catch (Throwable t) { Disposer.dispose(disposable); throw t; } // cleanup of old index storage files // to remove after 2017.1 release cleanupOldStorageFile(MESSAGES, logId); cleanupOldStorageFile(INDEX + "-" + VcsLogMessagesTrigramIndex.TRIGRAMS, logId); cleanupOldStorageFile(INDEX + "-no-" + VcsLogMessagesTrigramIndex.TRIGRAMS, logId); cleanupOldStorageFile(INDEX + "-" + INPUTS + "-" + VcsLogMessagesTrigramIndex.TRIGRAMS, logId); cleanupOldStorageFile(INDEX + "-" + VcsLogPathsIndex.PATHS, logId); cleanupOldStorageFile(INDEX + "-no-" + VcsLogPathsIndex.PATHS, logId); cleanupOldStorageFile(INDEX + "-" + VcsLogPathsIndex.PATHS + "-ids", logId); cleanupOldStorageFile(INDEX + "-" + INPUTS + "-" + VcsLogPathsIndex.PATHS, logId); cleanupOldStorageFile(INDEX + "-" + VcsLogUserIndex.USERS, logId); cleanupOldStorageFile(INDEX + "-" + INPUTS + "-" + VcsLogUserIndex.USERS, logId); } void markCorrupted() { catchAndWarn(commits::markCorrupted); } private static void catchAndWarn(@Nonnull ThrowableRunnable<IOException> runnable) { try { runnable.run(); } catch (IOException e) { LOG.warn(e); } } private static void cleanup(@Nonnull String logId) { if (!cleanupStorageFiles(INDEX, logId)) { LOG.error("Could not clean up storage files in " + new File(LOG_CACHE, INDEX) + " starting with " + logId); } } } private class MySingleTaskController extends SingleTaskController<IndexingRequest, Void> { public MySingleTaskController() { super(EmptyConsumer.getInstance()); } @Override protected void startNewBackgroundTask() { ApplicationManager.getApplication().invokeLater(() -> { Task.Backgroundable task = new Task.Backgroundable(VcsLogPersistentIndex.this.myProject, "Indexing Commit Data", true, PerformInBackgroundOption.ALWAYS_BACKGROUND) { @Override 
public void run(@Nonnull ProgressIndicator indicator) { List<IndexingRequest> requests; while (!(requests = popRequests()).isEmpty()) { for (IndexingRequest request : requests) { try { request.run(indicator); } catch (ProcessCanceledException reThrown) { throw reThrown; } catch (Throwable t) { LOG.error("Error while indexing", t); } } } taskCompleted(null); } }; ProgressIndicator indicator = myProgress.createProgressIndicator(false); ProgressManager.getInstance().runProcessWithProgressAsynchronously(task, indicator); }); } } private class IndexingRequest { private static final int MAGIC_NUMBER = 150000; private static final int BATCH_SIZE = 1000; private final Map<VirtualFile, TIntHashSet> myCommits; private final boolean myFull; public IndexingRequest(@Nonnull Map<VirtualFile, TIntHashSet> commits, boolean full) { myCommits = commits; myFull = full; } public void run(@Nonnull ProgressIndicator indicator) { indicator.setIndeterminate(false); indicator.setFraction(0); long time = System.currentTimeMillis(); CommitsCounter counter = new CommitsCounter(indicator, myCommits.values().stream().mapToInt(TIntHashSet::size).sum()); LOG.debug("Indexing " + counter.allCommits + " commits"); for (VirtualFile root : myCommits.keySet()) { try { if (myFull) { indexAll(root, myCommits.get(root), counter); } else { indexOneByOne(root, myCommits.get(root), counter); } } finally { myNumberOfTasks.get(root).decrementAndGet(); } } LOG.debug(StopWatch.formatTime(System.currentTimeMillis() - time) + " for indexing " + counter.newIndexedCommits + " new commits out of " + counter.allCommits); int leftCommits = counter.allCommits - counter.newIndexedCommits - counter.oldCommits; if (leftCommits > 0) { LOG.warn("Did not index " + leftCommits + " commits"); } } private void indexOneByOne(@Nonnull VirtualFile root, @Nonnull TIntHashSet commitsSet, @Nonnull CommitsCounter counter) { IntStream commits = TroveUtil.stream(commitsSet).filter(c -> { if (isIndexed(c)) { counter.oldCommits++; return false; } return true; }); indexOneByOne(root, counter, commits); } private void indexOneByOne(@Nonnull VirtualFile root, @Nonnull CommitsCounter counter, @Nonnull IntStream commits) { // We pass hashes to VcsLogProvider#readFullDetails in batches // in order to avoid allocating too much memory for these hashes // (we have up to 150K commits here that will occupy up to 18Mb as Strings). 
TroveUtil.processBatches(commits, BATCH_SIZE, batch -> { counter.indicator.checkCanceled(); if (indexOneByOne(root, batch)) { counter.newIndexedCommits += batch.size(); } counter.displayProgress(); }); flush(); } private boolean indexOneByOne(@Nonnull VirtualFile root, @Nonnull TIntHashSet commits) { VcsLogProvider provider = myProviders.get(root); try { List<String> hashes = TroveUtil.map(commits, value -> myHashMap.getCommitId(value).getHash().asString()); provider.readFullDetails(root, hashes, VcsLogPersistentIndex.this::storeDetail); } catch (VcsException e) { LOG.error(e); commits.forEach(value -> { markForIndexing(value, root); return true; }); return false; } return true; } public void indexAll(@Nonnull VirtualFile root, @Nonnull TIntHashSet commitsSet, @Nonnull CommitsCounter counter) { TIntHashSet notIndexed = new TIntHashSet(); TroveUtil.stream(commitsSet).forEach(c -> { if (isIndexed(c)) { counter.oldCommits++; } else { notIndexed.add(c); } }); counter.displayProgress(); if (notIndexed.size() <= MAGIC_NUMBER) { indexOneByOne(root, counter, TroveUtil.stream(notIndexed)); } else { try { myProviders.get(root).readAllFullDetails(root, details -> { int index = myHashMap.getCommitIndex(details.getId(), details.getRoot()); if (notIndexed.contains(index)) { storeDetail(details); counter.newIndexedCommits++; } counter.indicator.checkCanceled(); counter.displayProgress(); }); } catch (VcsException e) { LOG.error(e); notIndexed.forEach(value -> { markForIndexing(value, root); return true; }); } } flush(); } } private static class CommitsCounter { @Nonnull public final ProgressIndicator indicator; public final int allCommits; public volatile int newIndexedCommits; public volatile int oldCommits; private CommitsCounter(@Nonnull ProgressIndicator indicator, int commits) { this.indicator = indicator; this.allCommits = commits; } public void displayProgress() { indicator.setFraction(((double)newIndexedCommits + oldCommits) / allCommits); } } }
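// A standalone sketch of the batching strategy used by IndexingRequest above: commit ids are
// consumed from a stream in fixed-size batches so that the corresponding hashes never have to
// be materialised all at once. Plain JDK code for illustration; it does not use the
// TroveUtil/VcsLogProvider APIs of the surrounding class.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.IntStream;

final class BatchingSketch {
    // Feeds ids to the consumer in chunks of at most batchSize, flushing the remainder at the end.
    static void processBatches(IntStream ids, int batchSize, Consumer<List<Integer>> consumer) {
        final List<Integer> batch = new ArrayList<>(batchSize);
        ids.forEach(id -> {
            batch.add(id);
            if (batch.size() == batchSize) {
                consumer.accept(new ArrayList<>(batch));
                batch.clear();
            }
        });
        if (!batch.isEmpty()) {
            consumer.accept(batch);
        }
    }

    public static void main(String[] args) {
        // 10 ids in batches of 4 -> [0..3], [4..7], [8..9]
        processBatches(IntStream.range(0, 10), 4, batch -> System.out.println("batch: " + batch));
    }
}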
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.webapps; import android.app.Activity; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.os.SystemClock; import android.text.TextUtils; import android.util.Base64; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import org.chromium.base.ApiCompatibilityUtils; import org.chromium.base.ContextUtils; import org.chromium.base.IntentUtils; import org.chromium.base.Log; import org.chromium.chrome.R; import org.chromium.chrome.browser.IntentHandler; import org.chromium.chrome.browser.ShortcutHelper; import org.chromium.chrome.browser.ShortcutSource; import org.chromium.chrome.browser.WarmupManager; import org.chromium.chrome.browser.browserservices.BrowserServicesIntentDataProvider; import org.chromium.chrome.browser.customtabs.BaseCustomTabActivity; import org.chromium.chrome.browser.document.ChromeLauncherActivity; import org.chromium.chrome.browser.firstrun.FirstRunFlowSequencer; import org.chromium.webapk.lib.client.WebApkValidator; import org.chromium.webapk.lib.common.WebApkConstants; import java.lang.ref.WeakReference; /** * Launches web apps. This was separated from the ChromeLauncherActivity because the * ChromeLauncherActivity is not allowed to be excluded from Android's Recents: crbug.com/517426. */ public class WebappLauncherActivity extends Activity { /** * Action fired when an Intent is trying to launch a WebappActivity. * Never change the package name or the Intents will fail to launch. */ public static final String ACTION_START_WEBAPP = "com.google.android.apps.chrome.webapps.WebappManager.ACTION_START_WEBAPP"; public static final String SECURE_WEBAPP_LAUNCHER = "org.chromium.chrome.browser.webapps.SecureWebAppLauncher"; public static final String ACTION_START_SECURE_WEBAPP = "org.chromium.chrome.browser.webapps.WebappManager.ACTION_START_SECURE_WEBAPP"; /** * Delay in ms for relaunching WebAPK as a result of getting intent with extra * {@link WebApkConstants.EXTRA_RELAUNCH}. The delay was chosen arbitrarily and seems to * work. */ private static final int WEBAPK_LAUNCH_DELAY_MS = 20; private static final String TAG = "webapps"; /** * Extracted parameters from the launch intent. */ @VisibleForTesting public static class LaunchData { public final String id; public final String url; public final boolean isForWebApk; public final String webApkPackageName; public final boolean isSplashProvidedByWebApk; public LaunchData( String id, String url, String webApkPackageName, boolean isSplashProvidedByWebApk) { this.id = id; this.url = url; this.isForWebApk = !TextUtils.isEmpty(webApkPackageName); this.webApkPackageName = webApkPackageName; this.isSplashProvidedByWebApk = isSplashProvidedByWebApk; } } /** Creates intent to relaunch WebAPK.
*/ public static Intent createRelaunchWebApkIntent( Intent sourceIntent, @NonNull String webApkPackageName, @NonNull String url) { Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); intent.setPackage(webApkPackageName); intent.setFlags( Intent.FLAG_ACTIVITY_NEW_TASK | ApiCompatibilityUtils.getActivityNewDocumentFlag()); Bundle extras = sourceIntent.getExtras(); if (extras != null) { intent.putExtras(extras); } return intent; } /** * Brings a live WebappActivity back to the foreground if one exists for the given tab ID. * @param tabId ID of the Tab to bring back to the foreground. * @return True if a live WebappActivity was found, false otherwise. */ public static boolean bringWebappToFront(int tabId) { WeakReference<BaseCustomTabActivity<?>> customTabActivity = WebappLocator.findWebappActivityWithTabId(tabId); if (customTabActivity == null || customTabActivity.get() == null) return false; customTabActivity.get().getWebContentsDelegate().activateContents(); return true; } /** * Generates parameters for the WebAPK first run experience for the given intent. Returns null * if the intent does not launch either a WebappLauncherActivity or a WebAPK Activity. This * method is slow. It makes several PackageManager calls. */ public static @Nullable BrowserServicesIntentDataProvider maybeSlowlyGenerateWebApkIntentDataProviderFromIntent(Intent fromIntent) { // Check for intents targeted at WebappActivity, WebappActivity0-9, // SameTaskWebApkActivity and WebappLauncherActivity. String targetActivityClassName = fromIntent.getComponent().getClassName(); if (!targetActivityClassName.startsWith(WebappActivity.class.getName()) && !targetActivityClassName.equals(SameTaskWebApkActivity.class.getName()) && !targetActivityClassName.equals(WebappLauncherActivity.class.getName())) { return null; } return WebApkIntentDataProviderFactory.create(fromIntent); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Close the notification tray. ContextUtils.getApplicationContext().sendBroadcast( new Intent(Intent.ACTION_CLOSE_SYSTEM_DIALOGS)); long createTimestamp = SystemClock.elapsedRealtime(); Intent intent = getIntent(); if (WebappActionsNotificationManager.handleNotificationAction(intent)) { finish(); return; } ChromeWebApkHost.init(); LaunchData launchData = extractLaunchData(intent); if (!shouldLaunchWebapp(intent, launchData)) { launchData = null; // This is not a valid WebAPK. Modify the intent so that WebApkInfo#create() (in the // first run logic) returns null. intent.removeExtra(WebApkConstants.EXTRA_WEBAPK_PACKAGE_NAME); } if (shouldRelaunchWebApk(intent, launchData)) { relaunchWebApk(this, intent, launchData); return; } if (FirstRunFlowSequencer.launch(this, intent, false /* requiresBroadcast */, shouldPreferLightweightFre(launchData))) { ApiCompatibilityUtils.finishAndRemoveTask(this); return; } if (launchData != null) { launchWebapp(this, intent, launchData, createTimestamp); return; } launchInTab(this, intent); } /** * Extracts {@link LaunchData} from the passed-in intent. Does not validate whether the intent * is a valid webapp or WebAPK launch intent. 
*/ private static LaunchData extractLaunchData(Intent intent) { String webApkPackageName = WebappIntentUtils.getWebApkPackageName(intent); boolean isSplashProvidedByWebApk = !TextUtils.isEmpty(webApkPackageName) && IntentUtils.safeGetBooleanExtra( intent, WebApkConstants.EXTRA_SPLASH_PROVIDED_BY_WEBAPK, false); return new LaunchData(WebappIntentUtils.getId(intent), WebappIntentUtils.getUrl(intent), webApkPackageName, isSplashProvidedByWebApk); } /** * Returns whether to prefer the Lightweight First Run Experience instead of the * non-Lightweight First Run Experience when launching the given webapp. */ private static boolean shouldPreferLightweightFre(LaunchData launchData) { // Use lightweight FRE for unbound WebAPKs. return launchData != null && launchData.webApkPackageName != null && !launchData.webApkPackageName.startsWith(WebApkConstants.WEBAPK_PACKAGE_PREFIX); } private static boolean shouldLaunchWebapp(Intent intent, LaunchData launchData) { Context appContext = ContextUtils.getApplicationContext(); if (launchData.isForWebApk) { // The LaunchData is valid if the WebAPK package is valid and the WebAPK has an intent // filter for the URL. if (!TextUtils.isEmpty(launchData.url) && WebApkValidator.canWebApkHandleUrl( appContext, launchData.webApkPackageName, launchData.url)) { return true; } Log.d(TAG, "%s is either not a WebAPK or %s is not within the WebAPK's scope", launchData.webApkPackageName, launchData.url); return false; } // The component is not exported and can only be launched by Chrome. if (intent.getComponent().equals(new ComponentName(appContext, SECURE_WEBAPP_LAUNCHER))) { return true; } String webappMac = IntentUtils.safeGetStringExtra(intent, ShortcutHelper.EXTRA_MAC); return (isValidMacForUrl(launchData.url, webappMac) || wasIntentFromChrome(intent)); } private static void launchWebapp(Activity launchingActivity, Intent intent, @NonNull LaunchData launchData, long createTimestamp) { Intent launchIntent = createIntentToLaunchForWebapp(intent, launchData, createTimestamp); WarmupManager.getInstance().maybePrefetchDnsForUrlInBackground( launchingActivity, launchData.url); IntentUtils.safeStartActivity(launchingActivity, launchIntent); if (IntentUtils.isIntentForNewTaskOrNewDocument(launchIntent)) { ApiCompatibilityUtils.finishAndRemoveTask(launchingActivity); } else { launchingActivity.finish(); launchingActivity.overridePendingTransition(0, R.anim.no_anim); } } /** * Returns whether {@link sourceIntent} was sent by a WebAPK to relaunch itself. * * A WebAPK sends an intent to Chrome to get relaunched when it knows it is about to get killed * as result of a call to PackageManager#setComponentEnabledSetting(). */ private static boolean shouldRelaunchWebApk(Intent sourceIntent, LaunchData launchData) { return launchData != null && launchData.isForWebApk && sourceIntent.hasExtra(WebApkConstants.EXTRA_RELAUNCH); } /** Relaunches WebAPK. */ private static void relaunchWebApk( Activity launchingActivity, Intent sourceIntent, @NonNull LaunchData launchData) { Intent launchIntent = createRelaunchWebApkIntent( sourceIntent, launchData.webApkPackageName, launchData.url); launchAfterDelay( launchingActivity.getApplicationContext(), launchIntent, WEBAPK_LAUNCH_DELAY_MS); ApiCompatibilityUtils.finishAndRemoveTask(launchingActivity); } /** Extracts start URL from source intent and launches URL in Chrome tab. 
*/ private static void launchInTab(Activity launchingActivity, Intent sourceIntent) { Context appContext = ContextUtils.getApplicationContext(); String webappUrl = IntentUtils.safeGetStringExtra(sourceIntent, ShortcutHelper.EXTRA_URL); int webappSource = IntentUtils.safeGetIntExtra( sourceIntent, ShortcutHelper.EXTRA_SOURCE, ShortcutSource.UNKNOWN); if (TextUtils.isEmpty(webappUrl)) return; Intent launchIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(webappUrl)); launchIntent.setClassName( appContext.getPackageName(), ChromeLauncherActivity.class.getName()); launchIntent.putExtra(ShortcutHelper.REUSE_URL_MATCHING_TAB_ELSE_NEW_TAB, true); launchIntent.putExtra(ShortcutHelper.EXTRA_SOURCE, webappSource); launchIntent.setFlags( Intent.FLAG_ACTIVITY_NEW_TASK | ApiCompatibilityUtils.getActivityNewDocumentFlag()); Log.e(TAG, "Shortcut (%s) opened in Chrome.", webappUrl); IntentUtils.safeStartActivity(appContext, launchIntent); ApiCompatibilityUtils.finishAndRemoveTask(launchingActivity); } /** * Checks whether or not the MAC is present and valid for the web app shortcut. * * The MAC is used to prevent malicious apps from launching Chrome into a full screen * Activity for phishing attacks (among other reasons). * * @param url The URL for the web app. * @param mac MAC to compare the URL against. See {@link WebappAuthenticator}. * @return Whether the MAC is valid for the URL. */ private static boolean isValidMacForUrl(String url, String mac) { return mac != null && WebappAuthenticator.isUrlValid(url, Base64.decode(mac, Base64.DEFAULT)); } private static boolean wasIntentFromChrome(Intent intent) { return IntentHandler.wasIntentSenderChrome(intent); } /** Returns the class name of the {@link WebappActivity} subclass to launch. */ private static String selectWebappActivitySubclass(@NonNull LaunchData launchData) { return launchData.isSplashProvidedByWebApk ? SameTaskWebApkActivity.class.getName() : WebappActivity.class.getName(); } /** Returns intent to launch for the web app. */ @VisibleForTesting public static Intent createIntentToLaunchForWebapp( Intent intent, @NonNull LaunchData launchData, long createTimestamp) { String launchActivityClassName = selectWebappActivitySubclass(launchData); Intent launchIntent = new Intent(); launchIntent.setClassName(ContextUtils.getApplicationContext(), launchActivityClassName); launchIntent.setAction(Intent.ACTION_VIEW); // Firing intents with the exact same data should relaunch a particular Activity. launchIntent.setData(Uri.parse(WebappActivity.WEBAPP_SCHEME + "://" + launchData.id)); IntentHandler.addTimestampToIntent(launchIntent, createTimestamp); if (launchData.isForWebApk) { WebappIntentUtils.copyWebApkLaunchIntentExtras(intent, launchIntent); } else { WebappIntentUtils.copyWebappLaunchIntentExtras(intent, launchIntent); } // Setting FLAG_ACTIVITY_CLEAR_TOP handles 2 edge cases: // - If a legacy PWA is launching from a notification, we want to ensure that the URL being // launched is the URL in the intent. If a paused WebappActivity exists for this id, // then by default it will be focused and we have no way of sending the desired URL to // it (the intent is swallowed). As a workaround, set the CLEAR_TOP flag to ensure that // the existing Activity handles an update via onNewIntent(). // - If a WebAPK has a CustomTabActivity on top of it in the same Task, and the user // clicks a link that takes them back to the scope of the WebAPK, we want to destroy the // CustomTabActivity and go back to the WebAPK activity.
It is intentional that // Custom Tab will not be reachable with a back button. if (launchData.isSplashProvidedByWebApk) { launchIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NO_ANIMATION | Intent.FLAG_ACTIVITY_FORWARD_RESULT); } else { launchIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | ApiCompatibilityUtils.getActivityNewDocumentFlag() | Intent.FLAG_ACTIVITY_CLEAR_TOP); } return launchIntent; } /** Launches intent after a delay. */ private static void launchAfterDelay(Context appContext, Intent intent, int launchDelayMs) { new Handler().postDelayed(new Runnable() { @Override public void run() { IntentUtils.safeStartActivity(appContext, intent); } }, launchDelayMs); } }
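/*
 * A minimal standalone sketch of the idea behind isValidMacForUrl() above: the launch intent
 * carries a Base64-encoded MAC over the URL, and the launcher recomputes and compares it so that
 * other apps cannot forge full-screen web app launches. The HMAC-SHA256 algorithm and the
 * hard-coded demo key below are assumptions for illustration only; Chromium's WebappAuthenticator
 * manages its own key material.
 */
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

public class UrlMacSketch {
    private static final byte[] DEMO_KEY = "demo-only-key".getBytes(StandardCharsets.UTF_8);

    static byte[] computeMac(String url) throws Exception {
        Mac mac = Mac.getInstance("HmacSHA256");
        mac.init(new SecretKeySpec(DEMO_KEY, "HmacSHA256"));
        return mac.doFinal(url.getBytes(StandardCharsets.UTF_8));
    }

    static boolean isUrlValid(String url, byte[] expectedMac) throws Exception {
        // Constant-time comparison, as a MAC check should use.
        return MessageDigest.isEqual(computeMac(url), expectedMac);
    }

    public static void main(String[] args) throws Exception {
        String url = "https://example.com/app";
        String macExtra = Base64.getEncoder().encodeToString(computeMac(url)); // what the intent extra would carry
        System.out.println(isUrlValid(url, Base64.getDecoder().decode(macExtra)));                  // true
        System.out.println(isUrlValid("https://evil.example/", Base64.getDecoder().decode(macExtra))); // false
    }
}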
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.jdbc; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import com.google.common.util.concurrent.AbstractFuture; import io.prestosql.execution.QueryInfo; import io.prestosql.execution.warnings.WarningCollectorConfig; import io.prestosql.plugin.blackhole.BlackHolePlugin; import io.prestosql.plugin.tpch.TpchPlugin; import io.prestosql.server.testing.TestingPrestoServer; import io.prestosql.spi.PrestoWarning; import io.prestosql.spi.WarningCode; import io.prestosql.sql.parser.SqlParserOptions; import io.prestosql.testing.TestingWarningCollector; import io.prestosql.testing.TestingWarningCollectorConfig; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutorService; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.prestosql.jdbc.TestPrestoDriver.closeQuietly; import static io.prestosql.jdbc.TestPrestoDriver.waitForNodeRefresh; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.Executors.newSingleThreadExecutor; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; @Test(singleThreaded = true) public class TestJdbcWarnings { // Number of warnings preloaded to the testing warning collector before a query runs private static final int PRELOADED_WARNINGS = 5; private TestingPrestoServer server; private Connection connection; private Statement statement; @BeforeClass public void setupServer() throws Exception { server = new TestingPrestoServer( true, ImmutableMap.<String, String>builder() .put("testing-warning-collector.add-warnings", "true") .put("testing-warning-collector.preloaded-warnings", String.valueOf(PRELOADED_WARNINGS)) .build(), null, null, new SqlParserOptions(), ImmutableList.of()); server.installPlugin(new TpchPlugin()); server.createCatalog("tpch", "tpch"); server.installPlugin(new BlackHolePlugin()); server.createCatalog("blackhole", "blackhole"); waitForNodeRefresh(server); } @AfterClass(alwaysRun = true) public void teardownServer() { closeQuietly(server); } @SuppressWarnings("JDBCResourceOpenedButNotSafelyClosed") @BeforeMethod public void setup() throws Exception { connection = createConnection(); statement = connection.createStatement(); } @AfterMethod public void teardown() { 
closeQuietly(statement); closeQuietly(connection); } @Test public void testStatementWarnings() throws SQLException { assertFalse(statement.execute("CREATE SCHEMA blackhole.test_schema")); SQLWarning warning = statement.getWarnings(); assertNotNull(warning); TestingWarningCollectorConfig warningCollectorConfig = new TestingWarningCollectorConfig().setPreloadedWarnings(PRELOADED_WARNINGS); TestingWarningCollector warningCollector = new TestingWarningCollector(new WarningCollectorConfig(), warningCollectorConfig); List<PrestoWarning> expectedWarnings = warningCollector.getWarnings(); assertStartsWithExpectedWarnings(warning, fromPrestoWarnings(expectedWarnings)); statement.clearWarnings(); assertNull(statement.getWarnings()); } @Test public void testLongRunningStatement() throws SQLException, InterruptedException { ExecutorService queryExecutor = newSingleThreadExecutor(daemonThreadsNamed("test-%s")); QueryCreationFuture queryCreationFuture = new QueryCreationFuture(); queryExecutor.submit(() -> { try { statement.execute("CREATE SCHEMA blackhole.blackhole"); statement.execute("CREATE TABLE blackhole.blackhole.test_table AS SELECT 1 AS col1 FROM tpch.sf1.lineitem CROSS JOIN tpch.sf1.lineitem"); queryCreationFuture.set(null); } catch (Throwable e) { queryCreationFuture.setException(e); } }); while (statement.getWarnings() == null) { Thread.sleep(100); } SQLWarning warning = statement.getWarnings(); Set<WarningEntry> currentWarnings = new HashSet<>(); assertTrue(currentWarnings.add(new WarningEntry(warning))); for (int warnings = 1; !queryCreationFuture.isDone() && warnings < 100; warnings++) { for (SQLWarning nextWarning = warning.getNextWarning(); nextWarning == null; nextWarning = warning.getNextWarning()) { // Wait for new warnings } warning = warning.getNextWarning(); assertTrue(currentWarnings.add(new WarningEntry(warning))); Thread.sleep(100); } assertEquals(currentWarnings.size(), 100); queryExecutor.shutdownNow(); } @Test public void testLongRunningQuery() throws SQLException, InterruptedException { ExecutorService queryExecutor = newSingleThreadExecutor(daemonThreadsNamed("test-%s")); QueryCreationFuture queryCreationFuture = new QueryCreationFuture(); queryExecutor.submit(() -> { try { statement.execute("SELECT 1 AS col1 FROM tpch.sf1.lineitem CROSS JOIN tpch.sf1.lineitem"); queryCreationFuture.set(null); } catch (Throwable e) { queryCreationFuture.setException(e); } }); while (statement.getResultSet() == null) { Thread.sleep(100); } ResultSet resultSet = statement.getResultSet(); Set<WarningEntry> currentWarnings = new HashSet<>(); for (int rows = 0; !queryCreationFuture.isDone() && rows < 10; ) { if (resultSet.next()) { for (SQLWarning warning = resultSet.getWarnings(); warning.getNextWarning() != null; warning = warning.getNextWarning()) { assertTrue(currentWarnings.add(new WarningEntry(warning.getNextWarning()))); } } else { break; } Thread.sleep(100); } queryExecutor.shutdownNow(); } @Test public void testExecuteQueryWarnings() throws SQLException { try (ResultSet rs = statement.executeQuery("SELECT a FROM (VALUES 1, 2, 3) t(a)")) { assertNull(statement.getConnection().getWarnings()); assertNull(statement.getWarnings()); assertNull(rs.getWarnings()); Set<WarningEntry> currentWarnings = new HashSet<>(); while (rs.next()) { assertWarnings(rs.getWarnings(), currentWarnings); } TestingWarningCollectorConfig warningCollectorConfig = new TestingWarningCollectorConfig().setPreloadedWarnings(PRELOADED_WARNINGS).setAddWarnings(true); TestingWarningCollector warningCollector = new 
TestingWarningCollector(new WarningCollectorConfig(), warningCollectorConfig); List<PrestoWarning> expectedWarnings = warningCollector.getWarnings(); for (PrestoWarning prestoWarning : expectedWarnings) { assertTrue(currentWarnings.contains(new WarningEntry(new PrestoSqlWarning(prestoWarning)))); } } } @Test public void testSqlWarning() { ImmutableList.Builder<PrestoWarning> builder = ImmutableList.builder(); for (int i = 0; i < 3; i++) { builder.add(new PrestoWarning(new WarningCode(i, "CODE_" + i), "warning message " + i)); } List<PrestoWarning> warnings = builder.build(); SQLWarning warning = fromPrestoWarnings(warnings); assertEquals(Iterators.size(warning.iterator()), warnings.size()); assertWarningsEqual(warning, new PrestoSqlWarning(warnings.get(0))); assertWarningsEqual(warning.getNextWarning(), new PrestoSqlWarning(warnings.get(1))); assertWarningsEqual(warning.getNextWarning().getNextWarning(), new PrestoSqlWarning(warnings.get(2))); } private static SQLWarning fromPrestoWarnings(List<PrestoWarning> warnings) { requireNonNull(warnings, "warnings is null"); assertFalse(warnings.isEmpty()); Iterator<PrestoWarning> iterator = warnings.iterator(); PrestoSqlWarning first = new PrestoSqlWarning(iterator.next()); SQLWarning current = first; while (iterator.hasNext()) { current.setNextWarning(new PrestoSqlWarning(iterator.next())); current = current.getNextWarning(); } return first; } private static void assertWarningsEqual(SQLWarning actual, SQLWarning expected) { assertEquals(actual.getMessage(), expected.getMessage()); assertEquals(actual.getSQLState(), expected.getSQLState()); assertEquals(actual.getErrorCode(), expected.getErrorCode()); } private static void addWarnings(Set<WarningEntry> currentWarnings, SQLWarning newWarning) { if (newWarning == null) { return; } for (Throwable warning : newWarning) { WarningEntry entry = new WarningEntry(warning); currentWarnings.add(entry); } } //TODO: this method seems to be copied in multiple test classes in this package, should it be moved to a utility? 
private Connection createConnection() throws SQLException { String url = format("jdbc:presto://%s/%s/%s", server.getAddress(), "blackhole", "blackhole"); return DriverManager.getConnection(url, "test", null); } private static void assertWarnings(SQLWarning warning, Set<WarningEntry> currentWarnings) { assertNotNull(warning); int previousSize = currentWarnings.size(); addWarnings(currentWarnings, warning); assertTrue(currentWarnings.size() >= previousSize); } private static void assertStartsWithExpectedWarnings(SQLWarning warning, SQLWarning expected) { assertNotNull(expected); assertNotNull(warning); while (true) { assertWarningsEqual(warning, expected); warning = warning.getNextWarning(); expected = expected.getNextWarning(); if (expected == null) { return; } assertNotNull(warning); } } private static class WarningEntry { public final int vendorCode; public final String sqlState; public final String message; public WarningEntry(Throwable throwable) { requireNonNull(throwable, "throwable is null"); assertTrue(throwable instanceof SQLWarning); SQLWarning warning = (SQLWarning) throwable; this.vendorCode = warning.getErrorCode(); this.sqlState = requireNonNull(warning.getSQLState(), "SQLState is null"); this.message = requireNonNull(warning.getMessage(), "message is null"); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof WarningEntry)) { return false; } WarningEntry that = (WarningEntry) other; return vendorCode == that.vendorCode && sqlState.equals(that.sqlState) && message.equals(that.message); } @Override public int hashCode() { return Objects.hash(vendorCode, sqlState, message); } } private static class QueryCreationFuture extends AbstractFuture<QueryInfo> { @Override protected boolean set(QueryInfo value) { return super.set(value); } @Override protected boolean setException(Throwable throwable) { return super.setException(throwable); } @Override public boolean cancel(boolean mayInterruptIfRunning) { // query submission cannot be canceled return false; } } }
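/*
 * Small self-contained sketch (not part of the Presto test suite) of the SQLWarning chaining that
 * fromPrestoWarnings() and the assertions above rely on: warnings form a linked list through
 * setNextWarning()/getNextWarning(), with setNextWarning() appending to the end of the chain.
 * The messages, SQLSTATE, and vendor codes below are made up for the example.
 */
import java.sql.SQLWarning;

public class WarningChainSketch {
    public static void main(String[] args) {
        SQLWarning first = new SQLWarning("first warning", "01000", 1);
        first.setNextWarning(new SQLWarning("second warning", "01000", 2));
        first.setNextWarning(new SQLWarning("third warning", "01000", 3)); // appended after the second warning

        // Walk the chain the same way the helpers above do.
        for (SQLWarning w = first; w != null; w = w.getNextWarning()) {
            System.out.println(w.getErrorCode() + " " + w.getSQLState() + ": " + w.getMessage());
        }
    }
}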
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.assertions; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.Serializable; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.testelement.AbstractScopedAssertion; import org.apache.jmeter.testelement.property.BooleanProperty; import org.apache.jmeter.testelement.property.StringProperty; import org.apache.jmeter.util.TidyException; import org.apache.jmeter.util.XPathUtil; import org.apache.jorphan.logging.LoggingManager; import org.apache.jorphan.util.JOrphanUtils; import org.apache.log.Logger; import org.w3c.dom.Document; import org.xml.sax.SAXException; /** * Checks if the result is a well-formed XML content and whether it matches an * XPath * */ public class XPathAssertion extends AbstractScopedAssertion implements Serializable, Assertion { private static final Logger log = LoggingManager.getLoggerForClass(); private static final long serialVersionUID = 240L; //+ JMX file attributes private static final String XPATH_KEY = "XPath.xpath"; // $NON-NLS-1$ private static final String WHITESPACE_KEY = "XPath.whitespace"; // $NON-NLS-1$ private static final String VALIDATE_KEY = "XPath.validate"; // $NON-NLS-1$ private static final String TOLERANT_KEY = "XPath.tolerant"; // $NON-NLS-1$ private static final String NEGATE_KEY = "XPath.negate"; // $NON-NLS-1$ private static final String NAMESPACE_KEY = "XPath.namespace"; // $NON-NLS-1$ private static final String QUIET_KEY = "XPath.quiet"; // $NON-NLS-1$ private static final String REPORT_ERRORS_KEY = "XPath.report_errors"; // $NON-NLS-1$ private static final String SHOW_WARNINGS_KEY = "XPath.show_warnings"; // $NON-NLS-1$ private static final String DOWNLOAD_DTDS = "XPath.download_dtds"; // $NON-NLS-1$ //- JMX file attributes public static final String DEFAULT_XPATH = "/"; /** * Returns the result of the Assertion. 
Checks if the result is well-formed * XML, and that the XPath expression is matched (or not, as the case may * be) */ @Override public AssertionResult getResult(SampleResult response) { // no error as default AssertionResult result = new AssertionResult(getName()); result.setFailure(false); result.setFailureMessage(""); byte[] responseData = null; Document doc = null; try { if (isScopeVariable()){ String inputString=getThreadContext().getVariables().get(getVariableName()); if(!StringUtils.isEmpty(inputString)) { responseData = inputString.getBytes("UTF-8"); } } else { responseData = response.getResponseData(); } if (responseData == null || responseData.length == 0) { return result.setResultForNull(); } if (log.isDebugEnabled()) { log.debug(new StringBuilder("Validation is set to ").append(isValidating()).toString()); log.debug(new StringBuilder("Whitespace is set to ").append(isWhitespace()).toString()); log.debug(new StringBuilder("Tolerant is set to ").append(isTolerant()).toString()); } boolean isXML = JOrphanUtils.isXML(responseData); doc = XPathUtil.makeDocument(new ByteArrayInputStream(responseData), isValidating(), isWhitespace(), isNamespace(), isTolerant(), isQuiet(), showWarnings() , reportErrors(), isXML , isDownloadDTDs()); } catch (SAXException e) { log.debug("Caught sax exception: " + e); result.setError(true); result.setFailureMessage(new StringBuilder("SAXException: ").append(e.getMessage()).toString()); return result; } catch (IOException e) { log.warn("Cannot parse result content", e); result.setError(true); result.setFailureMessage(new StringBuilder("IOException: ").append(e.getMessage()).toString()); return result; } catch (ParserConfigurationException e) { log.warn("Cannot parse result content", e); result.setError(true); result.setFailureMessage(new StringBuilder("ParserConfigurationException: ").append(e.getMessage()) .toString()); return result; } catch (TidyException e) { result.setError(true); result.setFailureMessage(e.getMessage()); return result; } if (doc == null || doc.getDocumentElement() == null) { result.setError(true); result.setFailureMessage("Document is null, probably not parsable"); return result; } XPathUtil.computeAssertionResult(result, doc, getXPathString(), isNegated()); return result; } /** * Get The XPath String that will be used in matching the document * * @return String xpath String */ public String getXPathString() { return getPropertyAsString(XPATH_KEY, DEFAULT_XPATH); } /** * Set the XPath String this will be used as an xpath * * @param xpath * String */ public void setXPathString(String xpath) { setProperty(new StringProperty(XPATH_KEY, xpath)); } /** * Set whether to ignore element whitespace * * @param whitespace Flag whether whitespace elements should be ignored */ public void setWhitespace(boolean whitespace) { setProperty(new BooleanProperty(WHITESPACE_KEY, whitespace)); } /** * Set use validation * * @param validate Flag whether validation should be used */ public void setValidating(boolean validate) { setProperty(new BooleanProperty(VALIDATE_KEY, validate)); } /** * Set whether this is namespace aware * * @param namespace Flag whether namespace should be used */ public void setNamespace(boolean namespace) { setProperty(new BooleanProperty(NAMESPACE_KEY, namespace)); } /** * Set tolerant mode if required * * @param tolerant * true/false */ public void setTolerant(boolean tolerant) { setProperty(new BooleanProperty(TOLERANT_KEY, tolerant)); } public void setNegated(boolean negate) { setProperty(new BooleanProperty(NEGATE_KEY, 
negate)); } /** * Is whitespace ignored? * * @return boolean */ public boolean isWhitespace() { return getPropertyAsBoolean(WHITESPACE_KEY, false); } /** * Is this validating? * * @return boolean */ public boolean isValidating() { return getPropertyAsBoolean(VALIDATE_KEY, false); } /** * Is this namespace aware? * * @return boolean */ public boolean isNamespace() { return getPropertyAsBoolean(NAMESPACE_KEY, false); } /** * Is this using tolerant mode? * * @return boolean */ public boolean isTolerant() { return getPropertyAsBoolean(TOLERANT_KEY, false); } /** * Negate the XPath test, that is, return true if something is not found. * * @return boolean negated */ public boolean isNegated() { return getPropertyAsBoolean(NEGATE_KEY, false); } public void setReportErrors(boolean val) { setProperty(REPORT_ERRORS_KEY, val, false); } public boolean reportErrors() { return getPropertyAsBoolean(REPORT_ERRORS_KEY, false); } public void setShowWarnings(boolean val) { setProperty(SHOW_WARNINGS_KEY, val, false); } public boolean showWarnings() { return getPropertyAsBoolean(SHOW_WARNINGS_KEY, false); } public void setQuiet(boolean val) { setProperty(QUIET_KEY, val, true); } public boolean isQuiet() { return getPropertyAsBoolean(QUIET_KEY, true); } public void setDownloadDTDs(boolean val) { setProperty(DOWNLOAD_DTDS, val, false); } public boolean isDownloadDTDs() { return getPropertyAsBoolean(DOWNLOAD_DTDS, false); } }
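/*
 * Standalone sketch of the core check this assertion performs, using plain JDK XML APIs rather
 * than JMeter's XPathUtil: parse the response bytes into a DOM document and test whether the
 * configured XPath selects at least one node (inverted when the negate property is set). The
 * sample XML and expression are illustrative only.
 */
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

public class XPathCheckSketch {
    public static void main(String[] args) throws Exception {
        byte[] responseData = "<root><item id='1'/></root>".getBytes(StandardCharsets.UTF_8);
        boolean negate = false;

        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(responseData));
        NodeList nodes = (NodeList) XPathFactory.newInstance().newXPath()
                .evaluate("//item[@id='1']", doc, XPathConstants.NODESET);

        boolean matched = nodes.getLength() > 0;
        boolean failure = negate ? matched : !matched; // mirrors the negate semantics described above
        System.out.println("assertion " + (failure ? "failed" : "passed"));
    }
}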
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver14; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFBsnTlvReplyPacketsVer14 implements OFBsnTlvReplyPackets { private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvReplyPacketsVer14.class); // version: 1.4 final static byte WIRE_VERSION = 5; final static int LENGTH = 12; private final static U64 DEFAULT_VALUE = U64.ZERO; // OF message fields private final U64 value; // // Immutable default instance final static OFBsnTlvReplyPacketsVer14 DEFAULT = new OFBsnTlvReplyPacketsVer14( DEFAULT_VALUE ); // package private constructor - used by readers, builders, and factory OFBsnTlvReplyPacketsVer14(U64 value) { if(value == null) { throw new NullPointerException("OFBsnTlvReplyPacketsVer14: property value cannot be null"); } this.value = value; } // Accessors for OF message fields @Override public int getType() { return 0xc; } @Override public U64 getValue() { return value; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } public OFBsnTlvReplyPackets.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFBsnTlvReplyPackets.Builder { final OFBsnTlvReplyPacketsVer14 parentMessage; // OF message fields private boolean valueSet; private U64 value; BuilderWithParent(OFBsnTlvReplyPacketsVer14 parentMessage) { this.parentMessage = parentMessage; } @Override public int getType() { return 0xc; } @Override public U64 getValue() { return value; } @Override public OFBsnTlvReplyPackets.Builder setValue(U64 value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFBsnTlvReplyPackets build() { U64 value = this.valueSet ? 
this.value : parentMessage.value; if(value == null) throw new NullPointerException("Property value must not be null"); // return new OFBsnTlvReplyPacketsVer14( value ); } } static class Builder implements OFBsnTlvReplyPackets.Builder { // OF message fields private boolean valueSet; private U64 value; @Override public int getType() { return 0xc; } @Override public U64 getValue() { return value; } @Override public OFBsnTlvReplyPackets.Builder setValue(U64 value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } // @Override public OFBsnTlvReplyPackets build() { U64 value = this.valueSet ? this.value : DEFAULT_VALUE; if(value == null) throw new NullPointerException("Property value must not be null"); return new OFBsnTlvReplyPacketsVer14( value ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFBsnTlvReplyPackets> { @Override public OFBsnTlvReplyPackets readFrom(ByteBuf bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property type == 0xc short type = bb.readShort(); if(type != (short) 0xc) throw new OFParseError("Wrong type: Expected=0xc(0xc), got="+type); int length = U16.f(bb.readShort()); if(length != 12) throw new OFParseError("Wrong length: Expected=12(12), got="+length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); U64 value = U64.ofRaw(bb.readLong()); OFBsnTlvReplyPacketsVer14 bsnTlvReplyPacketsVer14 = new OFBsnTlvReplyPacketsVer14( value ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", bsnTlvReplyPacketsVer14); return bsnTlvReplyPacketsVer14; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFBsnTlvReplyPacketsVer14Funnel FUNNEL = new OFBsnTlvReplyPacketsVer14Funnel(); static class OFBsnTlvReplyPacketsVer14Funnel implements Funnel<OFBsnTlvReplyPacketsVer14> { private static final long serialVersionUID = 1L; @Override public void funnel(OFBsnTlvReplyPacketsVer14 message, PrimitiveSink sink) { // fixed value property type = 0xc sink.putShort((short) 0xc); // fixed value property length = 12 sink.putShort((short) 0xc); message.value.putTo(sink); } } public void writeTo(ByteBuf bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFBsnTlvReplyPacketsVer14> { @Override public void write(ByteBuf bb, OFBsnTlvReplyPacketsVer14 message) { // fixed value property type = 0xc bb.writeShort((short) 0xc); // fixed value property length = 12 bb.writeShort((short) 0xc); bb.writeLong(message.value.getValue()); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFBsnTlvReplyPacketsVer14("); b.append("value=").append(value); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnTlvReplyPacketsVer14 other = (OFBsnTlvReplyPacketsVer14) obj; if (value == null) { if (other.value != null) return false; } else if (!value.equals(other.value)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((value == null) ? 0 : value.hashCode()); return result; } }
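/*
 * Illustration (not LoxiGen-generated code) of the 12-byte wire format the reader and writer
 * above handle: a 2-byte TLV type of 0xc, a 2-byte total length of 12, and an 8-byte unsigned
 * value. Only Netty's Unpooled heap buffer is assumed to be available.
 */
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class BsnTlvWireSketch {
    public static void main(String[] args) {
        ByteBuf bb = Unpooled.buffer(12);
        bb.writeShort(0xc);  // TLV type
        bb.writeShort(12);   // total length, including this 4-byte header
        bb.writeLong(42L);   // u64 reply packet count

        System.out.printf("type=0x%x length=%d value=%d%n",
                bb.readShort(), bb.readShort(), bb.readLong());
    }
}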
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.xdebugger.impl.evaluate; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CustomShortcutSet; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.util.ui.JBUI; import com.intellij.xdebugger.*; import com.intellij.xdebugger.evaluation.EvaluationMode; import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider; import com.intellij.xdebugger.evaluation.XDebuggerEvaluator; import com.intellij.xdebugger.impl.XDebugSessionImpl; import com.intellij.xdebugger.impl.XDebuggerUtilImpl; import com.intellij.xdebugger.impl.actions.XDebuggerActions; import com.intellij.xdebugger.impl.breakpoints.XExpressionImpl; import com.intellij.xdebugger.impl.settings.XDebuggerSettingsManager; import com.intellij.xdebugger.impl.ui.XDebugSessionTab; import com.intellij.xdebugger.impl.ui.XDebuggerEditorBase; import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree; import com.intellij.xdebugger.impl.ui.tree.XDebuggerTreePanel; import com.intellij.xdebugger.impl.ui.tree.nodes.EvaluatingExpressionRootNode; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.tree.TreeNode; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; /** * @author nik */ public class XDebuggerEvaluationDialog extends DialogWrapper { private final JPanel myMainPanel; private final JPanel myResultPanel; private final XDebuggerTreePanel myTreePanel; private EvaluationInputComponent myInputComponent; private final XDebugSession mySession; private final XDebuggerEditorsProvider myEditorsProvider; private EvaluationMode myMode; private XSourcePosition mySourcePosition; private final SwitchModeAction mySwitchModeAction; private final boolean myIsCodeFragmentEvaluationSupported; public XDebuggerEvaluationDialog(@NotNull XDebugSession session, @NotNull XDebuggerEditorsProvider editorsProvider, @NotNull XDebuggerEvaluator evaluator, @NotNull XExpression text, @Nullable XSourcePosition sourcePosition) { super(session.getProject(), true); mySession = session; myEditorsProvider = editorsProvider; mySourcePosition = sourcePosition; setModal(false); setOKButtonText(XDebuggerBundle.message("xdebugger.button.evaluate")); setCancelButtonText(XDebuggerBundle.message("xdebugger.evaluate.dialog.close")); mySession.addSessionListener(new XDebugSessionAdapter() { @Override public void sessionStopped() { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { close(CANCEL_EXIT_CODE); } }); } 
@Override public void stackFrameChanged() { updateSourcePosition(); } @Override public void sessionPaused() { updateSourcePosition(); } }, myDisposable); myTreePanel = new XDebuggerTreePanel(session.getProject(), editorsProvider, myDisposable, sourcePosition, XDebuggerActions.EVALUATE_DIALOG_TREE_POPUP_GROUP, ((XDebugSessionImpl)session).getValueMarkers()); myResultPanel = JBUI.Panels.simplePanel() .addToTop(new JLabel(XDebuggerBundle.message("xdebugger.evaluate.label.result"))) .addToCenter(myTreePanel.getMainPanel()); myMainPanel = JBUI.Panels.simplePanel(); mySwitchModeAction = new SwitchModeAction(); new AnAction(){ @Override public void actionPerformed(AnActionEvent e) { doOKAction(); addToWatches(); } }.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.CTRL_DOWN_MASK | InputEvent.SHIFT_DOWN_MASK)), getRootPane(), myDisposable); new AnAction() { @Override public void actionPerformed(AnActionEvent e) { IdeFocusManager.getInstance(mySession.getProject()).requestFocus(myTreePanel.getTree(), true); } }.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.ALT_DOWN_MASK)), getRootPane(), myDisposable); myTreePanel.getTree().expandNodesOnLoad(new Condition<TreeNode>() { @Override public boolean value(TreeNode node) { return node.getParent() instanceof EvaluatingExpressionRootNode; } }); EvaluationMode mode = XDebuggerSettingsManager.getInstanceImpl().getGeneralSettings().getEvaluationDialogMode(); myIsCodeFragmentEvaluationSupported = evaluator.isCodeFragmentEvaluationSupported(); if (mode == EvaluationMode.CODE_FRAGMENT && !myIsCodeFragmentEvaluationSupported) { mode = EvaluationMode.EXPRESSION; } if (mode == EvaluationMode.EXPRESSION && text.getMode() == EvaluationMode.CODE_FRAGMENT && myIsCodeFragmentEvaluationSupported) { mode = EvaluationMode.CODE_FRAGMENT; } switchToMode(mode, text); init(); } private void updateSourcePosition() { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { mySourcePosition = mySession.getCurrentPosition(); getInputEditor().setSourcePosition(mySourcePosition); } }); } @Override protected void doOKAction() { evaluate(); } @Override protected void createDefaultActions() { super.createDefaultActions(); myOKAction = new OkAction(){ @Override public void actionPerformed(ActionEvent e) { super.actionPerformed(e); if ((e.getModifiers() & (InputEvent.SHIFT_MASK | InputEvent.CTRL_MASK)) == (InputEvent.SHIFT_MASK | InputEvent.CTRL_MASK)) { addToWatches(); } } }; } private void addToWatches() { if (myMode == EvaluationMode.EXPRESSION) { XExpression expression = getInputEditor().getExpression(); if (!XDebuggerUtilImpl.isEmptyExpression(expression)) { XDebugSessionTab tab = ((XDebugSessionImpl)mySession).getSessionTab(); if (tab != null) { tab.getWatchesView().addWatchExpression(expression, -1, true); requestFocusInEditor(); } } } } @NotNull @Override protected Action[] createActions() { if (myIsCodeFragmentEvaluationSupported) { return new Action[]{getOKAction(), mySwitchModeAction, getCancelAction()}; } return super.createActions(); } @Override protected String getHelpId() { return "debugging.debugMenu.evaluate"; } @Override protected JButton createJButtonForAction(Action action) { final JButton button = super.createJButtonForAction(action); if (action == mySwitchModeAction) { int width1 = new JButton(getSwitchButtonText(EvaluationMode.EXPRESSION)).getPreferredSize().width; int width2 = new 
JButton(getSwitchButtonText(EvaluationMode.CODE_FRAGMENT)).getPreferredSize().width; final Dimension size = new Dimension(Math.max(width1, width2), button.getPreferredSize().height); button.setMinimumSize(size); button.setPreferredSize(size); } return button; } public XExpression getExpression() { return getInputEditor().getExpression(); } private static String getSwitchButtonText(EvaluationMode mode) { return mode != EvaluationMode.EXPRESSION ? XDebuggerBundle.message("button.text.expression.mode") : XDebuggerBundle.message("button.text.code.fragment.mode"); } private void switchToMode(EvaluationMode mode, XExpression text) { if (myMode == mode) return; myMode = mode; if (mode == EvaluationMode.EXPRESSION) { text = new XExpressionImpl(StringUtil.convertLineSeparators(text.getExpression(), " "), text.getLanguage(), text.getCustomInfo()); } myInputComponent = createInputComponent(mode, text); myMainPanel.removeAll(); myInputComponent.addComponent(myMainPanel, myResultPanel); setTitle(myInputComponent.getTitle()); mySwitchModeAction.putValue(Action.NAME, getSwitchButtonText(mode)); requestFocusInEditor(); } private void requestFocusInEditor() { JComponent preferredFocusedComponent = getInputEditor().getPreferredFocusedComponent(); if (preferredFocusedComponent != null) { IdeFocusManager.getInstance(mySession.getProject()).requestFocus(preferredFocusedComponent, true); } } private XDebuggerEditorBase getInputEditor() { return myInputComponent.getInputEditor(); } private EvaluationInputComponent createInputComponent(EvaluationMode mode, XExpression text) { final Project project = mySession.getProject(); text = XExpressionImpl.changeMode(text, mode); if (mode == EvaluationMode.EXPRESSION) { return new ExpressionInputComponent(project, myEditorsProvider, mySourcePosition, text); } else { return new CodeFragmentInputComponent(project, myEditorsProvider, mySourcePosition, text, myDisposable); } } private void evaluate() { final XDebuggerEditorBase inputEditor = getInputEditor(); int offset = -1; //try to save caret position Editor editor = inputEditor.getEditor(); if (editor != null) { offset = editor.getCaretModel().getOffset(); } final XDebuggerTree tree = myTreePanel.getTree(); tree.markNodesObsolete(); tree.setRoot(new EvaluatingExpressionRootNode(this, tree), false); myResultPanel.invalidate(); //editor is already changed editor = inputEditor.getEditor(); //selectAll puts focus back inputEditor.selectAll(); //try to restore caret position and clear selection if (offset >= 0 && editor != null) { offset = Math.min(editor.getDocument().getTextLength(), offset); editor.getCaretModel().moveToOffset(offset); editor.getSelectionModel().setSelection(offset, offset); } } @Override protected String getDimensionServiceKey() { return "#xdebugger.evaluate"; } @Override protected JComponent createCenterPanel() { return myMainPanel; } public void startEvaluation(@NotNull XDebuggerEvaluator.XEvaluationCallback evaluationCallback) { final XDebuggerEditorBase inputEditor = getInputEditor(); inputEditor.saveTextInHistory(); XExpression expression = inputEditor.getExpression(); XDebuggerEvaluator evaluator = mySession.getDebugProcess().getEvaluator(); if (evaluator == null) { evaluationCallback.errorOccurred(XDebuggerBundle.message("xdebugger.evaluate.stack.frame.has.not.evaluator")); } else { evaluator.evaluate(expression, evaluationCallback, null); } } public void evaluationDone() { mySession.rebuildViews(); } @Override public JComponent getPreferredFocusedComponent() { return 
getInputEditor().getPreferredFocusedComponent(); } private class SwitchModeAction extends AbstractAction { @Override public void actionPerformed(ActionEvent e) { XExpression text = getInputEditor().getExpression(); EvaluationMode newMode = (myMode == EvaluationMode.EXPRESSION) ? EvaluationMode.CODE_FRAGMENT : EvaluationMode.EXPRESSION; // remember only on user selection XDebuggerSettingsManager.getInstanceImpl().getGeneralSettings().setEvaluationDialogMode(newMode); switchToMode(newMode, text); } } }
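/*
 * The dialog above binds Ctrl+Shift+Enter to "evaluate and add to watches" via IntelliJ's
 * AnAction/CustomShortcutSet. The plain-Swing sketch below shows the same key-binding idea with
 * stock InputMap/ActionMap APIs; the frame and the printed action are stand-ins, not IDE code.
 */
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import javax.swing.AbstractAction;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JRootPane;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.WindowConstants;

public class ShortcutBindingSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("shortcut sketch");
            JRootPane root = frame.getRootPane();
            KeyStroke stroke = KeyStroke.getKeyStroke(
                    KeyEvent.VK_ENTER, InputEvent.CTRL_DOWN_MASK | InputEvent.SHIFT_DOWN_MASK);
            root.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(stroke, "evaluateAndAddToWatches");
            root.getActionMap().put("evaluateAndAddToWatches", new AbstractAction() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    System.out.println("evaluate and add to watches");
                }
            });
            frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
            frame.setSize(320, 200);
            frame.setVisible(true);
        });
    }
}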
package com.yxs.test; import java.io.IOException; import java.util.Arrays; import java.util.Properties; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.I0Itec.zkclient.ZkClient; import org.I0Itec.zkclient.ZkConnection; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.kstream.GlobalKTable; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KeyValueMapper; import org.apache.kafka.streams.kstream.ValueJoiner; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import kafka.admin.AdminUtils; import kafka.admin.RackAwareMode; import kafka.utils.ZkUtils; import scala.collection.Map; /** * Hello world! * */ public class App { /* * * {"listener_security_protocol_map":{"PLAINTEXT":"PLAINTEXT"},"endpoints":[ * "PLAINTEXT://yxsicd-Aspire-4741:9092"],"jmx_port":-1,"host": * "yxsicd-Aspire-4741","timestamp":"1521734620784","port":9092,"version":4} * * */ public static ConcurrentHashMap<String, KafkaProducer<String, String>> producerMap = new ConcurrentHashMap<String, KafkaProducer<String, String>>(); public static ConcurrentHashMap<String, KafkaConsumer<String, String>> consumerMap = new ConcurrentHashMap<String, KafkaConsumer<String, String>>(); static ObjectMapper om = new ObjectMapper(); public static ExecutorService servicePool = Executors.newCachedThreadPool(); public static KafkaProducer<String, String> getProducer(String name) { KafkaProducer<String, String> kafkaProducer = producerMap.get(name); if (kafkaProducer != null) { return kafkaProducer; } Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("acks", "all"); props.put("retries", 0); props.put("batch.size", 16384); props.put("linger.ms", 1); props.put("buffer.memory", 33554432); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props); producerMap.put(name, producer); return producer; } public static KafkaConsumer<String, String> getConsumer(String name) { KafkaConsumer<String, String> consumer = consumerMap.get(name); if (consumer != null) { return consumer; } Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("group.id", name); props.put("enable.auto.commit", "false"); // props.put("auto.commit.interval.ms", "1000"); props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); consumer = new KafkaConsumer<>(props); consumerMap.put(name, consumer); return consumer; // consumer.subscribe(Arrays.asList("inventory_port_calc", // 
"inventory_ne_calc")); // while (true) { // ConsumerRecords<String, String> records = consumer.poll(100); // for (ConsumerRecord<String, String> record : records) // System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), // record.key(), record.value()); // } // // producerMap.put(name, producer); // // return producer; } public static ConsumerRecords<String, String> ReadMessage(String name, String topic, int count) { KafkaConsumer<String, String> consumer = getConsumer(name); TopicPartition tp = new TopicPartition(topic, 0); consumer.assign(Arrays.asList(tp)); consumer.seek(tp, 0); ConsumerRecords<String, String> records = consumer.poll(count); return records; } public static void ListenMessage(String input_name, String input_topic) { final String name = input_name; final String topic = input_topic; Runnable command = new Runnable() { @Override public void run() { System.out.printf("listen start, name is %s, topic is %s", name, topic); KafkaConsumer<String, String> consumer = getConsumer(name); TopicPartition tp = new TopicPartition(topic, 0); consumer.assign(Arrays.asList(tp)); consumer.seek(tp, 0); while (true) { ConsumerRecords<String, String> records = consumer.poll(1000); ShowRecordsMaxOffset(name, records); } } }; servicePool.execute(command); } public static void ShowRecords(ConsumerRecords<String, String> records) { for (ConsumerRecord<String, String> record : records) { System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value()); } } public static void ShowRecordsMaxOffset(String name, ConsumerRecords<String, String> records) { for (ConsumerRecord<String, String> record : records) { long offset = record.offset(); if (offset % 500 == 0) { System.out.printf("name= %s, offset = %d, key = %s, value = %s%n", name, record.offset(), record.key(), record.value()); } } } public static void initTopic() { Properties topicConfig = new Properties(); topicConfig.setProperty("compression.type", "gzip"); // topicConfig.put("key.serializer", // "org.apache.kafka.common.serialization.StringSerializer"); // topicConfig.put("value.serializer", // "org.apache.kafka.common.serialization.StringSerializer"); String serverstring = "localhost:2181"; MyStringS zkSerializer = new MyStringS(); ZkClient zkClient = new ZkClient(serverstring, 5000, 5000, zkSerializer); // zkClient.setZkSerializer(zkSerializer); ZkConnection zkConnection = new ZkConnection(serverstring); ZkUtils zk = new ZkUtils(zkClient, zkConnection, false); Map<String, Properties> fetchAllTopicConfigs = AdminUtils.fetchAllTopicConfigs(zk); System.out.println(fetchAllTopicConfigs); // AdminUtils.deleteTopic(zk, "inventory_ne_input"); // AdminUtils.deleteTopic(zk, "inventory_port_input"); // AdminUtils.deleteTopic(zk, "inventory_ne_output"); // AdminUtils.deleteTopic(zk, "inventory_port_output"); AdminUtils.createTopic(zk, "inventory_ne_input", 2, 1, topicConfig, RackAwareMode.Safe$.MODULE$); AdminUtils.createTopic(zk, "inventory_port_input", 2, 1, topicConfig, RackAwareMode.Safe$.MODULE$); AdminUtils.createTopic(zk, "inventory_ne_output", 2, 1, topicConfig, RackAwareMode.Safe$.MODULE$); AdminUtils.createTopic(zk, "inventory_port_output", 2, 1, topicConfig, RackAwareMode.Safe$.MODULE$); fetchAllTopicConfigs = AdminUtils.fetchAllTopicConfigs(zk); System.out.println(fetchAllTopicConfigs); } public static <T> T parseJson(String input, Class<T> valueType) { try { return om.readValue(input, valueType); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } 
return null; } public static void initStream() throws IOException { Properties props = new Properties(); props.put(StreamsConfig.APPLICATION_ID_CONFIG, "inventory"); props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); final StreamsBuilder builder = new StreamsBuilder(); GlobalKTable<Object, Object> tbl_ne = builder.globalTable("inventory_ne_input"); // KStream<Object, Object> stream_ne = builder.stream("inventory_ne_input"); KStream<Object, Object> stream_port = builder.stream("inventory_port_input"); KeyValueMapper<? super Object, ? super Object, ? extends Object> keyValueMapper = new KeyValueMapper<Object, Object, Object>() { @Override public Object apply(Object key, Object value) { // TODO Auto-generated method stub JsonNode parseJson = parseJson(new String((byte[]) value), JsonNode.class); return parseJson.path("1002").asText().getBytes(); } }; ValueJoiner<? super Object, ? super Object, ? extends Object> joiner = new ValueJoiner<Object, Object, Object>() { @Override public Object apply(Object value1, Object value2) { ObjectNode port_value = parseJson(new String((byte[]) value1), ObjectNode.class); JsonNode ne_value = parseJson(new String((byte[]) value2), JsonNode.class); port_value.put("1003", ne_value.path("1001").asText()); return port_value.toString().getBytes(); } }; KStream<Object, Object> stream_output_port = stream_port.join(tbl_ne, keyValueMapper, joiner); stream_output_port.to("inventory_port_output"); final Topology topology = builder.build(); System.out.println(topology.describe()); final KafkaStreams streams = new KafkaStreams(topology, props); streams.start(); // // final CountDownLatch latch = new CountDownLatch(1); // // // attach shutdown handler to catch control-c // Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") { // @Override // public void run() { // streams.close(); // latch.countDown(); // } // }); // // try { // streams.start(); // latch.await(); // } catch (Throwable e) { // System.exit(1); // } // System.exit(0); } public static void initTestData() { KafkaProducer<String, String> producer = getProducer("test"); for (int i = 0; i < 50000; i++) { String id = Integer.toString(i) + ""; ObjectNode ne_node = JsonNodeFactory.instance.objectNode(); ne_node.put("100", id); ne_node.put("1001", "ne_" + id); String ne_value = ne_node.toString(); // System.out.println(ne_value); producer.send(new ProducerRecord<String, String>("inventory_ne_input", id, ne_value)); for (int j = 0; j < 5; j++) { String port_id = id + "_" + Integer.toString(j) + ""; ObjectNode port_node = JsonNodeFactory.instance.objectNode(); port_node.put("100", port_id); port_node.put("1001", "port_" + port_id); port_node.put("1002", id); String port_value = port_node.toString(); // System.out.println(port_value); producer.send(new ProducerRecord<String, String>("inventory_port_input", port_id, port_value)); } } producer.flush(); System.out.println("init done"); } public static void sendTestData(int count) { KafkaProducer<String, String> producer = getProducer("test"); // System.out.println(port_value); for (int i = 0; i < count; i++) { String id = Math.round(Math.random() * 50000 % 50000) + ""; String port_id = id + "_" + Long.toString(System.currentTimeMillis()) + ""; ObjectNode port_node = JsonNodeFactory.instance.objectNode(); port_node.put("100", port_id); port_node.put("1001", "port_" + port_id); port_node.put("1002", id); String port_value = port_node.toString(); producer.send(new ProducerRecord<String, String>("inventory_port_input", port_id, 
port_value)); } producer.flush(); } public static void main(String[] args) throws IOException { // initTopic(); // initTestData(); // ConsumerRecords<String, String> readMessage = ReadMessage("test", // "inventory_port_input", 2000); // System.out.println(readMessage.count()); // ShowRecords(readMessage); while (true) { int rkey = System.in.read(); // System.out.println("read key : " + rkey); switch (rkey) { case 96: initStream(); break; case 50: sendTestData(1000); break; case 51: sendTestData(10000); break; case 52: sendTestData(100000); break; case 53: sendTestData(1000000); break; case 49: ListenMessage(UUID.randomUUID().toString(), "inventory_port_input"); break; } } } }
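/*
 * Minimal sketch of the assign-and-seek consumption pattern used by ReadMessage() and
 * ListenMessage() above, written against the plain Kafka consumer API (newer clients take a
 * Duration for poll()). The broker address, group id, and topic name are assumptions.
 */
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class AssignSeekSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // assumed broker
        props.put("group.id", "assign-seek-sketch");
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("inventory_port_input", 0);
            consumer.assign(Collections.singletonList(tp));          // no group rebalancing
            consumer.seekToBeginning(Collections.singletonList(tp)); // replay from the earliest offset
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset=%d key=%s value=%s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    }
}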
/** * Copyright 2011-2017 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.runtime.io.text.csv; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import org.junit.Test; /** * Test for {@code LineCursor}. */ public class LineCursorTest { private final boolean allowLineFeed = true; /** * simple case. */ @Test public void simple() { List<String> results = parse("Hello, world!"); assertThat(results, contains("Hello, world!")); } /** * w/ LF. */ @Test public void lf() { List<String> results = parse("hello\nworld\n\n!\n"); assertThat(results, contains("hello", "world", "", "!")); } /** * w/ CR. */ @Test public void cr() { List<String> results = parse("hello\rworld\r\r!\r"); assertThat(results, contains("hello", "world", "", "!")); } /** * w/ CRLF. */ @Test public void crlf() { List<String> results = parse("hello\r\nworld\r\n\r\n!\r\n"); assertThat(results, contains("hello", "world", "", "!")); } /** * w/ quote. */ @Test public void quote() { List<String> results = parse("'Hello, world!'\n"); assertThat(results, contains("'Hello, world!'")); } /** * w/ field separator. */ @Test public void comma() { List<String> results = parse("Hello, world!\n"); assertThat(results, contains("Hello, world!")); } /** * invalid quote. */ @Test public void invalid_quote() { List<String> results = parse("Hello' world!\n"); assertThat(results, contains("Hello' world!")); } /** * empty file. */ @Test public void begin_eof() { List<String> results = parse(""); assertThat(results, hasSize(0)); } /** * field separator follows EOF. */ @Test public void comma_eof() { List<String> results = parse(","); assertThat(results, contains(",")); } /** * empty file. */ @Test public void begin_lf() { List<String> results = parse("\n"); assertThat(results, contains("")); } /** * quote w/ quote. */ @Test public void quoted_quote() { List<String> results = parse("'Hello'' world!'\n"); assertThat(results, contains("'Hello'' world!'")); } /** * quote w/ field separator. */ @Test public void quoted_comma() { List<String> results = parse("',',','\n"); assertThat(results, contains("',',','")); } /** * quoted field ends with EOF. */ @Test public void quoted_quote_eof() { List<String> results = parse("'Hello, world!'"); assertThat(results, contains("'Hello, world!'")); } /** * quote w/ LF. */ @Test public void quoted_lf() { List<String> results = parse("'\n'\n"); assertThat(results, contains("'\n'")); } /** * quote w/ EOF. */ @Test public void quoted_eof() { List<String> results = parse("'"); assertThat(results, contains("'")); } /** * quote w/ CRLF. */ @Test public void quoted_cr_lf() { List<String> results = parse("'\r\n'\n"); assertThat(results, contains("'\r\n'")); } /** * quote w/ CR - c. */ @Test public void quoted_cr_c() { List<String> results = parse("'\rc'\n"); assertThat(results, contains("'\rc'")); } /** * quote w/ CR - quote. 
*/ @Test public void quoted_cr_quote() { List<String> results = parse("'\r'\n"); assertThat(results, contains("'\r'")); } /** * quote w/ CR - CR. */ @Test public void quoted_cr_cr() { List<String> results = parse("'\r\r'\n"); assertThat(results, contains("'\r\r'")); } /** * quote w/ CR - EOF. */ @Test public void quoted_cr_eof() { List<String> results = parse("'\r"); assertThat(results, contains("'\r")); } /** * quote w/ quote - c. */ @Test public void quoted_quote_c() { List<String> results = parse("'Hello' world!'\n"); assertThat(results, contains("'Hello' world!'")); } /** * quoted field ends with CRLF. */ @Test public void quoted_field_cr_lf() { List<String> results = parse("'Hello, world!'\r\n"); assertThat(results, contains("'Hello, world!'")); } private List<String> parse(String contents) { return parse('\'', ',', contents); } private List<String> parse(char quote, char field, String contents) { try (LineCursor cursor = new LineCursor(new StringReader(contents), quote, field, allowLineFeed)) { List<String> results = new ArrayList<>(); while (cursor.next()) { results.add(cursor.getContent().toString()); } return results; } catch (IOException e) { throw new AssertionError(e); } } }
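The test above drives LineCursor only through its private parse() helper. As a reading aid, here is a minimal sketch of the same call pattern outside JUnit, using only the members the helper already exercises: the (Reader, quote character, field separator, allowLineFeed) constructor, next(), and getContent(). The wrapper class name and the sample input are invented for illustration, and the sketch assumes LineCursor is visible from the calling package, as it is from this test.

import java.io.IOException;
import java.io.StringReader;

public class LineCursorUsageSketch {
    public static void main(String[] args) throws IOException {
        // one record per physical line, except where the line break sits inside a quoted field
        String text = "\"a;b\";c\r\nd;\"line\r\nbreak\"\r\n";
        try (LineCursor cursor = new LineCursor(new StringReader(text), '"', ';', true)) {
            while (cursor.next()) {
                // getContent() holds the raw text of one logical record
                System.out.println("[" + cursor.getContent() + "]");
            }
        }
    }
}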
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.query; import junit.framework.TestCase; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteTransactions; import org.apache.ignite.Ignition; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.query.QueryCursor; import org.apache.ignite.cache.query.SpiQuery; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.processors.cache.CacheEntryImpl; import org.apache.ignite.internal.transactions.IgniteTxHeuristicCheckedException; import org.apache.ignite.spi.IgniteSpiAdapter; import org.apache.ignite.spi.IgniteSpiException; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder; import org.apache.ignite.spi.indexing.IndexingQueryFilter; import org.apache.ignite.spi.indexing.IndexingSpi; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import org.apache.ignite.transactions.TransactionState; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.Callable; import javax.cache.Cache; /** * Indexing Spi query test */ public class IndexingSpiQuerySelfTest extends TestCase { /** {@inheritDoc} */ @Override public void tearDown() throws Exception { Ignition.stopAll(true); } /** * @throws Exception If failed. */ public void testSimpleIndexingSpi() throws Exception { IgniteConfiguration cfg = configuration(); cfg.setIndexingSpi(new MyIndexingSpi()); Ignite ignite = Ignition.start(cfg); CacheConfiguration<Integer, Integer> ccfg = new CacheConfiguration<>("test-cache"); ccfg.setIndexedTypes(Integer.class, Integer.class); IgniteCache<Integer, Integer> cache = ignite.createCache(ccfg); for (int i = 0; i < 10; i++) cache.put(i, i); QueryCursor<Cache.Entry<Integer, Integer>> cursor = cache.query(new SpiQuery<Integer, Integer>().setArgs(2, 5)); for (Cache.Entry<Integer, Integer> entry : cursor) System.out.println(entry); } /** * @throws Exception If failed. 
*/ @SuppressWarnings("ThrowableResultOfMethodCallIgnored") public void testIndexingSpiFailure() throws Exception { IgniteConfiguration cfg = configuration(); cfg.setIndexingSpi(new MyBrokenIndexingSpi()); Ignite ignite = Ignition.start(cfg); CacheConfiguration<Integer, Integer> ccfg = new CacheConfiguration<>("test-cache"); ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); ccfg.setIndexedTypes(Integer.class, Integer.class); final IgniteCache<Integer, Integer> cache = ignite.createCache(ccfg); final IgniteTransactions txs = ignite.transactions(); for (final TransactionConcurrency concurrency : TransactionConcurrency.values()) { for (final TransactionIsolation isolation : TransactionIsolation.values()) { System.out.println("Run in transaction: " + concurrency + " " + isolation); GridTestUtils.assertThrowsWithCause(new Callable<Void>() { @Override public Void call() throws Exception { Transaction tx; try (Transaction tx0 = tx = txs.txStart(concurrency, isolation)) { cache.put(1, 1); tx0.commit(); } assertEquals(TransactionState.ROLLED_BACK, tx.state()); return null; } }, IgniteTxHeuristicCheckedException.class); } } } /** * @return Configuration. */ private IgniteConfiguration configuration() { IgniteConfiguration cfg = new IgniteConfiguration(); TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); TcpDiscoverySpi disco = new TcpDiscoverySpi(); disco.setMaxMissedHeartbeats(Integer.MAX_VALUE); disco.setIpFinder(ipFinder); cfg.setDiscoverySpi(disco); return cfg; } /** * Indexing Spi implementation for test */ private static class MyIndexingSpi extends IgniteSpiAdapter implements IndexingSpi { /** Index. */ private final SortedMap<Object, Object> idx = new TreeMap<>(); /** {@inheritDoc} */ @Override public void spiStart(@Nullable String gridName) throws IgniteSpiException { // No-op. } /** {@inheritDoc} */ @Override public void spiStop() throws IgniteSpiException { // No-op. } /** {@inheritDoc} */ @Override public Iterator<Cache.Entry<?, ?>> query(@Nullable String spaceName, Collection<Object> params, @Nullable IndexingQueryFilter filters) throws IgniteSpiException { if (params.size() < 2) throw new IgniteSpiException("Range parameters required."); Iterator<Object> paramsIt = params.iterator(); Object from = paramsIt.next(); Object to = paramsIt.next(); SortedMap<Object, Object> map = idx.subMap(from, to); Collection<Cache.Entry<?, ?>> res = new ArrayList<>(map.size()); for (Map.Entry<Object, Object> entry : map.entrySet()) res.add(new CacheEntryImpl<>(entry.getKey(), entry.getValue())); return res.iterator(); } /** {@inheritDoc} */ @Override public void store(@Nullable String spaceName, Object key, Object val, long expirationTime) throws IgniteSpiException { idx.put(key, val); } /** {@inheritDoc} */ @Override public void remove(@Nullable String spaceName, Object key) throws IgniteSpiException { // No-op. } /** {@inheritDoc} */ @Override public void onSwap(@Nullable String spaceName, Object key) throws IgniteSpiException { // No-op. } /** {@inheritDoc} */ @Override public void onUnswap(@Nullable String spaceName, Object key, Object val) throws IgniteSpiException { // No-op. } } /** * Broken Indexing Spi implementation for test */ private class MyBrokenIndexingSpi extends MyIndexingSpi { /** {@inheritDoc} */ @Override public void store(@Nullable String spaceName, Object key, Object val, long expirationTime) throws IgniteSpiException { throw new IgniteSpiException("Test exception"); } } }
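For readers tracing the data flow, the following is a hedged sketch of an additional test method (not present in the original class) that could sit alongside the two above. It reuses the class's configuration() helper and the MyIndexingSpi defined above, and only spells out that SpiQuery.setArgs(2, 5) arrives in MyIndexingSpi.query() as the from/to bounds of the range scan; the method name and cache name are invented.

    /**
     * Hypothetical companion to testSimpleIndexingSpi: SpiQuery args become the
     * params collection handed to IndexingSpi.query().
     *
     * @throws Exception If failed.
     */
    public void testRangeArgumentsReachSpi() throws Exception {
        IgniteConfiguration cfg = configuration();   // same local discovery setup as the other tests
        cfg.setIndexingSpi(new MyIndexingSpi());     // sorted-map backed SPI defined above

        Ignite ignite = Ignition.start(cfg);

        CacheConfiguration<Integer, Integer> ccfg = new CacheConfiguration<>("range-cache");
        ccfg.setIndexedTypes(Integer.class, Integer.class);

        IgniteCache<Integer, Integer> cache = ignite.createCache(ccfg);

        // every put is routed to MyIndexingSpi.store(), which records the key in its sorted map
        for (int i = 0; i < 10; i++)
            cache.put(i, i);

        // setArgs(2, 5) is read back in MyIndexingSpi.query() as the 'from'/'to' sub-map bounds
        QueryCursor<Cache.Entry<Integer, Integer>> cursor =
            cache.query(new SpiQuery<Integer, Integer>().setArgs(2, 5));

        for (Cache.Entry<Integer, Integer> entry : cursor)
            assertTrue(entry.getKey() >= 2 && entry.getKey() < 5);
    }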
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jms; import java.util.Map; import java.util.concurrent.ExecutorService; import javax.jms.ConnectionFactory; import javax.jms.ExceptionListener; import javax.jms.Session; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.LoggingLevel; import org.apache.camel.impl.UriEndpointComponent; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.spi.Metadata; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.task.TaskExecutor; import org.springframework.jms.connection.JmsTransactionManager; import org.springframework.jms.connection.UserCredentialsConnectionFactoryAdapter; import org.springframework.jms.core.JmsOperations; import org.springframework.jms.support.converter.MessageConverter; import org.springframework.jms.support.destination.DestinationResolver; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.util.ErrorHandler; import static org.apache.camel.util.ObjectHelper.removeStartingCharacters; /** * A <a href="http://activemq.apache.org/jms.html">JMS Component</a> * * @version */ public class JmsComponent extends UriEndpointComponent implements ApplicationContextAware, HeaderFilterStrategyAware { private static final Logger LOG = LoggerFactory.getLogger(JmsComponent.class); private static final String KEY_FORMAT_STRATEGY_PARAM = "jmsKeyFormatStrategy"; private JmsConfiguration configuration; private ApplicationContext applicationContext; private QueueBrowseStrategy queueBrowseStrategy; private HeaderFilterStrategy headerFilterStrategy; private ExecutorService asyncStartStopExecutorService; private MessageCreatedStrategy messageCreatedStrategy; public JmsComponent() { super(JmsEndpoint.class); } public JmsComponent(Class<? extends Endpoint> endpointClass) { super(endpointClass); } public JmsComponent(CamelContext context) { super(context, JmsEndpoint.class); } public JmsComponent(CamelContext context, Class<? 
extends Endpoint> endpointClass) { super(context, endpointClass); } public JmsComponent(JmsConfiguration configuration) { this(); this.configuration = configuration; } /** * Static builder method */ public static JmsComponent jmsComponent() { return new JmsComponent(); } /** * Static builder method */ public static JmsComponent jmsComponent(JmsConfiguration configuration) { return new JmsComponent(configuration); } /** * Static builder method */ public static JmsComponent jmsComponent(ConnectionFactory connectionFactory) { return jmsComponent(new JmsConfiguration(connectionFactory)); } /** * Static builder method */ public static JmsComponent jmsComponentClientAcknowledge(ConnectionFactory connectionFactory) { JmsConfiguration template = new JmsConfiguration(connectionFactory); template.setAcknowledgementMode(Session.CLIENT_ACKNOWLEDGE); return jmsComponent(template); } /** * Static builder method */ public static JmsComponent jmsComponentAutoAcknowledge(ConnectionFactory connectionFactory) { JmsConfiguration template = new JmsConfiguration(connectionFactory); template.setAcknowledgementMode(Session.AUTO_ACKNOWLEDGE); return jmsComponent(template); } public static JmsComponent jmsComponentTransacted(ConnectionFactory connectionFactory) { JmsTransactionManager transactionManager = new JmsTransactionManager(); transactionManager.setConnectionFactory(connectionFactory); return jmsComponentTransacted(connectionFactory, transactionManager); } @SuppressWarnings("deprecation") public static JmsComponent jmsComponentTransacted(ConnectionFactory connectionFactory, PlatformTransactionManager transactionManager) { JmsConfiguration template = new JmsConfiguration(connectionFactory); template.setTransactionManager(transactionManager); template.setTransacted(true); template.setTransactedInOut(true); return jmsComponent(template); } // Properties // ------------------------------------------------------------------------- public JmsConfiguration getConfiguration() { if (configuration == null) { configuration = createConfiguration(); // If we are being configured with spring... if (applicationContext != null) { if (isAllowAutoWiredConnectionFactory()) { Map<String, ConnectionFactory> beansOfTypeConnectionFactory = applicationContext.getBeansOfType(ConnectionFactory.class); if (!beansOfTypeConnectionFactory.isEmpty()) { ConnectionFactory cf = beansOfTypeConnectionFactory.values().iterator().next(); configuration.setConnectionFactory(cf); } } if (isAllowAutoWiredDestinationResolver()) { Map<String, DestinationResolver> beansOfTypeDestinationResolver = applicationContext.getBeansOfType(DestinationResolver.class); if (!beansOfTypeDestinationResolver.isEmpty()) { DestinationResolver destinationResolver = beansOfTypeDestinationResolver.values().iterator().next(); configuration.setDestinationResolver(destinationResolver); } } } } return configuration; } /** * Subclasses can override to prevent the jms configuration from being * setup to use an auto-wired the connection factory that's found in the spring * application context. * * @return true by default */ public boolean isAllowAutoWiredConnectionFactory() { return true; } /** * Subclasses can override to prevent the jms configuration from being * setup to use an auto-wired the destination resolved that's found in the spring * application context. 
* * @return true by default */ public boolean isAllowAutoWiredDestinationResolver() { return true; } /** * To use a shared JMS configuration */ public void setConfiguration(JmsConfiguration configuration) { this.configuration = configuration; } /** * Specifies whether the consumer accept messages while it is stopping. * You may consider enabling this option, if you start and stop JMS routes at runtime, while there are still messages * enqued on the queue. If this option is false, and you stop the JMS route, then messages may be rejected, * and the JMS broker would have to attempt redeliveries, which yet again may be rejected, and eventually the message * may be moved at a dead letter queue on the JMS broker. To avoid this its recommended to enable this option. */ public void setAcceptMessagesWhileStopping(boolean acceptMessagesWhileStopping) { getConfiguration().setAcceptMessagesWhileStopping(acceptMessagesWhileStopping); } /** * Whether the DefaultMessageListenerContainer used in the reply managers for request-reply messaging allow * the DefaultMessageListenerContainer.runningAllowed flag to quick stop in case JmsConfiguration#isAcceptMessagesWhileStopping * is enabled, and org.apache.camel.CamelContext is currently being stopped. This quick stop ability is enabled by * default in the regular JMS consumers but to enable for reply managers you must enable this flag. */ public void setAllowReplyManagerQuickStop(boolean allowReplyManagerQuickStop) { getConfiguration().setAllowReplyManagerQuickStop(allowReplyManagerQuickStop); } /** * The JMS acknowledgement mode defined as an Integer. * Allows you to set vendor-specific extensions to the acknowledgment mode. * For the regular modes, it is preferable to use the acknowledgementModeName instead. */ public void setAcknowledgementMode(int consumerAcknowledgementMode) { getConfiguration().setAcknowledgementMode(consumerAcknowledgementMode); } /** * Enables eager loading of JMS properties as soon as a message is loaded * which generally is inefficient as the JMS properties may not be required * but sometimes can catch early any issues with the underlying JMS provider * and the use of JMS properties */ public void setEagerLoadingOfProperties(boolean eagerLoadingOfProperties) { getConfiguration().setEagerLoadingOfProperties(eagerLoadingOfProperties); } /** * The JMS acknowledgement name, which is one of: SESSION_TRANSACTED, CLIENT_ACKNOWLEDGE, AUTO_ACKNOWLEDGE, DUPS_OK_ACKNOWLEDGE */ public void setAcknowledgementModeName(String consumerAcknowledgementMode) { getConfiguration().setAcknowledgementModeName(consumerAcknowledgementMode); } /** * Specifies whether the consumer container should auto-startup. */ public void setAutoStartup(boolean autoStartup) { getConfiguration().setAutoStartup(autoStartup); } /** * Sets the cache level by ID for the underlying JMS resources. See cacheLevelName option for more details. */ public void setCacheLevel(int cacheLevel) { getConfiguration().setCacheLevel(cacheLevel); } /** * Sets the cache level by name for the underlying JMS resources. * Possible values are: CACHE_AUTO, CACHE_CONNECTION, CACHE_CONSUMER, CACHE_NONE, and CACHE_SESSION. * The default setting is CACHE_AUTO. See the Spring documentation and Transactions Cache Levels for more information. */ public void setCacheLevelName(String cacheName) { getConfiguration().setCacheLevelName(cacheName); } /** * Sets the cache level by name for the reply consumer when doing request/reply over JMS. 
* This option only applies when using fixed reply queues (not temporary). * Camel will by default use: CACHE_CONSUMER for exclusive or shared w/ replyToSelectorName. * And CACHE_SESSION for shared without replyToSelectorName. Some JMS brokers such as IBM WebSphere * may require to set the replyToCacheLevelName=CACHE_NONE to work. * Note: If using temporary queues then CACHE_NONE is not allowed, * and you must use a higher value such as CACHE_CONSUMER or CACHE_SESSION. */ public void setReplyToCacheLevelName(String cacheName) { getConfiguration().setReplyToCacheLevelName(cacheName); } /** * Sets the JMS client ID to use. Note that this value, if specified, must be unique and can only be used by a single JMS connection instance. * It is typically only required for durable topic subscriptions. * <p/> * If using Apache ActiveMQ you may prefer to use Virtual Topics instead. */ public void setClientId(String consumerClientId) { getConfiguration().setClientId(consumerClientId); } /** * Specifies the default number of concurrent consumers when consuming from JMS (not for request/reply over JMS). * See also the maxMessagesPerTask option to control dynamic scaling up/down of threads. * <p/> * When doing request/reply over JMS then the option replyToConcurrentConsumers is used to control number * of concurrent consumers on the reply message listener. */ public void setConcurrentConsumers(int concurrentConsumers) { getConfiguration().setConcurrentConsumers(concurrentConsumers); } /** * Specifies the default number of concurrent consumers when doing request/reply over JMS. * See also the maxMessagesPerTask option to control dynamic scaling up/down of threads. */ public void setReplyToConcurrentConsumers(int concurrentConsumers) { getConfiguration().setReplyToConcurrentConsumers(concurrentConsumers); } /** * Sets the default connection factory to be use */ public void setConnectionFactory(ConnectionFactory connectionFactory) { getConfiguration().setConnectionFactory(connectionFactory); } /** * Username to use with the ConnectionFactory. You can also configure username/password directly on the ConnectionFactory. */ @Metadata(secret = true) public void setUsername(String username) { getConfiguration().setUsername(username); } /** * Password to use with the ConnectionFactory. You can also configure username/password directly on the ConnectionFactory. */ @Metadata(secret = true) public void setPassword(String password) { getConfiguration().setPassword(password); } /** * Specifies whether persistent delivery is used by default. */ public void setDeliveryPersistent(boolean deliveryPersistent) { getConfiguration().setDeliveryPersistent(deliveryPersistent); } /** * Specifies the delivery mode to be used. Possible values are * Possibles values are those defined by javax.jms.DeliveryMode. * NON_PERSISTENT = 1 and PERSISTENT = 2. */ public void setDeliveryMode(Integer deliveryMode) { getConfiguration().setDeliveryMode(deliveryMode); } /** * The durable subscriber name for specifying durable topic subscriptions. The clientId option must be configured as well. */ public void setDurableSubscriptionName(String durableSubscriptionName) { getConfiguration().setDurableSubscriptionName(durableSubscriptionName); } /** * Specifies the JMS Exception Listener that is to be notified of any underlying JMS exceptions. 
*/ public void setExceptionListener(ExceptionListener exceptionListener) { getConfiguration().setExceptionListener(exceptionListener); } /** * Specifies a org.springframework.util.ErrorHandler to be invoked in case of any uncaught exceptions thrown while processing a Message. * By default these exceptions will be logged at the WARN level, if no errorHandler has been configured. * You can configure logging level and whether stack traces should be logged using errorHandlerLoggingLevel and errorHandlerLogStackTrace options. * This makes it much easier to configure, than having to code a custom errorHandler. */ public void setErrorHandler(ErrorHandler errorHandler) { getConfiguration().setErrorHandler(errorHandler); } /** * Allows to configure the default errorHandler logging level for logging uncaught exceptions. */ public void setErrorHandlerLoggingLevel(LoggingLevel errorHandlerLoggingLevel) { getConfiguration().setErrorHandlerLoggingLevel(errorHandlerLoggingLevel); } /** * Allows to control whether stacktraces should be logged or not, by the default errorHandler. */ public void setErrorHandlerLogStackTrace(boolean errorHandlerLogStackTrace) { getConfiguration().setErrorHandlerLogStackTrace(errorHandlerLogStackTrace); } /** * Set if the deliveryMode, priority or timeToLive qualities of service should be used when sending messages. * This option is based on Spring's JmsTemplate. The deliveryMode, priority and timeToLive options are applied to the current endpoint. * This contrasts with the preserveMessageQos option, which operates at message granularity, * reading QoS properties exclusively from the Camel In message headers. */ public void setExplicitQosEnabled(boolean explicitQosEnabled) { getConfiguration().setExplicitQosEnabled(explicitQosEnabled); } /** * Specifies whether the listener session should be exposed when consuming messages. */ public void setExposeListenerSession(boolean exposeListenerSession) { getConfiguration().setExposeListenerSession(exposeListenerSession); } /** * Specifies the limit for idle executions of a receive task, not having received any message within its execution. * If this limit is reached, the task will shut down and leave receiving to other executing tasks * (in the case of dynamic scheduling; see the maxConcurrentConsumers setting). * There is additional doc available from Spring. */ public void setIdleTaskExecutionLimit(int idleTaskExecutionLimit) { getConfiguration().setIdleTaskExecutionLimit(idleTaskExecutionLimit); } /** * Specify the limit for the number of consumers that are allowed to be idle at any given time. */ public void setIdleConsumerLimit(int idleConsumerLimit) { getConfiguration().setIdleConsumerLimit(idleConsumerLimit); } /** * Specifies the maximum number of concurrent consumers when consuming from JMS (not for request/reply over JMS). * See also the maxMessagesPerTask option to control dynamic scaling up/down of threads. * <p/> * When doing request/reply over JMS then the option replyToMaxConcurrentConsumers is used to control number * of concurrent consumers on the reply message listener. */ public void setMaxConcurrentConsumers(int maxConcurrentConsumers) { getConfiguration().setMaxConcurrentConsumers(maxConcurrentConsumers); } /** * Specifies the maximum number of concurrent consumers when using request/reply over JMS. * See also the maxMessagesPerTask option to control dynamic scaling up/down of threads. 
*/ public void setReplyToMaxConcurrentConsumers(int maxConcurrentConsumers) { getConfiguration().setReplyToMaxConcurrentConsumers(maxConcurrentConsumers); } /** * Specifies the maximum number of concurrent consumers for continue routing when timeout occurred when using request/reply over JMS. */ public void setReplyOnTimeoutToMaxConcurrentConsumers(int maxConcurrentConsumers) { getConfiguration().setReplyToOnTimeoutMaxConcurrentConsumers(maxConcurrentConsumers); } /** * The number of messages per task. -1 is unlimited. * If you use a range for concurrent consumers (eg min < max), then this option can be used to set * a value to eg 100 to control how fast the consumers will shrink when less work is required. */ public void setMaxMessagesPerTask(int maxMessagesPerTask) { getConfiguration().setMaxMessagesPerTask(maxMessagesPerTask); } /** * To use a custom Spring org.springframework.jms.support.converter.MessageConverter so you can be in control * how to map to/from a javax.jms.Message. */ public void setMessageConverter(MessageConverter messageConverter) { getConfiguration().setMessageConverter(messageConverter); } /** * Specifies whether Camel should auto map the received JMS message to a suited payload type, such as javax.jms.TextMessage to a String etc. * See section about how mapping works below for more details. */ public void setMapJmsMessage(boolean mapJmsMessage) { getConfiguration().setMapJmsMessage(mapJmsMessage); } /** * When sending, specifies whether message IDs should be added. */ public void setMessageIdEnabled(boolean messageIdEnabled) { getConfiguration().setMessageIdEnabled(messageIdEnabled); } /** * Specifies whether timestamps should be enabled by default on sending messages. */ public void setMessageTimestampEnabled(boolean messageTimestampEnabled) { getConfiguration().setMessageTimestampEnabled(messageTimestampEnabled); } /** * If true, Camel will always make a JMS message copy of the message when it is passed to the producer for sending. * Copying the message is needed in some situations, such as when a replyToDestinationSelectorName is set * (incidentally, Camel will set the alwaysCopyMessage option to true, if a replyToDestinationSelectorName is set) */ public void setAlwaysCopyMessage(boolean alwaysCopyMessage) { getConfiguration().setAlwaysCopyMessage(alwaysCopyMessage); } /** * Specifies whether JMSMessageID should always be used as JMSCorrelationID for InOut messages. */ public void setUseMessageIDAsCorrelationID(boolean useMessageIDAsCorrelationID) { getConfiguration().setUseMessageIDAsCorrelationID(useMessageIDAsCorrelationID); } /** * Values greater than 1 specify the message priority when sending (where 0 is the lowest priority and 9 is the highest). * The explicitQosEnabled option must also be enabled in order for this option to have any effect. */ public void setPriority(int priority) { getConfiguration().setPriority(priority); } /** * Specifies whether to inhibit the delivery of messages published by its own connection. */ public void setPubSubNoLocal(boolean pubSubNoLocal) { getConfiguration().setPubSubNoLocal(pubSubNoLocal); } /** * The timeout for receiving messages (in milliseconds). */ public void setReceiveTimeout(long receiveTimeout) { getConfiguration().setReceiveTimeout(receiveTimeout); } /** * Specifies the interval between recovery attempts, i.e. when a connection is being refreshed, in milliseconds. * The default is 5000 ms, that is, 5 seconds. 
*/ public void setRecoveryInterval(long recoveryInterval) { getConfiguration().setRecoveryInterval(recoveryInterval); } /** * Deprecated: Enabled by default, if you specify a durableSubscriptionName and a clientId. */ @Deprecated public void setSubscriptionDurable(boolean subscriptionDurable) { getConfiguration().setSubscriptionDurable(subscriptionDurable); } /** * Allows you to specify a custom task executor for consuming messages. */ public void setTaskExecutor(TaskExecutor taskExecutor) { getConfiguration().setTaskExecutor(taskExecutor); } /** * When sending messages, specifies the time-to-live of the message (in milliseconds). */ public void setTimeToLive(long timeToLive) { getConfiguration().setTimeToLive(timeToLive); } /** * Specifies whether to use transacted mode */ public void setTransacted(boolean consumerTransacted) { getConfiguration().setTransacted(consumerTransacted); } /** * If true, Camel will create a JmsTransactionManager, if there is no transactionManager injected when option transacted=true. */ public void setLazyCreateTransactionManager(boolean lazyCreating) { getConfiguration().setLazyCreateTransactionManager(lazyCreating); } /** * The Spring transaction manager to use. */ public void setTransactionManager(PlatformTransactionManager transactionManager) { getConfiguration().setTransactionManager(transactionManager); } /** * The name of the transaction to use. */ public void setTransactionName(String transactionName) { getConfiguration().setTransactionName(transactionName); } /** * The timeout value of the transaction (in seconds), if using transacted mode. */ public void setTransactionTimeout(int transactionTimeout) { getConfiguration().setTransactionTimeout(transactionTimeout); } /** * Specifies whether to test the connection on startup. * This ensures that when Camel starts that all the JMS consumers have a valid connection to the JMS broker. * If a connection cannot be granted then Camel throws an exception on startup. * This ensures that Camel is not started with failed connections. * The JMS producers is tested as well. */ public void setTestConnectionOnStartup(boolean testConnectionOnStartup) { getConfiguration().setTestConnectionOnStartup(testConnectionOnStartup); } /** * Whether to startup the JmsConsumer message listener asynchronously, when starting a route. * For example if a JmsConsumer cannot get a connection to a remote JMS broker, then it may block while retrying * and/or failover. This will cause Camel to block while starting routes. By setting this option to true, * you will let routes startup, while the JmsConsumer connects to the JMS broker using a dedicated thread * in asynchronous mode. If this option is used, then beware that if the connection could not be established, * then an exception is logged at WARN level, and the consumer will not be able to receive messages; * You can then restart the route to retry. */ public void setAsyncStartListener(boolean asyncStartListener) { getConfiguration().setAsyncStartListener(asyncStartListener); } /** * Whether to stop the JmsConsumer message listener asynchronously, when stopping a route. */ public void setAsyncStopListener(boolean asyncStopListener) { getConfiguration().setAsyncStopListener(asyncStopListener); } /** * When using mapJmsMessage=false Camel will create a new JMS message to send to a new JMS destination * if you touch the headers (get or set) during the route. Set this option to true to force Camel to send * the original JMS message that was received. 
*/ public void setForceSendOriginalMessage(boolean forceSendOriginalMessage) { getConfiguration().setForceSendOriginalMessage(forceSendOriginalMessage); } /** * The timeout for waiting for a reply when using the InOut Exchange Pattern (in milliseconds). * The default is 20 seconds. You can include the header "CamelJmsRequestTimeout" to override this endpoint configured * timeout value, and thus have per message individual timeout values. * See also the requestTimeoutCheckerInterval option. */ public void setRequestTimeout(long requestTimeout) { getConfiguration().setRequestTimeout(requestTimeout); } /** * Configures how often Camel should check for timed out Exchanges when doing request/reply over JMS. * By default Camel checks once per second. But if you must react faster when a timeout occurs, * then you can lower this interval, to check more frequently. The timeout is determined by the option requestTimeout. */ public void setRequestTimeoutCheckerInterval(long requestTimeoutCheckerInterval) { getConfiguration().setRequestTimeoutCheckerInterval(requestTimeoutCheckerInterval); } /** * You can transfer the exchange over the wire instead of just the body and headers. * The following fields are transferred: In body, Out body, Fault body, In headers, Out headers, Fault headers, * exchange properties, exchange exception. * This requires that the objects are serializable. Camel will exclude any non-serializable objects and log it at WARN level. * You must enable this option on both the producer and consumer side, so Camel knows the payloads is an Exchange and not a regular payload. */ public void setTransferExchange(boolean transferExchange) { getConfiguration().setTransferExchange(transferExchange); } /** * If enabled and you are using Request Reply messaging (InOut) and an Exchange failed on the consumer side, * then the caused Exception will be send back in response as a javax.jms.ObjectMessage. * If the client is Camel, the returned Exception is rethrown. This allows you to use Camel JMS as a bridge * in your routing - for example, using persistent queues to enable robust routing. * Notice that if you also have transferExchange enabled, this option takes precedence. * The caught exception is required to be serializable. * The original Exception on the consumer side can be wrapped in an outer exception * such as org.apache.camel.RuntimeCamelException when returned to the producer. */ public void setTransferException(boolean transferException) { getConfiguration().setTransferException(transferException); } /** * If enabled and you are using Request Reply messaging (InOut) and an Exchange failed with a SOAP fault (not exception) on the consumer side, * then the fault flag on {@link org.apache.camel.Message#isFault()} will be send back in the response as a JMS header with the key * {@link JmsConstants#JMS_TRANSFER_FAULT}. * If the client is Camel, the returned fault flag will be set on the {@link org.apache.camel.Message#setFault(boolean)}. * <p/> * You may want to enable this when using Camel components that support faults such as SOAP based such as cxf or spring-ws. */ public void setTransferFault(boolean transferFault) { getConfiguration().setTransferFault(transferFault); } /** * Allows you to use your own implementation of the org.springframework.jms.core.JmsOperations interface. * Camel uses JmsTemplate as default. Can be used for testing purpose, but not used much as stated in the spring API docs. 
*/ public void setJmsOperations(JmsOperations jmsOperations) { getConfiguration().setJmsOperations(jmsOperations); } /** * A pluggable org.springframework.jms.support.destination.DestinationResolver that allows you to use your own resolver * (for example, to lookup the real destination in a JNDI registry). */ public void setDestinationResolver(DestinationResolver destinationResolver) { getConfiguration().setDestinationResolver(destinationResolver); } /** * Allows for explicitly specifying which kind of strategy to use for replyTo queues when doing request/reply over JMS. * Possible values are: Temporary, Shared, or Exclusive. * By default Camel will use temporary queues. However if replyTo has been configured, then Shared is used by default. * This option allows you to use exclusive queues instead of shared ones. * See Camel JMS documentation for more details, and especially the notes about the implications if running in a clustered environment, * and the fact that Shared reply queues has lower performance than its alternatives Temporary and Exclusive. */ public void setReplyToType(ReplyToType replyToType) { getConfiguration().setReplyToType(replyToType); } /** * Set to true, if you want to send message using the QoS settings specified on the message, * instead of the QoS settings on the JMS endpoint. The following three headers are considered JMSPriority, JMSDeliveryMode, * and JMSExpiration. You can provide all or only some of them. If not provided, Camel will fall back to use the * values from the endpoint instead. So, when using this option, the headers override the values from the endpoint. * The explicitQosEnabled option, by contrast, will only use options set on the endpoint, and not values from the message header. */ public void setPreserveMessageQos(boolean preserveMessageQos) { getConfiguration().setPreserveMessageQos(preserveMessageQos); } /** * Whether the JmsConsumer processes the Exchange asynchronously. * If enabled then the JmsConsumer may pickup the next message from the JMS queue, * while the previous message is being processed asynchronously (by the Asynchronous Routing Engine). * This means that messages may be processed not 100% strictly in order. If disabled (as default) * then the Exchange is fully processed before the JmsConsumer will pickup the next message from the JMS queue. * Note if transacted has been enabled, then asyncConsumer=true does not run asynchronously, as transaction * must be executed synchronously (Camel 3.0 may support async transactions). */ public void setAsyncConsumer(boolean asyncConsumer) { getConfiguration().setAsyncConsumer(asyncConsumer); } /** * Whether to allow sending messages with no body. If this option is false and the message body is null, then an JMSException is thrown. */ public void setAllowNullBody(boolean allowNullBody) { getConfiguration().setAllowNullBody(allowNullBody); } /** * Only applicable when sending to JMS destination using InOnly (eg fire and forget). * Enabling this option will enrich the Camel Exchange with the actual JMSMessageID * that was used by the JMS client when the message was sent to the JMS destination. */ public void setIncludeSentJMSMessageID(boolean includeSentJMSMessageID) { getConfiguration().setIncludeSentJMSMessageID(includeSentJMSMessageID); } /** * Whether to include all JMSXxxx properties when mapping from JMS to Camel Message. * Setting this to true will include properties such as JMSXAppID, and JMSXUserID etc. 
* Note: If you are using a custom headerFilterStrategy then this option does not apply. */ public void setIncludeAllJMSXProperties(boolean includeAllJMSXProperties) { getConfiguration().setIncludeAllJMSXProperties(includeAllJMSXProperties); } /** * Specifies what default TaskExecutor type to use in the DefaultMessageListenerContainer, * for both consumer endpoints and the ReplyTo consumer of producer endpoints. * Possible values: SimpleAsync (uses Spring's SimpleAsyncTaskExecutor) or ThreadPool * (uses Spring's ThreadPoolTaskExecutor with optimal values - cached threadpool-like). * If not set, it defaults to the previous behaviour, which uses a cached thread pool * for consumer endpoints and SimpleAsync for reply consumers. * The use of ThreadPool is recommended to reduce "thread trash" in elastic configurations * with dynamically increasing and decreasing concurrent consumers. */ public void setDefaultTaskExecutorType(DefaultTaskExecutorType type) { getConfiguration().setDefaultTaskExecutorType(type); } /** * Pluggable strategy for encoding and decoding JMS keys so they can be compliant with the JMS specification. * Camel provides two implementations out of the box: default and passthrough. * The default strategy will safely marshal dots and hyphens (. and -). The passthrough strategy leaves the key as is. * Can be used for JMS brokers which do not care whether JMS header keys contain illegal characters. * You can provide your own implementation of the org.apache.camel.component.jms.JmsKeyFormatStrategy * and refer to it using the # notation. */ public void setJmsKeyFormatStrategy(JmsKeyFormatStrategy jmsKeyFormatStrategy) { getConfiguration().setJmsKeyFormatStrategy(jmsKeyFormatStrategy); } /** * Pluggable strategy for encoding and decoding JMS keys so they can be compliant with the JMS specification. * Camel provides two implementations out of the box: default and passthrough. * The default strategy will safely marshal dots and hyphens (. and -). The passthrough strategy leaves the key as is. * Can be used for JMS brokers which do not care whether JMS header keys contain illegal characters. * You can provide your own implementation of the org.apache.camel.component.jms.JmsKeyFormatStrategy * and refer to it using the # notation. */ public void setJmsKeyFormatStrategy(String jmsKeyFormatStrategyName) { // allow to configure a standard by its name, which is simpler JmsKeyFormatStrategy strategy = resolveStandardJmsKeyFormatStrategy(jmsKeyFormatStrategyName); if (strategy == null) { throw new IllegalArgumentException("JmsKeyFormatStrategy with name " + jmsKeyFormatStrategyName + " is not a standard supported name"); } else { getConfiguration().setJmsKeyFormatStrategy(strategy); } } /** * Sets the Spring ApplicationContext to use */ public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } public QueueBrowseStrategy getQueueBrowseStrategy() { if (queueBrowseStrategy == null) { queueBrowseStrategy = new DefaultQueueBrowseStrategy(); } return queueBrowseStrategy; } /** * To use a custom QueueBrowseStrategy when browsing queues */ public void setQueueBrowseStrategy(QueueBrowseStrategy queueBrowseStrategy) { this.queueBrowseStrategy = queueBrowseStrategy; } public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } /** * To use a custom HeaderFilterStrategy to filter header to and from Camel message. 
*/ public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) { this.headerFilterStrategy = strategy; } public MessageCreatedStrategy getMessageCreatedStrategy() { return messageCreatedStrategy; } /** * To use the given MessageCreatedStrategy which are invoked when Camel creates new instances of <tt>javax.jms.Message</tt> * objects when Camel is sending a JMS message. */ public void setMessageCreatedStrategy(MessageCreatedStrategy messageCreatedStrategy) { this.messageCreatedStrategy = messageCreatedStrategy; } public int getWaitForProvisionCorrelationToBeUpdatedCounter() { return getConfiguration().getWaitForProvisionCorrelationToBeUpdatedCounter(); } /** * Number of times to wait for provisional correlation id to be updated to the actual correlation id when doing request/reply over JMS * and when the option useMessageIDAsCorrelationID is enabled. */ public void setWaitForProvisionCorrelationToBeUpdatedCounter(int counter) { getConfiguration().setWaitForProvisionCorrelationToBeUpdatedCounter(counter); } public long getWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime() { return getConfiguration().getWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime(); } /** * Interval in millis to sleep each time while waiting for provisional correlation id to be updated. */ public void setWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime(long sleepingTime) { getConfiguration().setWaitForProvisionCorrelationToBeUpdatedThreadSleepingTime(sleepingTime); } // Implementation methods // ------------------------------------------------------------------------- @Override protected void doStart() throws Exception { if (headerFilterStrategy == null) { headerFilterStrategy = new JmsHeaderFilterStrategy(getConfiguration().isIncludeAllJMSXProperties()); } } @Override protected void doShutdown() throws Exception { if (asyncStartStopExecutorService != null) { getCamelContext().getExecutorServiceManager().shutdownNow(asyncStartStopExecutorService); asyncStartStopExecutorService = null; } super.doShutdown(); } protected synchronized ExecutorService getAsyncStartStopExecutorService() { if (asyncStartStopExecutorService == null) { // use a cached thread pool for async start tasks as they can run for a while, and we need a dedicated thread // for each task, and the thread pool will shrink when no more tasks running asyncStartStopExecutorService = getCamelContext().getExecutorServiceManager().newCachedThreadPool(this, "AsyncStartStopListener"); } return asyncStartStopExecutorService; } @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { boolean pubSubDomain = false; boolean tempDestination = false; if (remaining.startsWith(JmsConfiguration.QUEUE_PREFIX)) { pubSubDomain = false; remaining = removeStartingCharacters(remaining.substring(JmsConfiguration.QUEUE_PREFIX.length()), '/'); } else if (remaining.startsWith(JmsConfiguration.TOPIC_PREFIX)) { pubSubDomain = true; remaining = removeStartingCharacters(remaining.substring(JmsConfiguration.TOPIC_PREFIX.length()), '/'); } else if (remaining.startsWith(JmsConfiguration.TEMP_QUEUE_PREFIX)) { pubSubDomain = false; tempDestination = true; remaining = removeStartingCharacters(remaining.substring(JmsConfiguration.TEMP_QUEUE_PREFIX.length()), '/'); } else if (remaining.startsWith(JmsConfiguration.TEMP_TOPIC_PREFIX)) { pubSubDomain = true; tempDestination = true; remaining = removeStartingCharacters(remaining.substring(JmsConfiguration.TEMP_TOPIC_PREFIX.length()), '/'); } final String 
subject = convertPathToActualDestination(remaining, parameters); // lets make sure we copy the configuration as each endpoint can // customize its own version JmsConfiguration newConfiguration = getConfiguration().copy(); JmsEndpoint endpoint; if (pubSubDomain) { if (tempDestination) { endpoint = createTemporaryTopicEndpoint(uri, this, subject, newConfiguration); } else { endpoint = createTopicEndpoint(uri, this, subject, newConfiguration); } } else { QueueBrowseStrategy strategy = getQueueBrowseStrategy(); if (tempDestination) { endpoint = createTemporaryQueueEndpoint(uri, this, subject, newConfiguration, strategy); } else { endpoint = createQueueEndpoint(uri, this, subject, newConfiguration, strategy); } } // resolve any custom connection factory first ConnectionFactory cf = resolveAndRemoveReferenceParameter(parameters, "connectionFactory", ConnectionFactory.class); if (cf != null) { endpoint.getConfiguration().setConnectionFactory(cf); } // if username or password provided then wrap the connection factory String cfUsername = getAndRemoveParameter(parameters, "username", String.class, getConfiguration().getUsername()); String cfPassword = getAndRemoveParameter(parameters, "password", String.class, getConfiguration().getPassword()); if (cfUsername != null && cfPassword != null) { cf = endpoint.getConfiguration().getConnectionFactory(); ObjectHelper.notNull(cf, "ConnectionFactory"); LOG.debug("Wrapping existing ConnectionFactory with UserCredentialsConnectionFactoryAdapter using username: {} and password: ******", cfUsername); UserCredentialsConnectionFactoryAdapter ucfa = new UserCredentialsConnectionFactoryAdapter(); ucfa.setTargetConnectionFactory(cf); ucfa.setPassword(cfPassword); ucfa.setUsername(cfUsername); endpoint.getConfiguration().setConnectionFactory(ucfa); } else { // if only username or password was provided then fail if (cfUsername != null || cfPassword != null) { if (cfUsername == null) { throw new IllegalArgumentException("Password must also be provided when using username/password as credentials."); } else { throw new IllegalArgumentException("Username must also be provided when using username/password as credentials."); } } } // jms header strategy String strategyVal = getAndRemoveParameter(parameters, KEY_FORMAT_STRATEGY_PARAM, String.class); JmsKeyFormatStrategy strategy = resolveStandardJmsKeyFormatStrategy(strategyVal); if (strategy != null) { endpoint.setJmsKeyFormatStrategy(strategy); } else { // its not a standard, but a reference parameters.put(KEY_FORMAT_STRATEGY_PARAM, strategyVal); endpoint.setJmsKeyFormatStrategy(resolveAndRemoveReferenceParameter( parameters, KEY_FORMAT_STRATEGY_PARAM, JmsKeyFormatStrategy.class)); } MessageListenerContainerFactory messageListenerContainerFactory = resolveAndRemoveReferenceParameter(parameters, "messageListenerContainerFactoryRef", MessageListenerContainerFactory.class); if (messageListenerContainerFactory == null) { messageListenerContainerFactory = resolveAndRemoveReferenceParameter(parameters, "messageListenerContainerFactory", MessageListenerContainerFactory.class); } if (messageListenerContainerFactory != null) { endpoint.setMessageListenerContainerFactory(messageListenerContainerFactory); } setProperties(endpoint.getConfiguration(), parameters); endpoint.setHeaderFilterStrategy(getHeaderFilterStrategy()); return endpoint; } protected JmsEndpoint createTemporaryTopicEndpoint(String uri, JmsComponent component, String subject, JmsConfiguration configuration) { return new JmsTemporaryTopicEndpoint(uri, component, 
subject, configuration); } protected JmsEndpoint createTopicEndpoint(String uri, JmsComponent component, String subject, JmsConfiguration configuration) { return new JmsEndpoint(uri, component, subject, true, configuration); } protected JmsEndpoint createTemporaryQueueEndpoint(String uri, JmsComponent component, String subject, JmsConfiguration configuration, QueueBrowseStrategy queueBrowseStrategy) { return new JmsTemporaryQueueEndpoint(uri, component, subject, configuration, queueBrowseStrategy); } protected JmsEndpoint createQueueEndpoint(String uri, JmsComponent component, String subject, JmsConfiguration configuration, QueueBrowseStrategy queueBrowseStrategy) { return new JmsQueueEndpoint(uri, component, subject, configuration, queueBrowseStrategy); } /** * Resolves the standard supported {@link JmsKeyFormatStrategy} by a name which can be: * <ul> * <li>default - to use the default strategy</li> * <li>passthrough - to use the passthrough strategy</li> * </ul> * * @param name the name * @return the strategy, or <tt>null</tt> if not a standard name. */ private static JmsKeyFormatStrategy resolveStandardJmsKeyFormatStrategy(String name) { if ("default".equalsIgnoreCase(name)) { return new DefaultJmsKeyFormatStrategy(); } else if ("passthrough".equalsIgnoreCase(name)) { return new PassThroughJmsKeyFormatStrategy(); } else { return null; } } /** * A strategy method allowing the URI destination to be translated into the * actual JMS destination name (say by looking up in JNDI or something) */ protected String convertPathToActualDestination(String path, Map<String, Object> parameters) { return path; } /** * Factory method to create the default configuration instance * * @return a newly created configuration object which can then be further * customized */ protected JmsConfiguration createConfiguration() { return new JmsConfiguration(); } }
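As a usage note, the sketch below wires the component into a CamelContext through the static builders declared near the top of the class; the endpoint URI is then parsed by createEndpoint(...) above. It is only an illustration: the ActiveMQ connection factory, queue name, and option values are arbitrary choices, and any javax.jms.ConnectionFactory would do.

import javax.jms.ConnectionFactory;

import org.apache.activemq.ActiveMQConnectionFactory;   // example broker only; any ConnectionFactory works
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultCamelContext;

public class JmsComponentUsageSketch {
    public static void main(String[] args) throws Exception {
        ConnectionFactory connectionFactory =
            new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");

        CamelContext context = new DefaultCamelContext();
        // register under the "jms" scheme so "jms:" URIs resolve to this component
        context.addComponent("jms", JmsComponent.jmsComponentAutoAcknowledge(connectionFactory));
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                // the queue: prefix and the URI options are handled by createEndpoint(...)
                from("jms:queue:inbox?concurrentConsumers=2").to("log:received");
            }
        });

        context.start();
        context.createProducerTemplate().sendBody("jms:queue:inbox", "hello");
        Thread.sleep(1000);   // give the consumer a moment before shutting down
        context.stop();
    }
}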
/* * Copyright (C) 2012 Brian Muramatsu * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.btmura.android.reddit.database; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; public class DbHelper extends SQLiteOpenHelper { public static final String DATABASE_REDDIT = "reddit"; public static final String DATABASE_TEST = "test"; public static final int LATEST_VERSION = 4; /** * Singleton instances accessible via {@link #getInstance(Context)}. */ private static DbHelper INSTANCE; /** * Return singleton instance of {@link DbHelper} that all users should use to * avoid database locked errors. Make sure to do database writes in serial * though. */ public static DbHelper getInstance(Context context) { synchronized (DbHelper.class) { if (INSTANCE == null) { INSTANCE = new DbHelper(context.getApplicationContext(), DATABASE_REDDIT, LATEST_VERSION); } return INSTANCE; } } /** * Version kept to control what tables are created mostly for testing. */ private final int version; /** * Test constructor. Use {@link #getInstance(Context)}. */ public DbHelper(Context context, String name, int version) { super(context, name, null, version); this.version = version; } @Override public void onOpen(SQLiteDatabase db) { if (!db.isReadOnly() && version == 2) { Sessions.createTempTableV2(db); Things.createTempTableV2(db); Messages.createTempTableV2(db); SubredditResults.createTempTableV2(db); } } @Override public void onCreate(SQLiteDatabase db) { switch (version) { case 4: createDatabaseV4(db); break; case 3: createDatabaseV3(db); break; case 2: createDatabaseV2(db); break; case 1: createDatabaseV1(db); break; } } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { // Upgrades are applied incrementally to get up to the latest version. if (needsUpgrade(oldVersion, newVersion, 2)) { upgradeToDatabaseV2(db); } if (needsUpgrade(oldVersion, newVersion, 3)) { upgradeToDatabaseV3(db); } if (needsUpgrade(oldVersion, newVersion, 4)) { upgradeToDatabaseV4(db); } } private static boolean needsUpgrade( int oldVersion, int newVersion, int upgrade) { return oldVersion < upgrade && newVersion >= upgrade; } private static void createDatabaseV4(SQLiteDatabase db) { AccountActions.create(db); Comments.create(db); HideActions.createV2(db); Messages.create(db); Sessions.create(db); SubredditResults.create(db); Things.create(db); Accounts.create(db); CommentActions.createV2(db); MessageActions.createV2(db); ReadActions.createV2(db); SaveActions.createV3(db); VoteActions.createV3(db); Subreddits.createV2(db); Subreddits.insertDefaults(db); } private static void upgradeToDatabaseV4(SQLiteDatabase db) { AccountActions.create(db); CommentActions.upgradeToV2(db); HideActions.upgradeToV2(db); MessageActions.upgradeToV2(db); ReadActions.upgradeToV2(db); SaveActions.upgradeToV3(db); VoteActions.upgradeToV3(db); } /** * Creates the tables for database version 3. It converts the temporary tables * of V2 into permanent tables. 
It also adds new tables for comments and * hiding things. */ private static void createDatabaseV3(SQLiteDatabase db) { Comments.create(db); HideActions.create(db); Messages.create(db); Sessions.create(db); SubredditResults.create(db); Things.create(db); Accounts.create(db); CommentActions.create(db); MessageActions.create(db); ReadActions.create(db); SaveActions.createV2(db); VoteActions.createV2(db); Subreddits.createV2(db); Subreddits.insertDefaults(db); } /** * Upgrade database to version 3 from version 2. */ private static void upgradeToDatabaseV3(SQLiteDatabase db) { Comments.create(db); HideActions.create(db); Messages.create(db); Sessions.create(db); SubredditResults.create(db); Things.create(db); SaveActions.upgradeToV2(db); VoteActions.upgradeToV2(db); } /** * Creates the tables for database version 2. It creates a bunch of new tables * to support accounts and sync adapters. It also uses temporary tables to * store data created in {@link #onOpen(SQLiteDatabase)}. */ private static void createDatabaseV2(SQLiteDatabase db) { Accounts.create(db); CommentActions.create(db); MessageActions.create(db); ReadActions.create(db); SaveActions.create(db); VoteActions.create(db); Subreddits.createV2(db); Subreddits.insertDefaults(db); } /** * Upgrade database to version 2 from version 1. */ private static void upgradeToDatabaseV2(SQLiteDatabase db) { Accounts.create(db); CommentActions.create(db); MessageActions.create(db); ReadActions.create(db); SaveActions.create(db); VoteActions.create(db); Subreddits.upgradeToV2(db); } /** * Creates the tables for database version 1. It supports storing local * subreddits. */ private static void createDatabaseV1(SQLiteDatabase db) { Subreddits.create(db); Subreddits.insertDefaults(db); } }
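A brief, hedged usage sketch of the singleton accessor follows. The table and column names are placeholders chosen for illustration (they are not taken from the schema classes above); the point is only that callers share one helper instance and, per the class comment, should serialize their writes.

import android.content.ContentValues;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;

public final class DbHelperUsageSketch {
    private DbHelperUsageSketch() {
    }

    /** Inserts one row through the shared helper; run writes like this on a single background thread. */
    public static long insertExample(Context context, String name) {
        // getInstance() hands every caller the same SQLiteOpenHelper,
        // which is what avoids "database is locked" errors
        SQLiteDatabase db = DbHelper.getInstance(context).getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put("name", name);                           // hypothetical column
        return db.insert("example_table", null, values);    // hypothetical table
    }
}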
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.qpid.protonj2.test.driver.codec.transport; import java.util.List; import org.apache.qpid.protonj2.test.driver.codec.primitives.Binary; import org.apache.qpid.protonj2.test.driver.codec.primitives.DescribedType; import org.apache.qpid.protonj2.test.driver.codec.primitives.Symbol; import org.apache.qpid.protonj2.test.driver.codec.primitives.UnsignedByte; import org.apache.qpid.protonj2.test.driver.codec.primitives.UnsignedInteger; import org.apache.qpid.protonj2.test.driver.codec.primitives.UnsignedLong; import io.netty.buffer.ByteBuf; public class Transfer extends PerformativeDescribedType { public static final Symbol DESCRIPTOR_SYMBOL = Symbol.valueOf("amqp:transfer:list"); public static final UnsignedLong DESCRIPTOR_CODE = UnsignedLong.valueOf(0x0000000000000014L); /** * Enumeration which maps to fields in the Transfer Performative */ public enum Field { HANDLE, DELIVERY_ID, DELIVERY_TAG, MESSAGE_FORMAT, SETTLED, MORE, RCV_SETTLE_MODE, STATE, RESUME, ABORTED, BATCHABLE } public Transfer() { super(Field.values().length); } @SuppressWarnings("unchecked") public Transfer(Object described) { super(Field.values().length, (List<Object>) described); } public Transfer(List<Object> described) { super(Field.values().length, described); } @Override public Symbol getDescriptor() { return DESCRIPTOR_SYMBOL; } public Transfer setHandle(UnsignedInteger o) { getList().set(Field.HANDLE.ordinal(), o); return this; } public UnsignedInteger getHandle() { return (UnsignedInteger) getList().get(Field.HANDLE.ordinal()); } public Transfer setDeliveryId(UnsignedInteger o) { getList().set(Field.DELIVERY_ID.ordinal(), o); return this; } public UnsignedInteger getDeliveryId() { return (UnsignedInteger) getList().get(Field.DELIVERY_ID.ordinal()); } public Transfer setDeliveryTag(Binary o) { getList().set(Field.DELIVERY_TAG.ordinal(), o); return this; } public Binary getDeliveryTag() { return (Binary) getList().get(Field.DELIVERY_TAG.ordinal()); } public Transfer setMessageFormat(UnsignedInteger o) { getList().set(Field.MESSAGE_FORMAT.ordinal(), o); return this; } public UnsignedInteger getMessageFormat() { return (UnsignedInteger) getList().get(Field.MESSAGE_FORMAT.ordinal()); } public Transfer setSettled(Boolean o) { getList().set(Field.SETTLED.ordinal(), o); return this; } public Boolean getSettled() { return (Boolean) getList().get(Field.SETTLED.ordinal()); } public Transfer setMore(Boolean o) { getList().set(Field.MORE.ordinal(), o); return this; } public Boolean getMore() { return (Boolean) getList().get(Field.MORE.ordinal()); } public Transfer setRcvSettleMode(UnsignedByte o) { getList().set(Field.RCV_SETTLE_MODE.ordinal(), o); return this; } public UnsignedByte getRcvSettleMode() { return (UnsignedByte) 
getList().get(Field.RCV_SETTLE_MODE.ordinal()); } public Transfer setState(DescribedType o) { getList().set(Field.STATE.ordinal(), o); return this; } public DescribedType getState() { return (DescribedType) getList().get(Field.STATE.ordinal()); } public Transfer setResume(Boolean o) { getList().set(Field.RESUME.ordinal(), o); return this; } public Boolean getResume() { return (Boolean) getList().get(Field.RESUME.ordinal()); } public Transfer setAborted(Boolean o) { getList().set(Field.ABORTED.ordinal(), o); return this; } public Boolean getAborted() { return (Boolean) getList().get(Field.ABORTED.ordinal()); } public Transfer setBatchable(Boolean o) { getList().set(Field.BATCHABLE.ordinal(), o); return this; } public Boolean getBatchable() { return (Boolean) getList().get(Field.BATCHABLE.ordinal()); } @Override public PerformativeType getPerformativeType() { return PerformativeType.TRANSFER; } @Override public <E> void invoke(PerformativeHandler<E> handler, ByteBuf payload, int channel, E context) { handler.handleTransfer(this, payload, channel, context); } @Override public Object getFieldValueOrSpecDefault(int index) { Object result = getFieldValue(index); if (result == null) { Field field = Field.values()[index]; switch (field) { case MORE: result = Boolean.FALSE; break; case RESUME: result = Boolean.FALSE; break; case ABORTED: result = Boolean.FALSE; break; case BATCHABLE: result = Boolean.FALSE; break; default: break; } } return result; } }
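/*
 * Hedged usage sketch, not part of the type above: exercises the fluent setters
 * and the spec-default lookup that getFieldValueOrSpecDefault() implements.
 * UnsignedInteger.valueOf(int) is assumed to exist by analogy with the
 * UnsignedLong.valueOf(...) used for DESCRIPTOR_CODE; every other call appears
 * directly in the Transfer class above.
 */
class TransferUsageSketch {

    static Transfer buildExample() {
        Transfer transfer = new Transfer()
            .setHandle(UnsignedInteger.valueOf(0)) // assumed factory method
            .setSettled(Boolean.FALSE)
            .setMore(Boolean.TRUE);

        // Unset boolean fields fall back to their AMQP spec defaults: MORE,
        // RESUME, ABORTED and BATCHABLE all read as false when absent.
        Object aborted = transfer.getFieldValueOrSpecDefault(Transfer.Field.ABORTED.ordinal());
        assert Boolean.FALSE.equals(aborted);

        // The descriptor identifies the performative on the wire (amqp:transfer:list).
        Symbol descriptor = transfer.getDescriptor();
        assert Transfer.DESCRIPTOR_SYMBOL.equals(descriptor);

        return transfer;
    }
}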
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.apache.drill.exec.util.concurrent.ExecutorServiceUtil; import org.apache.drill.test.DrillTest; import org.junit.Test; /** Tests for validating the Drill executor service utility class */ public final class ExecutorServiceUtilTest extends DrillTest { @Test public void testSuccessfulExecution() { final int numThreads = 2; final int numTasks = 20; ExecutorService service = Executors.newFixedThreadPool(numThreads); List<RequestContainer> requests = new ArrayList<>(numTasks); // Set the test parameters (using the default values) TestParams params = new TestParams(); // Launch the tasks for (int idx = 0; idx < numTasks; idx++) { CallableTask task = new CallableTask(params); Future<TaskResult> future = ExecutorServiceUtil.submit(service, task); requests.add(new RequestContainer(future, task)); } int numSuccess = 0; // Wait for the tasks to finish for (int idx = 0; idx < numTasks; idx++) { RequestContainer request = requests.get(idx); try { TaskResult result = request.future.get(); assertNotNull(result); if (result.isSuccess()) { ++numSuccess; } } catch (Exception e) { // NOOP } } assertEquals(numTasks, numSuccess); } @Test public void testFailedExecution() { final int numThreads = 2; final int numTasks = 20; ExecutorService service = Executors.newFixedThreadPool(numThreads); List<RequestContainer> requests = new ArrayList<>(numTasks); // Set the test parameters TestParams params = new TestParams(); params.generateException = true; // Launch the tasks for (int idx = 0; idx < numTasks; idx++) { CallableTask task = new CallableTask(params); Future<TaskResult> future = ExecutorServiceUtil.submit(service, task); requests.add(new RequestContainer(future, task)); } int numSuccess = 0; int numFailures = 0; // Wait for the tasks to finish for (int idx = 0; idx < numTasks; idx++) { RequestContainer request = requests.get(idx); try { TaskResult result = request.future.get(); assertNotNull(result); if (result.isSuccess()) { ++numSuccess; } } catch (Exception e) { assertTrue(request.task.result.isFailed()); ++numFailures; } } assertEquals(0, numSuccess); assertEquals(numTasks, numFailures); } @Test public void testMixedExecution() { final int numThreads = 2; final int numTasks = 20; ExecutorService service = Executors.newFixedThreadPool(numThreads); List<RequestContainer> requests = new ArrayList<>(numTasks); // Set the test parameters TestParams 
successParams = new TestParams(); TestParams failedParams = new TestParams(); failedParams.generateException = true; int expNumFailedTasks = 0; int expNumSuccessTasks = 0; // Launch the tasks for (int idx = 0; idx < numTasks; idx++) { CallableTask task; if (idx % 2 == 0) { task = new CallableTask(successParams); ++expNumSuccessTasks; } else { task = new CallableTask(failedParams); ++expNumFailedTasks; } Future<TaskResult> future = ExecutorServiceUtil.submit(service, task); requests.add(new RequestContainer(future, task)); } int numSuccess = 0; int numFailures = 0; // Wait for the tasks to finish for (int idx = 0; idx < numTasks; idx++) { RequestContainer request = requests.get(idx); try { TaskResult result = request.future.get(); assertNotNull(result); if (result.isSuccess()) { ++numSuccess; } } catch (Exception e) { assertTrue(request.task.result.isFailed()); ++numFailures; } } assertEquals(expNumSuccessTasks, numSuccess); assertEquals(expNumFailedTasks, numFailures); } @Test public void testCancelExecution() { final int numThreads = 2; ExecutorService service = Executors.newFixedThreadPool(numThreads); RequestContainer request; // Set the test parameters TestParams params = new TestParams(); params.controller = new TaskExecutionController(); // Launch the task CallableTask task = new CallableTask(params); Future<TaskResult> future = ExecutorServiceUtil.submit(service, task); request = new RequestContainer(future, task); // Allow the task to start params.controller.start(); params.controller.hasStarted(); // Allow the task to exit but with a delay so that we can test the blocking nature of "cancel" params.controller.delayMillisOnExit = 50; params.controller.exit(); // Cancel the task boolean result = request.future.cancel(true); if (result) { // We were able to cancel this task; let's make sure that it is done now that the current thread is // unblocked assertTrue(task.result.isCancelled()); } else { // Cancellation couldn't happen, most probably because this thread got context switched for // a long time (should be rare); let's make sure the task is done and successful assertTrue(task.result.isSuccess()); } } // ---------------------------------------------------------------------------- // Internal Classes // ---------------------------------------------------------------------------- @SuppressWarnings("unused") private static final class TaskResult { private enum ExecutionStatus { NOT_STARTED, RUNNING, SUCCEEDED, FAILED, CANCELLED } private ExecutionStatus status; TaskResult() { status = ExecutionStatus.NOT_STARTED; } private boolean isSuccess() { return status.equals(ExecutionStatus.SUCCEEDED); } private boolean isFailed() { return status.equals(ExecutionStatus.FAILED); } private boolean isCancelled() { return status.equals(ExecutionStatus.CANCELLED); } private boolean isFailedOrCancelled() { return status.equals(ExecutionStatus.CANCELLED) || status.equals(ExecutionStatus.FAILED); } } @SuppressWarnings("unused") private static final class TaskExecutionController { private volatile boolean canStart = false; private volatile boolean canExit = false; private volatile boolean started = false; private volatile boolean exited = false; private volatile int delayMillisOnExit = 0; private final Object monitor = new Object(); private void canStart() { synchronized(monitor) { while (!canStart) { try { monitor.wait(); } catch (InterruptedException ie) { // NOOP } } started = true; monitor.notify(); } } private void canExit() { synchronized(monitor) { while (!canExit) { try { monitor.wait(); }
catch (InterruptedException ie) { // NOOP } } } // Wait requested delay time before exiting for (int i = 0; i < delayMillisOnExit; i++) { try { Thread.sleep(1); // sleep 1 ms } catch (InterruptedException ie) { // NOOP } } synchronized(monitor) { exited = true; monitor.notify(); } } private void start() { synchronized(monitor) { canStart = true; monitor.notify(); } } private void exit() { synchronized(monitor) { canExit = true; monitor.notify(); } } private void hasStarted() { synchronized(monitor) { while (!started) { try { monitor.wait(); } catch (InterruptedException ie) { // NOOP } } } } private void hasExited() { synchronized(monitor) { while (!exited) { try { monitor.wait(); } catch (InterruptedException ie) { // NOOP } } } } } private static final class TestParams { private final int waitTimeMillis = 2; private boolean generateException = false; private TaskExecutionController controller = null; } private static final class CallableTask implements Callable<TaskResult> { private final TaskResult result = new TaskResult(); private final TestParams params; private CallableTask(TestParams params) { this.params = params; } @Override public TaskResult call() throws Exception { beforeStart(); result.status = TaskResult.ExecutionStatus.RUNNING; boolean interrupted = false; Exception exc = null; try { for (int i = 0; i < params.waitTimeMillis; i++) { try { Thread.sleep(1); // sleep 1 ms } catch (InterruptedException ie) { interrupted = true; } } if (params.generateException) { throw new RuntimeException("Test emulated exception.."); } } catch (Exception e) { exc = e; throw e; } finally { beforeExit(); if (interrupted) { result.status = TaskResult.ExecutionStatus.CANCELLED; } else if (exc != null) { result.status = TaskResult.ExecutionStatus.FAILED; } else { result.status = TaskResult.ExecutionStatus.SUCCEEDED; } } return result; } private void beforeStart() { if (params.controller != null) { params.controller.canStart(); } } private void beforeExit() { if (params.controller != null) { params.controller.canExit(); } } } private static final class RequestContainer { private final Future<TaskResult> future; private final CallableTask task; private RequestContainer(Future<TaskResult> future, CallableTask task) { this.future = future; this.task = task; } } }
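/*
 * Hedged usage sketch, not part of the test above: the submit/get/cancel
 * pattern the tests exercise against ExecutorServiceUtil. It assumes
 * submit(...) is generic over the callable's result type, as the
 * Future<TaskResult> usage in the tests suggests; the tests also rely on
 * cancel(true) not returning until the task has actually wound down.
 */
class ExecutorServiceUtilUsageSketch {

    static void run() throws Exception {
        java.util.concurrent.ExecutorService service =
            java.util.concurrent.Executors.newFixedThreadPool(2);
        try {
            // Submit through the utility rather than directly through the executor.
            java.util.concurrent.Future<String> future =
                ExecutorServiceUtil.submit(service, () -> "done");

            System.out.println(future.get());        // get() rethrows the callable's exception, if any
            boolean cancelled = future.cancel(true);  // false here: the task already completed
            System.out.println("cancelled=" + cancelled);
        } finally {
            service.shutdown();
        }
    }
}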
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.networksecurity.v1beta1; import static com.google.cloud.networksecurity.v1beta1.NetworkSecurityClient.ListAuthorizationPoliciesPagedResponse; import static com.google.cloud.networksecurity.v1beta1.NetworkSecurityClient.ListClientTlsPoliciesPagedResponse; import static com.google.cloud.networksecurity.v1beta1.NetworkSecurityClient.ListServerTlsPoliciesPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class NetworkSecurityClientTest { private static MockNetworkSecurity mockNetworkSecurity; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private NetworkSecurityClient client; @BeforeClass public static void startStaticServer() { mockNetworkSecurity = new MockNetworkSecurity(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockNetworkSecurity)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); NetworkSecuritySettings settings = NetworkSecuritySettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = NetworkSecurityClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void listAuthorizationPoliciesTest() throws Exception { AuthorizationPolicy responsesElement = AuthorizationPolicy.newBuilder().build(); ListAuthorizationPoliciesResponse expectedResponse = ListAuthorizationPoliciesResponse.newBuilder() .setNextPageToken("") .addAllAuthorizationPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); LocationName 
parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListAuthorizationPoliciesPagedResponse pagedListResponse = client.listAuthorizationPolicies(parent); List<AuthorizationPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getAuthorizationPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListAuthorizationPoliciesRequest actualRequest = ((ListAuthorizationPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listAuthorizationPoliciesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listAuthorizationPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listAuthorizationPoliciesTest2() throws Exception { AuthorizationPolicy responsesElement = AuthorizationPolicy.newBuilder().build(); ListAuthorizationPoliciesResponse expectedResponse = ListAuthorizationPoliciesResponse.newBuilder() .setNextPageToken("") .addAllAuthorizationPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); String parent = "parent-995424086"; ListAuthorizationPoliciesPagedResponse pagedListResponse = client.listAuthorizationPolicies(parent); List<AuthorizationPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getAuthorizationPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListAuthorizationPoliciesRequest actualRequest = ((ListAuthorizationPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listAuthorizationPoliciesExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; client.listAuthorizationPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getAuthorizationPolicyTest() throws Exception { AuthorizationPolicy expectedResponse = AuthorizationPolicy.newBuilder() .setName( AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]") .toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .addAllRules(new ArrayList<AuthorizationPolicy.Rule>()) .build(); mockNetworkSecurity.addResponse(expectedResponse); AuthorizationPolicyName name = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); AuthorizationPolicy actualResponse = client.getAuthorizationPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAuthorizationPolicyRequest actualRequest = ((GetAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAuthorizationPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { AuthorizationPolicyName name = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); client.getAuthorizationPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getAuthorizationPolicyTest2() throws Exception { AuthorizationPolicy expectedResponse = AuthorizationPolicy.newBuilder() .setName( AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]") .toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .addAllRules(new ArrayList<AuthorizationPolicy.Rule>()) .build(); mockNetworkSecurity.addResponse(expectedResponse); String name = "name3373707"; AuthorizationPolicy actualResponse = client.getAuthorizationPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAuthorizationPolicyRequest actualRequest = ((GetAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAuthorizationPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.getAuthorizationPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createAuthorizationPolicyTest() throws Exception { AuthorizationPolicy expectedResponse = AuthorizationPolicy.newBuilder() .setName( AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]") .toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .addAllRules(new ArrayList<AuthorizationPolicy.Rule>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createAuthorizationPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); AuthorizationPolicyName parent = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); String authorizationPolicyId = "authorizationPolicyId1314252166"; AuthorizationPolicy actualResponse = client .createAuthorizationPolicyAsync(parent, authorizationPolicy, authorizationPolicyId) .get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateAuthorizationPolicyRequest actualRequest = ((CreateAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(authorizationPolicy, actualRequest.getAuthorizationPolicy()); Assert.assertEquals(authorizationPolicyId, actualRequest.getAuthorizationPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createAuthorizationPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { AuthorizationPolicyName parent = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); String authorizationPolicyId = "authorizationPolicyId1314252166"; client .createAuthorizationPolicyAsync(parent, authorizationPolicy, authorizationPolicyId) .get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void createAuthorizationPolicyTest2() throws Exception { AuthorizationPolicy expectedResponse = AuthorizationPolicy.newBuilder() .setName( AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]") .toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .addAllRules(new ArrayList<AuthorizationPolicy.Rule>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createAuthorizationPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String parent = "parent-995424086"; AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); String authorizationPolicyId 
= "authorizationPolicyId1314252166"; AuthorizationPolicy actualResponse = client .createAuthorizationPolicyAsync(parent, authorizationPolicy, authorizationPolicyId) .get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateAuthorizationPolicyRequest actualRequest = ((CreateAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(authorizationPolicy, actualRequest.getAuthorizationPolicy()); Assert.assertEquals(authorizationPolicyId, actualRequest.getAuthorizationPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createAuthorizationPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); String authorizationPolicyId = "authorizationPolicyId1314252166"; client .createAuthorizationPolicyAsync(parent, authorizationPolicy, authorizationPolicyId) .get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void updateAuthorizationPolicyTest() throws Exception { AuthorizationPolicy expectedResponse = AuthorizationPolicy.newBuilder() .setName( AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]") .toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .addAllRules(new ArrayList<AuthorizationPolicy.Rule>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("updateAuthorizationPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); AuthorizationPolicy actualResponse = client.updateAuthorizationPolicyAsync(authorizationPolicy, updateMask).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateAuthorizationPolicyRequest actualRequest = ((UpdateAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(authorizationPolicy, actualRequest.getAuthorizationPolicy()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateAuthorizationPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { AuthorizationPolicy authorizationPolicy = AuthorizationPolicy.newBuilder().build(); FieldMask 
updateMask = FieldMask.newBuilder().build(); client.updateAuthorizationPolicyAsync(authorizationPolicy, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteAuthorizationPolicyTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteAuthorizationPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); AuthorizationPolicyName name = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); client.deleteAuthorizationPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteAuthorizationPolicyRequest actualRequest = ((DeleteAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteAuthorizationPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { AuthorizationPolicyName name = AuthorizationPolicyName.of("[PROJECT]", "[LOCATION]", "[AUTHORIZATION_POLICY]"); client.deleteAuthorizationPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteAuthorizationPolicyTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteAuthorizationPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String name = "name3373707"; client.deleteAuthorizationPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteAuthorizationPolicyRequest actualRequest = ((DeleteAuthorizationPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteAuthorizationPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.deleteAuthorizationPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); 
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void listServerTlsPoliciesTest() throws Exception { ServerTlsPolicy responsesElement = ServerTlsPolicy.newBuilder().build(); ListServerTlsPoliciesResponse expectedResponse = ListServerTlsPoliciesResponse.newBuilder() .setNextPageToken("") .addAllServerTlsPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListServerTlsPoliciesPagedResponse pagedListResponse = client.listServerTlsPolicies(parent); List<ServerTlsPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getServerTlsPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListServerTlsPoliciesRequest actualRequest = ((ListServerTlsPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listServerTlsPoliciesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listServerTlsPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listServerTlsPoliciesTest2() throws Exception { ServerTlsPolicy responsesElement = ServerTlsPolicy.newBuilder().build(); ListServerTlsPoliciesResponse expectedResponse = ListServerTlsPoliciesResponse.newBuilder() .setNextPageToken("") .addAllServerTlsPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); String parent = "parent-995424086"; ListServerTlsPoliciesPagedResponse pagedListResponse = client.listServerTlsPolicies(parent); List<ServerTlsPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getServerTlsPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListServerTlsPoliciesRequest actualRequest = ((ListServerTlsPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listServerTlsPoliciesExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; client.listServerTlsPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getServerTlsPolicyTest() throws Exception { ServerTlsPolicy expectedResponse = ServerTlsPolicy.newBuilder() .setName( ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setAllowOpen(true) .setServerCertificate(CertificateProvider.newBuilder().build()) .setMtlsPolicy(ServerTlsPolicy.MTLSPolicy.newBuilder().build()) .build(); mockNetworkSecurity.addResponse(expectedResponse); ServerTlsPolicyName name = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); ServerTlsPolicy actualResponse = client.getServerTlsPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetServerTlsPolicyRequest actualRequest = ((GetServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getServerTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ServerTlsPolicyName name = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); client.getServerTlsPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getServerTlsPolicyTest2() throws Exception { ServerTlsPolicy expectedResponse = ServerTlsPolicy.newBuilder() .setName( ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setAllowOpen(true) .setServerCertificate(CertificateProvider.newBuilder().build()) .setMtlsPolicy(ServerTlsPolicy.MTLSPolicy.newBuilder().build()) .build(); mockNetworkSecurity.addResponse(expectedResponse); String name = "name3373707"; ServerTlsPolicy actualResponse = client.getServerTlsPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetServerTlsPolicyRequest actualRequest = ((GetServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getServerTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.getServerTlsPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createServerTlsPolicyTest() throws Exception { ServerTlsPolicy expectedResponse = ServerTlsPolicy.newBuilder() .setName( ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setAllowOpen(true) .setServerCertificate(CertificateProvider.newBuilder().build()) .setMtlsPolicy(ServerTlsPolicy.MTLSPolicy.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createServerTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ServerTlsPolicyName parent = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); String serverTlsPolicyId = "serverTlsPolicyId-1966046011"; ServerTlsPolicy actualResponse = client.createServerTlsPolicyAsync(parent, serverTlsPolicy, serverTlsPolicyId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateServerTlsPolicyRequest actualRequest = ((CreateServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(serverTlsPolicy, actualRequest.getServerTlsPolicy()); Assert.assertEquals(serverTlsPolicyId, actualRequest.getServerTlsPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createServerTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ServerTlsPolicyName parent = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); String serverTlsPolicyId = "serverTlsPolicyId-1966046011"; client.createServerTlsPolicyAsync(parent, serverTlsPolicy, serverTlsPolicyId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void createServerTlsPolicyTest2() throws Exception { ServerTlsPolicy expectedResponse = ServerTlsPolicy.newBuilder() .setName( ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setAllowOpen(true) .setServerCertificate(CertificateProvider.newBuilder().build()) .setMtlsPolicy(ServerTlsPolicy.MTLSPolicy.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createServerTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String parent = "parent-995424086"; ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); String serverTlsPolicyId 
= "serverTlsPolicyId-1966046011"; ServerTlsPolicy actualResponse = client.createServerTlsPolicyAsync(parent, serverTlsPolicy, serverTlsPolicyId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateServerTlsPolicyRequest actualRequest = ((CreateServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(serverTlsPolicy, actualRequest.getServerTlsPolicy()); Assert.assertEquals(serverTlsPolicyId, actualRequest.getServerTlsPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createServerTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); String serverTlsPolicyId = "serverTlsPolicyId-1966046011"; client.createServerTlsPolicyAsync(parent, serverTlsPolicy, serverTlsPolicyId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void updateServerTlsPolicyTest() throws Exception { ServerTlsPolicy expectedResponse = ServerTlsPolicy.newBuilder() .setName( ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setAllowOpen(true) .setServerCertificate(CertificateProvider.newBuilder().build()) .setMtlsPolicy(ServerTlsPolicy.MTLSPolicy.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() .setName("updateServerTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); ServerTlsPolicy actualResponse = client.updateServerTlsPolicyAsync(serverTlsPolicy, updateMask).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateServerTlsPolicyRequest actualRequest = ((UpdateServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(serverTlsPolicy, actualRequest.getServerTlsPolicy()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateServerTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ServerTlsPolicy serverTlsPolicy = ServerTlsPolicy.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); 
client.updateServerTlsPolicyAsync(serverTlsPolicy, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteServerTlsPolicyTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteServerTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ServerTlsPolicyName name = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); client.deleteServerTlsPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteServerTlsPolicyRequest actualRequest = ((DeleteServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteServerTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ServerTlsPolicyName name = ServerTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[SERVER_TLS_POLICY]"); client.deleteServerTlsPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteServerTlsPolicyTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteServerTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String name = "name3373707"; client.deleteServerTlsPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteServerTlsPolicyRequest actualRequest = ((DeleteServerTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteServerTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.deleteServerTlsPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void listClientTlsPoliciesTest() throws Exception { 
ClientTlsPolicy responsesElement = ClientTlsPolicy.newBuilder().build(); ListClientTlsPoliciesResponse expectedResponse = ListClientTlsPoliciesResponse.newBuilder() .setNextPageToken("") .addAllClientTlsPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListClientTlsPoliciesPagedResponse pagedListResponse = client.listClientTlsPolicies(parent); List<ClientTlsPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getClientTlsPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListClientTlsPoliciesRequest actualRequest = ((ListClientTlsPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listClientTlsPoliciesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listClientTlsPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listClientTlsPoliciesTest2() throws Exception { ClientTlsPolicy responsesElement = ClientTlsPolicy.newBuilder().build(); ListClientTlsPoliciesResponse expectedResponse = ListClientTlsPoliciesResponse.newBuilder() .setNextPageToken("") .addAllClientTlsPolicies(Arrays.asList(responsesElement)) .build(); mockNetworkSecurity.addResponse(expectedResponse); String parent = "parent-995424086"; ListClientTlsPoliciesPagedResponse pagedListResponse = client.listClientTlsPolicies(parent); List<ClientTlsPolicy> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getClientTlsPoliciesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListClientTlsPoliciesRequest actualRequest = ((ListClientTlsPoliciesRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listClientTlsPoliciesExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; client.listClientTlsPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getClientTlsPolicyTest() throws Exception { ClientTlsPolicy expectedResponse = ClientTlsPolicy.newBuilder() .setName( ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setSni("sni114030") .setClientCertificate(CertificateProvider.newBuilder().build()) .addAllServerValidationCa(new ArrayList<ValidationCA>()) .build(); mockNetworkSecurity.addResponse(expectedResponse); ClientTlsPolicyName name = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); ClientTlsPolicy actualResponse = client.getClientTlsPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetClientTlsPolicyRequest actualRequest = ((GetClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getClientTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ClientTlsPolicyName name = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); client.getClientTlsPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getClientTlsPolicyTest2() throws Exception { ClientTlsPolicy expectedResponse = ClientTlsPolicy.newBuilder() .setName( ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setSni("sni114030") .setClientCertificate(CertificateProvider.newBuilder().build()) .addAllServerValidationCa(new ArrayList<ValidationCA>()) .build(); mockNetworkSecurity.addResponse(expectedResponse); String name = "name3373707"; ClientTlsPolicy actualResponse = client.getClientTlsPolicy(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetClientTlsPolicyRequest actualRequest = ((GetClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getClientTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.getClientTlsPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createClientTlsPolicyTest() throws Exception { ClientTlsPolicy expectedResponse = ClientTlsPolicy.newBuilder() .setName( ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setSni("sni114030") .setClientCertificate(CertificateProvider.newBuilder().build()) .addAllServerValidationCa(new ArrayList<ValidationCA>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createClientTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ClientTlsPolicyName parent = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); String clientTlsPolicyId = "clientTlsPolicyId-188933315"; ClientTlsPolicy actualResponse = client.createClientTlsPolicyAsync(parent, clientTlsPolicy, clientTlsPolicyId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateClientTlsPolicyRequest actualRequest = ((CreateClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(clientTlsPolicy, actualRequest.getClientTlsPolicy()); Assert.assertEquals(clientTlsPolicyId, actualRequest.getClientTlsPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createClientTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ClientTlsPolicyName parent = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); String clientTlsPolicyId = "clientTlsPolicyId-188933315"; client.createClientTlsPolicyAsync(parent, clientTlsPolicy, clientTlsPolicyId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void createClientTlsPolicyTest2() throws Exception { ClientTlsPolicy expectedResponse = ClientTlsPolicy.newBuilder() .setName( ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setSni("sni114030") .setClientCertificate(CertificateProvider.newBuilder().build()) .addAllServerValidationCa(new ArrayList<ValidationCA>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createClientTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String parent = "parent-995424086"; ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); String clientTlsPolicyId = 
"clientTlsPolicyId-188933315"; ClientTlsPolicy actualResponse = client.createClientTlsPolicyAsync(parent, clientTlsPolicy, clientTlsPolicyId).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateClientTlsPolicyRequest actualRequest = ((CreateClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(clientTlsPolicy, actualRequest.getClientTlsPolicy()); Assert.assertEquals(clientTlsPolicyId, actualRequest.getClientTlsPolicyId()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createClientTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String parent = "parent-995424086"; ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); String clientTlsPolicyId = "clientTlsPolicyId-188933315"; client.createClientTlsPolicyAsync(parent, clientTlsPolicy, clientTlsPolicyId).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void updateClientTlsPolicyTest() throws Exception { ClientTlsPolicy expectedResponse = ClientTlsPolicy.newBuilder() .setName( ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]").toString()) .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .putAllLabels(new HashMap<String, String>()) .setSni("sni114030") .setClientCertificate(CertificateProvider.newBuilder().build()) .addAllServerValidationCa(new ArrayList<ValidationCA>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("updateClientTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); ClientTlsPolicy actualResponse = client.updateClientTlsPolicyAsync(clientTlsPolicy, updateMask).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateClientTlsPolicyRequest actualRequest = ((UpdateClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(clientTlsPolicy, actualRequest.getClientTlsPolicy()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateClientTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ClientTlsPolicy clientTlsPolicy = ClientTlsPolicy.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); 
client.updateClientTlsPolicyAsync(clientTlsPolicy, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteClientTlsPolicyTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteClientTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); ClientTlsPolicyName name = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); client.deleteClientTlsPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteClientTlsPolicyRequest actualRequest = ((DeleteClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteClientTlsPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { ClientTlsPolicyName name = ClientTlsPolicyName.of("[PROJECT]", "[LOCATION]", "[CLIENT_TLS_POLICY]"); client.deleteClientTlsPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteClientTlsPolicyTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteClientTlsPolicyTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockNetworkSecurity.addResponse(resultOperation); String name = "name3373707"; client.deleteClientTlsPolicyAsync(name).get(); List<AbstractMessage> actualRequests = mockNetworkSecurity.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteClientTlsPolicyRequest actualRequest = ((DeleteClientTlsPolicyRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteClientTlsPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockNetworkSecurity.addException(exception); try { String name = "name3373707"; client.deleteClientTlsPolicyAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } }
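// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated test class above). It shows how the
// calls exercised by these tests might look against a real endpoint. The client
// type name NetworkSecurityClient, its create() factory and the AutoCloseable
// behaviour are assumptions based on the usual generated-client conventions;
// project, location and policy ids are placeholders. Only methods already
// exercised by the tests (getClientTlsPolicy, updateClientTlsPolicyAsync) are used.
// ---------------------------------------------------------------------------
class ClientTlsPolicyUsageSketch {
  public static void main(String[] args) throws Exception {
    try (NetworkSecurityClient client = NetworkSecurityClient.create()) {
      ClientTlsPolicyName name =
          ClientTlsPolicyName.of("my-project", "us-central1", "my-client-tls-policy");
      // Synchronous read, as in getClientTlsPolicyTest().
      ClientTlsPolicy policy = client.getClientTlsPolicy(name);
      // Long-running update, as in updateClientTlsPolicyTest(); get() blocks until the
      // operation completes and returns the updated policy.
      ClientTlsPolicy updated =
          client
              .updateClientTlsPolicyAsync(
                  policy.toBuilder().setDescription("rotated CA bundle").build(),
                  FieldMask.newBuilder().addPaths("description").build())
              .get();
      System.out.println(updated.getName());
    }
  }
}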
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.processor.jdbclookup; import com.google.common.base.Throwables; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.UncheckedExecutionException; import com.streamsets.pipeline.api.Batch; import com.streamsets.pipeline.api.Field; import com.streamsets.pipeline.api.Processor; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.base.OnRecordErrorException; import com.streamsets.pipeline.api.base.SingleLaneRecordProcessor; import com.streamsets.pipeline.api.el.ELEval; import com.streamsets.pipeline.api.el.ELEvalException; import com.streamsets.pipeline.api.el.ELVars; import com.streamsets.pipeline.lib.cache.CacheCleaner; import com.streamsets.pipeline.lib.el.RecordEL; import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService; import com.streamsets.pipeline.lib.jdbc.DataType; import com.streamsets.pipeline.lib.jdbc.HikariPoolConfigBean; import com.streamsets.pipeline.lib.jdbc.JdbcErrors; import com.streamsets.pipeline.lib.jdbc.JdbcFieldColumnMapping; import com.streamsets.pipeline.lib.jdbc.JdbcUtil; import com.streamsets.pipeline.lib.jdbc.UnknownTypeAction; import com.streamsets.pipeline.lib.jdbc.UtilsProvider; import com.streamsets.pipeline.stage.common.DefaultErrorRecordHandler; import com.streamsets.pipeline.stage.common.ErrorRecordHandler; import com.streamsets.pipeline.stage.common.MissingValuesBehavior; import com.streamsets.pipeline.stage.common.MultipleValuesBehavior; import com.streamsets.pipeline.stage.destination.jdbc.Groups; import com.streamsets.pipeline.stage.processor.kv.CacheConfig; import com.streamsets.pipeline.stage.processor.kv.LookupUtils; import com.zaxxer.hikari.HikariDataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Iterator; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.StringUtils; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.ExecutionException; public class JdbcLookupProcessor extends SingleLaneRecordProcessor { private static final Logger LOG = LoggerFactory.getLogger(JdbcLookupProcessor.class); public static final String DATE_FORMAT = "yyyy/MM/dd"; public static final String DATETIME_FORMAT = "yyyy/MM/dd HH:mm:ss"; static final DateTimeFormatter DATE_FORMATTER = DateTimeFormat.forPattern(DATE_FORMAT); static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormat.forPattern(DATETIME_FORMAT); private static final String HIKARI_CONFIG_PREFIX = 
"hikariConfigBean."; private static final String CONNECTION_STRING = HIKARI_CONFIG_PREFIX + "connectionString"; private static final String COLUMN_MAPPINGS = "columnMappings"; private final CacheConfig cacheConfig; private ELEval queryEval; private final String query; private final boolean validateColumnMappings; private final List<JdbcFieldColumnMapping> columnMappings; private final MultipleValuesBehavior multipleValuesBehavior; private final int maxClobSize; private final int maxBlobSize; private final HikariPoolConfigBean hikariConfigBean; private ErrorRecordHandler errorRecordHandler; private HikariDataSource dataSource = null; private Map<String, String> columnsToFields = new HashMap<>(); private Map<String, String> columnsToDefaults = new HashMap<>(); private Map<String, DataType> columnsToTypes = new HashMap<>(); private LoadingCache<String, Optional<List<Map<String, Field>>>> cache; private Optional<List<Map<String, Field>>> defaultValue; private CacheCleaner cacheCleaner; private final MissingValuesBehavior missingValuesBehavior; private final UnknownTypeAction unknownTypeAction; private ExecutorService generationExecutor; private int preprocessThreads = 0; private JdbcUtil jdbcUtil; public JdbcLookupProcessor( String query, Boolean validateColumnMappings, List<JdbcFieldColumnMapping> columnMappings, MultipleValuesBehavior multipleValuesBehavior, MissingValuesBehavior missingValuesBehavior, UnknownTypeAction unknownTypeAction, int maxClobSize, int maxBlobSize, HikariPoolConfigBean hikariConfigBean, CacheConfig cacheConfig ) { this.query = query; this.validateColumnMappings = validateColumnMappings; this.columnMappings = columnMappings; this.multipleValuesBehavior = multipleValuesBehavior; this.missingValuesBehavior = missingValuesBehavior; this.unknownTypeAction = unknownTypeAction; this.maxClobSize = maxClobSize; this.maxBlobSize = maxBlobSize; this.hikariConfigBean = hikariConfigBean; this.cacheConfig = cacheConfig; } /** {@inheritDoc} */ @Override protected List<ConfigIssue> init() { // Validate configuration values and open any required resources. 
List<ConfigIssue> issues = super.init(); if (issues.isEmpty()) { jdbcUtil = UtilsProvider.getJdbcUtil(); } errorRecordHandler = new DefaultErrorRecordHandler(getContext()); Processor.Context context = getContext(); queryEval = getContext().createELEval("query"); issues = hikariConfigBean.validateConfigs(context, issues); if (context.getRunnerId() == 0) { if (issues.isEmpty() && null == dataSource) { try { dataSource = jdbcUtil.createDataSourceForRead(hikariConfigBean); context.getStageRunnerSharedMap().put("jdbcLookupProcessor.dataSource", dataSource); } catch (StageException e) { issues.add(context.createConfigIssue(Groups.JDBC.name(), CONNECTION_STRING, e.getErrorCode(), e.getParams())); } } } else { dataSource = (HikariDataSource) context.getStageRunnerSharedMap().get("jdbcLookupProcessor.dataSource"); } if(issues.isEmpty()) { this.defaultValue = calculateDefault(context, issues); } if(issues.isEmpty() && this.validateColumnMappings) { try (Connection validationConnection = dataSource.getConnection(); Statement statement = validationConnection.createStatement()) { String preparedQuery = prepareQuery(query); statement.setFetchSize(1); statement.setMaxRows(1); List<String> columnNamesFromDb = getColumnsFromValidationQuery(issues, context, statement, preparedQuery); if (issues.isEmpty()) { for (String columnName : columnsToFields.keySet()) { if (!columnNamesFromDb.contains(columnName)) { issues.add(context.createConfigIssue(Groups.JDBC.name(), COLUMN_MAPPINGS, JdbcErrors.JDBC_95, columnName)); } } } } catch (SQLException e) { issues.add(context.createConfigIssue( Groups.JDBC.name(), CONNECTION_STRING, JdbcErrors.JDBC_00, jdbcUtil.formatSqlException(e) )); } } if (issues.isEmpty()) { cache = buildCache(); cacheCleaner = new CacheCleaner(cache, "JdbcLookupProcessor", 10 * 60 * 1000); if (cacheConfig.enabled) { preprocessThreads = Math.min(hikariConfigBean.minIdle, Runtime.getRuntime().availableProcessors()-1); preprocessThreads = Math.max(preprocessThreads, 1); } } if (context.getRunnerId() == 0) { if (issues.isEmpty() && generationExecutor == null) { generationExecutor = new SafeScheduledExecutorService( hikariConfigBean.maximumPoolSize, "JDBC Lookup Cache Warmer" ); context.getStageRunnerSharedMap().put("jdbcLookupProcessor.generationExecutor", generationExecutor); } } else { generationExecutor = (SafeScheduledExecutorService) context.getStageRunnerSharedMap().get( "jdbcLookupProcessor.generationExecutor"); } // If issues is not empty, the UI will inform the user of each configuration issue in the list. 
return issues; } private Optional<List<Map<String, Field>>> calculateDefault(Processor.Context context, List<ConfigIssue> issues) { for (JdbcFieldColumnMapping mapping : columnMappings) { LOG.debug("Mapping field {} to column {}", mapping.field, mapping.columnName); columnsToFields.put(mapping.columnName, mapping.field); if (!StringUtils.isEmpty(mapping.defaultValue) && mapping.dataType == DataType.USE_COLUMN_TYPE) { issues.add(context.createConfigIssue(Groups.JDBC.name(), COLUMN_MAPPINGS, JdbcErrors.JDBC_53, mapping.field)); } columnsToDefaults.put(mapping.columnName, mapping.defaultValue); columnsToTypes.put(mapping.columnName, mapping.dataType); if (mapping.dataType == DataType.DATE) { try { DATE_FORMATTER.parseDateTime(mapping.defaultValue); } catch (IllegalArgumentException e) { issues.add(context.createConfigIssue( Groups.JDBC.name(), COLUMN_MAPPINGS, JdbcErrors.JDBC_55, mapping.field, e.toString() )); } } else if (mapping.dataType == DataType.DATETIME) { try { DATETIME_FORMATTER.parseDateTime(mapping.defaultValue); } catch (IllegalArgumentException e) { issues.add(context.createConfigIssue( Groups.JDBC.name(), COLUMN_MAPPINGS, JdbcErrors.JDBC_56, mapping.field, e.toString() )); } } } if(!issues.isEmpty()) { return Optional.empty(); } Map<String, Field> defaultValues = new HashMap<>(); for (String column : columnsToFields.keySet()) { String defaultValue = columnsToDefaults.get(column); DataType dataType = columnsToTypes.get(column); if (dataType != DataType.USE_COLUMN_TYPE) { Field field; try { if (dataType == DataType.DATE) { field = Field.createDate(DATE_FORMATTER.parseDateTime(defaultValue).toDate()); } else if (dataType == DataType.DATETIME) { field = Field.createDatetime(DATETIME_FORMATTER.parseDateTime(defaultValue).toDate()); } else { field = Field.create(Field.Type.valueOf(columnsToTypes.get(column).getLabel()), defaultValue); } defaultValues.put(column, field); } catch (IllegalArgumentException e) { issues.add(context.createConfigIssue( Groups.JDBC.name(), COLUMN_MAPPINGS, JdbcErrors.JDBC_410, column, defaultValue, e )); } } } return defaultValues.isEmpty() ? 
Optional.empty() : Optional.of(ImmutableList.of(defaultValues)); } /** {@inheritDoc} */ @Override public void destroy() { if (getContext().getRunnerId() == 0) { if (generationExecutor != null) { generationExecutor.shutdown(); try { if (!generationExecutor.awaitTermination(5, TimeUnit.SECONDS)) { generationExecutor.shutdownNow(); } } catch (InterruptedException ex) { LOG.error("Interrupted while attempting to shutdown Generator Executor: ", ex); Thread.currentThread().interrupt(); } } // close dataSource after closing threadpool executor as we could have queries running before closing the executor if (jdbcUtil != null) { jdbcUtil.closeQuietly(dataSource); } } super.destroy(); } public void preprocess(Batch batch) throws StageException { //Gather all JDBC queries Iterator<Record> it = batch.getRecords(); List<List<String>> preparedQueries = new ArrayList<>(); for (int i =0; i < preprocessThreads; i++) { preparedQueries.add(new ArrayList<String>()); } int recordNum = 0; while (it.hasNext()) { Record record = it.next(); recordNum++; try { ELVars elVars = getContext().createELVars(); RecordEL.setRecordInContext(elVars, record); String preparedQuery = queryEval.eval(elVars, query, String.class); preparedQueries.get((recordNum-1) % preprocessThreads).add(preparedQuery); } catch (ELEvalException e) { LOG.error(JdbcErrors.JDBC_01.getMessage(), query, e); throw new OnRecordErrorException(record, JdbcErrors.JDBC_01, query); } } for (int i =0; i < preprocessThreads; i++) { final List<String> preparedQueriesPart = preparedQueries.get(i); generationExecutor.submit(() -> { try { for ( String query : preparedQueriesPart) cache.get(query); } catch (Throwable ex) { LOG.error("Error while producing records", ex); } }); } } @Override public void process(Batch batch, SingleLaneBatchMaker batchMaker) throws StageException { if (!batch.getRecords().hasNext()) { // No records - take the opportunity to clean up the cache so that we don't hold on to memory indefinitely cacheCleaner.periodicCleanUp(); } //Cache warming if (preprocessThreads > 0) { preprocess(batch); } //Normal processing per record super.process(batch, batchMaker); } /** {@inheritDoc} */ @Override protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException { try { ELVars elVars = getContext().createELVars(); RecordEL.setRecordInContext(elVars, record); String preparedQuery = queryEval.eval(elVars, query, String.class); Optional<List<Map<String, Field>>> entry = cache.get(preparedQuery); if (!entry.isPresent()) { // No results switch (missingValuesBehavior) { case SEND_TO_ERROR: LOG.error(JdbcErrors.JDBC_04.getMessage(), preparedQuery); errorRecordHandler.onError(new OnRecordErrorException(record, JdbcErrors.JDBC_04, preparedQuery)); break; case PASS_RECORD_ON: batchMaker.addRecord(record); break; default: throw new IllegalStateException("Unknown missing value behavior: " + missingValuesBehavior); } } else { List<Map<String, Field>> values = entry.get(); switch (multipleValuesBehavior) { case FIRST_ONLY: setFieldsInRecord(record, values.get(0)); batchMaker.addRecord(record); break; case SPLIT_INTO_MULTIPLE_RECORDS: int i = 0; for(Map<String, Field> lookupItem : values) { Record newRecord = getContext().cloneRecord(record, String.valueOf(i++)); setFieldsInRecord(newRecord, lookupItem); batchMaker.addRecord(newRecord); } break; case ALL_AS_LIST: Map<String, List<Field>> valuesMap = new HashMap<>(); for (Map<String, Field> lookupItem : values) { lookupItem.forEach((k, v) -> { if (valuesMap.get(k) == null) { List<Field> 
lookupValue = new ArrayList<>(); valuesMap.put(k, lookupValue); } valuesMap.get(k).add(v); }); } Map<String, Field> valueMap = new HashMap<>(); valuesMap.forEach( (k,v) -> valueMap.put(k, Field.create(v))); setFieldsInRecord(record, valueMap); batchMaker.addRecord(record); break; default: throw new IllegalStateException("Unknown multiple value behavior: " + multipleValuesBehavior); } } } catch (ELEvalException e) { LOG.error(JdbcErrors.JDBC_01.getMessage(), query, e); throw new OnRecordErrorException(record, JdbcErrors.JDBC_01, query); } catch (UncheckedExecutionException | ExecutionException e) { Throwables.propagateIfPossible(e.getCause(), StageException.class); throw new IllegalStateException(e); // The cache loader shouldn't throw anything that isn't a StageException. } catch (OnRecordErrorException error) { // NOSONAR errorRecordHandler.onError(new OnRecordErrorException(record, error.getErrorCode(), error.getParams())); } } private String prepareQuery(String query) { String preparedQuery = query.replaceAll("(\\$\\{)(.*?)(\\})", "0"); return preparedQuery; } private List<String> getColumnsFromValidationQuery( List<ConfigIssue> issues, Processor.Context context, Statement statement, String preparedQuery ) { List<String> columnNamesFromDb = new ArrayList<>(); try { ResultSet rs = statement.executeQuery(preparedQuery); ResultSetMetaData rsmd = rs.getMetaData(); for (int i = 1; i <= rsmd.getColumnCount(); i++){ columnNamesFromDb.add(rsmd.getColumnLabel(i)); } } catch (SQLException e) { String formattedError = jdbcUtil.formatSqlException(e); LOG.error(formattedError); LOG.debug(formattedError, e); issues.add(context.createConfigIssue(Groups.JDBC.name(), preparedQuery, JdbcErrors.JDBC_34, preparedQuery, formattedError )); } return columnNamesFromDb; } private void setFieldsInRecord(Record record, Map<String, Field>fields) { for (Map.Entry<String, Field> entry : fields.entrySet()) { String columnName = entry.getKey(); String fieldPath = columnsToFields.get(columnName); Field field = entry.getValue(); if (fieldPath == null) { Field root = record.get(); // No mapping switch (root.getType()) { case LIST: // Add new field to the end of the list fieldPath = "[" + root.getValueAsList().size() + "]"; Map<String, Field> cell = new HashMap<>(); cell.put("header", Field.create(columnName)); cell.put("value", field); field = Field.create(cell); break; case LIST_MAP: case MAP: // Just use the column name fieldPath = "/" + columnName; break; default: break; } } record.set(fieldPath, field); } } @SuppressWarnings("unchecked") private LoadingCache<String, Optional<List<Map<String, Field>>>> buildCache() { JdbcLookupLoader loader = new JdbcLookupLoader( getContext(), dataSource, columnsToTypes, maxClobSize, maxBlobSize, errorRecordHandler, hikariConfigBean.getVendor(), unknownTypeAction ); return LookupUtils.buildCache(loader, cacheConfig, defaultValue); } }
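// ---------------------------------------------------------------------------
// Small self-contained sketch (not part of the stage itself) illustrating what
// prepareQuery(...) above does during column-mapping validation: every ${...}
// record-EL placeholder is replaced with the literal 0 so the query can be run
// once, with fetchSize/maxRows of 1, purely to read the ResultSetMetaData column
// labels. The table and EL expression below are illustrative placeholders.
// ---------------------------------------------------------------------------
class PrepareQuerySketch {
  public static void main(String[] args) {
    String query =
        "SELECT first_name, last_name FROM employees WHERE id = ${record:value('/id')}";
    // Same regex as JdbcLookupProcessor.prepareQuery().
    String prepared = query.replaceAll("(\\$\\{)(.*?)(\\})", "0");
    // Prints: SELECT first_name, last_name FROM employees WHERE id = 0
    System.out.println(prepared);
  }
}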
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.peering.implementation; import com.azure.core.management.Region; import com.azure.core.util.Context; import com.azure.resourcemanager.peering.fluent.models.PeeringInner; import com.azure.resourcemanager.peering.models.Kind; import com.azure.resourcemanager.peering.models.Peering; import com.azure.resourcemanager.peering.models.PeeringPropertiesDirect; import com.azure.resourcemanager.peering.models.PeeringPropertiesExchange; import com.azure.resourcemanager.peering.models.PeeringSku; import com.azure.resourcemanager.peering.models.ProvisioningState; import com.azure.resourcemanager.peering.models.ResourceTags; import java.util.Collections; import java.util.Map; public final class PeeringImpl implements Peering, Peering.Definition, Peering.Update { private PeeringInner innerObject; private final com.azure.resourcemanager.peering.PeeringManager serviceManager; public String id() { return this.innerModel().id(); } public String name() { return this.innerModel().name(); } public String type() { return this.innerModel().type(); } public PeeringSku sku() { return this.innerModel().sku(); } public Kind kind() { return this.innerModel().kind(); } public String location() { return this.innerModel().location(); } public Map<String, String> tags() { Map<String, String> inner = this.innerModel().tags(); if (inner != null) { return Collections.unmodifiableMap(inner); } else { return Collections.emptyMap(); } } public PeeringPropertiesDirect direct() { return this.innerModel().direct(); } public PeeringPropertiesExchange exchange() { return this.innerModel().exchange(); } public String peeringLocation() { return this.innerModel().peeringLocation(); } public ProvisioningState provisioningState() { return this.innerModel().provisioningState(); } public Region region() { return Region.fromName(this.regionName()); } public String regionName() { return this.location(); } public PeeringInner innerModel() { return this.innerObject; } private com.azure.resourcemanager.peering.PeeringManager manager() { return this.serviceManager; } private String resourceGroupName; private String peeringName; private ResourceTags updateTags; public PeeringImpl withExistingResourceGroup(String resourceGroupName) { this.resourceGroupName = resourceGroupName; return this; } public Peering create() { this.innerObject = serviceManager .serviceClient() .getPeerings() .createOrUpdateWithResponse(resourceGroupName, peeringName, this.innerModel(), Context.NONE) .getValue(); return this; } public Peering create(Context context) { this.innerObject = serviceManager .serviceClient() .getPeerings() .createOrUpdateWithResponse(resourceGroupName, peeringName, this.innerModel(), context) .getValue(); return this; } PeeringImpl(String name, com.azure.resourcemanager.peering.PeeringManager serviceManager) { this.innerObject = new PeeringInner(); this.serviceManager = serviceManager; this.peeringName = name; } public PeeringImpl update() { this.updateTags = new ResourceTags(); return this; } public Peering apply() { this.innerObject = serviceManager .serviceClient() .getPeerings() .updateWithResponse(resourceGroupName, peeringName, updateTags, Context.NONE) .getValue(); return this; } public Peering apply(Context context) { this.innerObject = serviceManager .serviceClient() .getPeerings() .updateWithResponse(resourceGroupName, peeringName, updateTags, context) 
.getValue(); return this; } PeeringImpl(PeeringInner innerObject, com.azure.resourcemanager.peering.PeeringManager serviceManager) { this.innerObject = innerObject; this.serviceManager = serviceManager; this.resourceGroupName = Utils.getValueFromIdByName(innerObject.id(), "resourceGroups"); this.peeringName = Utils.getValueFromIdByName(innerObject.id(), "peerings"); } public Peering refresh() { this.innerObject = serviceManager .serviceClient() .getPeerings() .getByResourceGroupWithResponse(resourceGroupName, peeringName, Context.NONE) .getValue(); return this; } public Peering refresh(Context context) { this.innerObject = serviceManager .serviceClient() .getPeerings() .getByResourceGroupWithResponse(resourceGroupName, peeringName, context) .getValue(); return this; } public PeeringImpl withRegion(Region location) { this.innerModel().withLocation(location.toString()); return this; } public PeeringImpl withRegion(String location) { this.innerModel().withLocation(location); return this; } public PeeringImpl withSku(PeeringSku sku) { this.innerModel().withSku(sku); return this; } public PeeringImpl withKind(Kind kind) { this.innerModel().withKind(kind); return this; } public PeeringImpl withTags(Map<String, String> tags) { if (isInCreateMode()) { this.innerModel().withTags(tags); return this; } else { this.updateTags.withTags(tags); return this; } } public PeeringImpl withDirect(PeeringPropertiesDirect direct) { this.innerModel().withDirect(direct); return this; } public PeeringImpl withExchange(PeeringPropertiesExchange exchange) { this.innerModel().withExchange(exchange); return this; } public PeeringImpl withPeeringLocation(String peeringLocation) { this.innerModel().withPeeringLocation(peeringLocation); return this; } private boolean isInCreateMode() { return this.innerModel().id() == null; } }
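// ---------------------------------------------------------------------------
// Hedged usage sketch of the fluent flow implemented by PeeringImpl above. The
// manager.peerings().define(name) entry point, the exact Definition stage order,
// Kind.DIRECT and the no-arg PeeringSku() constructor are assumptions based on
// the usual AutoRest fluent conventions and are not defined in this file; the
// with*(), create(), update() and apply() calls are the methods PeeringImpl
// actually implements. Building an authenticated PeeringManager is out of scope.
// ---------------------------------------------------------------------------
final class PeeringUsageSketch {
    static Peering createAndTag(com.azure.resourcemanager.peering.PeeringManager manager) {
        Peering peering = manager
            .peerings()
            .define("myPeering")
            .withRegion(Region.US_EAST)
            .withExistingResourceGroup("myResourceGroup")
            .withSku(new PeeringSku())
            .withKind(Kind.DIRECT)
            .withPeeringLocation("Seattle")
            .create();
        // Tags set after creation flow through updateTags rather than the inner model,
        // because isInCreateMode() is false once the resource has an id.
        return peering.update()
            .withTags(java.util.Collections.singletonMap("env", "test"))
            .apply();
    }
}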
/* * JBoss, Home of Professional Open Source * Copyright 2010, Red Hat, Inc. and/or its affiliates, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validator.internal.metadata.core; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.ConcurrentMap; import javax.validation.Constraint; import javax.validation.ConstraintTarget; import javax.validation.ConstraintValidator; import javax.validation.ValidationException; import javax.validation.constraints.AssertFalse; import javax.validation.constraints.AssertTrue; import javax.validation.constraints.DecimalMax; import javax.validation.constraints.DecimalMin; import javax.validation.constraints.Digits; import javax.validation.constraints.Future; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Null; import javax.validation.constraints.Past; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import javax.validation.constraintvalidation.SupportedValidationTarget; import javax.validation.constraintvalidation.ValidationTarget; import org.hibernate.validator.constraints.ConstraintComposition; import org.hibernate.validator.constraints.EAN; import org.hibernate.validator.constraints.Email; import org.hibernate.validator.constraints.Length; import org.hibernate.validator.constraints.LuhnCheck; import org.hibernate.validator.constraints.Mod10Check; import org.hibernate.validator.constraints.Mod11Check; import org.hibernate.validator.constraints.ModCheck; import org.hibernate.validator.constraints.NotBlank; import org.hibernate.validator.constraints.ParameterScriptAssert; import org.hibernate.validator.constraints.SafeHtml; import org.hibernate.validator.constraints.ScriptAssert; import org.hibernate.validator.constraints.URL; import org.hibernate.validator.internal.constraintvalidators.bv.AssertFalseValidator; import org.hibernate.validator.internal.constraintvalidators.bv.AssertTrueValidator; import org.hibernate.validator.internal.constraintvalidators.bv.DecimalMaxValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.DecimalMaxValidatorForNumber; import org.hibernate.validator.internal.constraintvalidators.bv.DecimalMinValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.DecimalMinValidatorForNumber; import org.hibernate.validator.internal.constraintvalidators.bv.DigitsValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.DigitsValidatorForNumber; import org.hibernate.validator.internal.constraintvalidators.hv.EANValidator; import 
org.hibernate.validator.internal.constraintvalidators.hv.EmailValidator; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForCalendar; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForChronoLocalDate; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForChronoLocalDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForChronoZonedDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForDate; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForInstant; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForOffsetDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForReadableInstant; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForReadablePartial; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForYear; import org.hibernate.validator.internal.constraintvalidators.bv.future.FutureValidatorForYearMonth; import org.hibernate.validator.internal.constraintvalidators.hv.LengthValidator; import org.hibernate.validator.internal.constraintvalidators.hv.LuhnCheckValidator; import org.hibernate.validator.internal.constraintvalidators.bv.MaxValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.MaxValidatorForNumber; import org.hibernate.validator.internal.constraintvalidators.bv.MinValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.MinValidatorForNumber; import org.hibernate.validator.internal.constraintvalidators.hv.Mod10CheckValidator; import org.hibernate.validator.internal.constraintvalidators.hv.Mod11CheckValidator; import org.hibernate.validator.internal.constraintvalidators.hv.ModCheckValidator; import org.hibernate.validator.internal.constraintvalidators.hv.NotBlankValidator; import org.hibernate.validator.internal.constraintvalidators.bv.NotNullValidator; import org.hibernate.validator.internal.constraintvalidators.bv.NullValidator; import org.hibernate.validator.internal.constraintvalidators.hv.ParameterScriptAssertValidator; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForCalendar; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForChronoLocalDate; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForChronoLocalDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForChronoZonedDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForDate; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForInstant; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForOffsetDateTime; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForReadableInstant; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForReadablePartial; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForYear; import org.hibernate.validator.internal.constraintvalidators.bv.past.PastValidatorForYearMonth; import org.hibernate.validator.internal.constraintvalidators.bv.PatternValidator; import 
org.hibernate.validator.internal.constraintvalidators.hv.SafeHtmlValidator; import org.hibernate.validator.internal.constraintvalidators.hv.ScriptAssertValidator; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArray; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfBoolean; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfByte; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfChar; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfDouble; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfFloat; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfInt; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForArraysOfLong; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForCharSequence; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForCollection; import org.hibernate.validator.internal.constraintvalidators.bv.size.SizeValidatorForMap; import org.hibernate.validator.internal.constraintvalidators.hv.URLValidator; import org.hibernate.validator.internal.util.Contracts; import org.hibernate.validator.internal.util.Version; import org.hibernate.validator.internal.util.logging.Log; import org.hibernate.validator.internal.util.logging.LoggerFactory; import org.hibernate.validator.internal.util.privilegedactions.GetAnnotationParameter; import org.hibernate.validator.internal.util.privilegedactions.GetDeclaredMethods; import org.hibernate.validator.internal.util.privilegedactions.GetMethod; import org.hibernate.validator.internal.util.privilegedactions.LoadClass; import static org.hibernate.validator.internal.util.CollectionHelper.newArrayList; import static org.hibernate.validator.internal.util.CollectionHelper.newConcurrentHashMap; import static org.hibernate.validator.internal.util.logging.Messages.MESSAGES; /** * Keeps track of builtin constraints and their validator implementations, as well as already resolved validator definitions. * * @author Hardy Ferentschik * @author Alaa Nassef * @author Gunnar Morling */ public class ConstraintHelper { public static final String GROUPS = "groups"; public static final String PAYLOAD = "payload"; public static final String MESSAGE = "message"; public static final String VALIDATION_APPLIES_TO = "validationAppliesTo"; private static final Log log = LoggerFactory.make(); private static final String JODA_TIME_CLASS_NAME = "org.joda.time.ReadableInstant"; private final ConcurrentMap<Class<? extends Annotation>, List<? extends Class<?>>> builtinConstraints = newConcurrentHashMap(); private final ValidatorClassMap validatorClasses = new ValidatorClassMap(); public ConstraintHelper() { List<Class<? 
extends ConstraintValidator<?, ?>>> constraintList = newArrayList(); constraintList.add( AssertFalseValidator.class ); builtinConstraints.put( AssertFalse.class, constraintList ); constraintList = newArrayList(); constraintList.add( AssertTrueValidator.class ); builtinConstraints.put( AssertTrue.class, constraintList ); constraintList = newArrayList(); constraintList.add( DecimalMaxValidatorForNumber.class ); constraintList.add( DecimalMaxValidatorForCharSequence.class ); builtinConstraints.put( DecimalMax.class, constraintList ); constraintList = newArrayList(); constraintList.add( DecimalMinValidatorForNumber.class ); constraintList.add( DecimalMinValidatorForCharSequence.class ); builtinConstraints.put( DecimalMin.class, constraintList ); constraintList = newArrayList(); constraintList.add( DigitsValidatorForCharSequence.class ); constraintList.add( DigitsValidatorForNumber.class ); builtinConstraints.put( Digits.class, constraintList ); constraintList = newArrayList(); constraintList.add( FutureValidatorForCalendar.class ); constraintList.add( FutureValidatorForDate.class ); if ( isJodaTimeInClasspath() ) { constraintList.add( FutureValidatorForReadableInstant.class ); constraintList.add( FutureValidatorForReadablePartial.class ); } if ( Version.getJavaRelease() >= 8 ) { // Java 8 date/time API validators constraintList.add( FutureValidatorForChronoLocalDate.class ); constraintList.add( FutureValidatorForChronoLocalDateTime.class ); constraintList.add( FutureValidatorForChronoZonedDateTime.class ); constraintList.add( FutureValidatorForInstant.class ); constraintList.add( FutureValidatorForYear.class ); constraintList.add( FutureValidatorForYearMonth.class ); constraintList.add( FutureValidatorForOffsetDateTime.class ); } builtinConstraints.put( Future.class, constraintList ); constraintList = newArrayList(); constraintList.add( MaxValidatorForNumber.class ); constraintList.add( MaxValidatorForCharSequence.class ); builtinConstraints.put( Max.class, constraintList ); constraintList = newArrayList(); constraintList.add( MinValidatorForNumber.class ); constraintList.add( MinValidatorForCharSequence.class ); builtinConstraints.put( Min.class, constraintList ); constraintList = newArrayList(); constraintList.add( NotNullValidator.class ); builtinConstraints.put( NotNull.class, constraintList ); constraintList = newArrayList(); constraintList.add( NullValidator.class ); builtinConstraints.put( Null.class, constraintList ); constraintList = newArrayList(); constraintList.add( PastValidatorForCalendar.class ); constraintList.add( PastValidatorForDate.class ); if ( isJodaTimeInClasspath() ) { constraintList.add( PastValidatorForReadableInstant.class ); constraintList.add( PastValidatorForReadablePartial.class ); } if ( Version.getJavaRelease() >= 8 ) { // Java 8 date/time API validators constraintList.add( PastValidatorForChronoLocalDate.class ); constraintList.add( PastValidatorForChronoLocalDateTime.class ); constraintList.add( PastValidatorForChronoZonedDateTime.class ); constraintList.add( PastValidatorForInstant.class ); constraintList.add( PastValidatorForYear.class ); constraintList.add( PastValidatorForYearMonth.class ); constraintList.add( PastValidatorForOffsetDateTime.class ); } builtinConstraints.put( Past.class, constraintList ); constraintList = newArrayList(); constraintList.add( PatternValidator.class ); builtinConstraints.put( Pattern.class, constraintList ); constraintList = newArrayList(); constraintList.add( SizeValidatorForCharSequence.class ); constraintList.add( 
SizeValidatorForCollection.class ); constraintList.add( SizeValidatorForArray.class ); constraintList.add( SizeValidatorForMap.class ); constraintList.add( SizeValidatorForArraysOfBoolean.class ); constraintList.add( SizeValidatorForArraysOfByte.class ); constraintList.add( SizeValidatorForArraysOfChar.class ); constraintList.add( SizeValidatorForArraysOfDouble.class ); constraintList.add( SizeValidatorForArraysOfFloat.class ); constraintList.add( SizeValidatorForArraysOfInt.class ); constraintList.add( SizeValidatorForArraysOfLong.class ); builtinConstraints.put( Size.class, constraintList ); constraintList = newArrayList(); constraintList.add( EANValidator.class ); builtinConstraints.put( EAN.class, constraintList ); constraintList = newArrayList(); constraintList.add( EmailValidator.class ); builtinConstraints.put( Email.class, constraintList ); constraintList = newArrayList(); constraintList.add( LengthValidator.class ); builtinConstraints.put( Length.class, constraintList ); constraintList = newArrayList(); constraintList.add( ModCheckValidator.class ); builtinConstraints.put( ModCheck.class, constraintList ); constraintList = newArrayList(); constraintList.add( LuhnCheckValidator.class ); builtinConstraints.put( LuhnCheck.class, constraintList ); constraintList = newArrayList(); constraintList.add( Mod10CheckValidator.class ); builtinConstraints.put( Mod10Check.class, constraintList ); constraintList = newArrayList(); constraintList.add( Mod11CheckValidator.class ); builtinConstraints.put( Mod11Check.class, constraintList ); constraintList = newArrayList(); constraintList.add( NotBlankValidator.class ); builtinConstraints.put( NotBlank.class, constraintList ); constraintList = newArrayList(); constraintList.add( ParameterScriptAssertValidator.class ); builtinConstraints.put( ParameterScriptAssert.class, constraintList ); constraintList = newArrayList(); constraintList.add( SafeHtmlValidator.class ); builtinConstraints.put( SafeHtml.class, constraintList ); constraintList = newArrayList(); constraintList.add( ScriptAssertValidator.class ); builtinConstraints.put( ScriptAssert.class, constraintList ); constraintList = newArrayList(); constraintList.add( URLValidator.class ); builtinConstraints.put( URL.class, constraintList ); } private <A extends Annotation> List<Class<? extends ConstraintValidator<A, ?>>> getBuiltInConstraints(Class<A> annotationClass) { //safe cause all CV for a given annotation A are CV<A, ?> @SuppressWarnings("unchecked") final List<Class<? extends ConstraintValidator<A, ?>>> builtInList = (List<Class<? extends ConstraintValidator<A, ?>>>) builtinConstraints .get( annotationClass ); if ( builtInList == null || builtInList.size() == 0 ) { throw log.getUnableToFindAnnotationConstraintsException( annotationClass ); } return builtInList; } private boolean isBuiltinConstraint(Class<? extends Annotation> annotationType) { return builtinConstraints.containsKey( annotationType ); } /** * Returns the constraint validator classes for the given constraint * annotation type, as retrieved from * * <ul> * <li>{@link Constraint#validatedBy()}, * <li>internally registered validators for built-in constraints and</li> * <li>XML configuration.</li> * </ul> * * The result is cached internally. * * @param annotationType The constraint annotation type. * @param <A> the type of the annotation * * @return The validator classes for the given type. */ public <A extends Annotation> List<Class<? 
extends ConstraintValidator<A, ?>>> getAllValidatorClasses(Class<A> annotationType) { Contracts.assertNotNull( annotationType, MESSAGES.classCannotBeNull() ); List<Class<? extends ConstraintValidator<A, ?>>> classes = validatorClasses.get( annotationType ); if ( classes == null ) { classes = getDefaultValidatorClasses( annotationType ); List<Class<? extends ConstraintValidator<A, ?>>> cachedValidatorClasses = validatorClasses.putIfAbsent( annotationType, classes ); if ( cachedValidatorClasses != null ) { classes = cachedValidatorClasses; } } return Collections.unmodifiableList( classes ); } /** * Returns those validator classes for the given constraint annotation * matching the given target. * * @param annotationType The annotation of interest. * @param validationTarget The target, either annotated element or parameters. * @param <A> the type of the annotation * * @return A list with matching validator classes. */ public <A extends Annotation> List<Class<? extends ConstraintValidator<A, ?>>> findValidatorClasses(Class<A> annotationType, ValidationTarget validationTarget) { List<Class<? extends ConstraintValidator<A, ?>>> validatorClasses = getAllValidatorClasses( annotationType ); List<Class<? extends ConstraintValidator<A, ?>>> matchingValidatorClasses = newArrayList(); for ( Class<? extends ConstraintValidator<A, ?>> validatorClass : validatorClasses ) { if ( supportsValidationTarget( validatorClass, validationTarget ) ) { matchingValidatorClasses.add( validatorClass ); } } return matchingValidatorClasses; } private boolean supportsValidationTarget(Class<? extends ConstraintValidator<?, ?>> validatorClass, ValidationTarget target) { SupportedValidationTarget supportedTargetAnnotation = validatorClass.getAnnotation( SupportedValidationTarget.class ); //by default constraints target the annotated element if ( supportedTargetAnnotation == null ) { return target == ValidationTarget.ANNOTATED_ELEMENT; } return Arrays.asList( supportedTargetAnnotation.value() ).contains( target ); } /** * Registers the given validator classes with the given constraint * annotation type. * * @param annotationType The constraint annotation type * @param definitionClasses The validators to register * @param keepDefaultClasses Whether any default validators should be kept or not * @param <A> the type of the annotation */ public <A extends Annotation> void putValidatorClasses(Class<A> annotationType, List<Class<? extends ConstraintValidator<A, ?>>> definitionClasses, boolean keepDefaultClasses) { if ( keepDefaultClasses ) { List<Class<? extends ConstraintValidator<A, ?>>> defaultValidators = getDefaultValidatorClasses( annotationType ); for ( Class<? extends ConstraintValidator<A, ?>> defaultValidator : defaultValidators ) { definitionClasses.add( 0, defaultValidator ); } } validatorClasses.put( annotationType, definitionClasses ); } /** * Checks whether a given annotation is a multi value constraint or not. * * @param annotationType the annotation type to check. * * @return {@code true} if the specified annotation is a multi value constraints, {@code false} * otherwise. */ public boolean isMultiValueConstraint(Class<? extends Annotation> annotationType) { boolean isMultiValueConstraint = false; final Method method = run( GetMethod.action( annotationType, "value" ) ); if ( method != null ) { Class<?> returnType = method.getReturnType(); if ( returnType.isArray() && returnType.getComponentType().isAnnotation() ) { @SuppressWarnings("unchecked") Class<? extends Annotation> componentType = (Class<? 
extends Annotation>) returnType.getComponentType(); if ( isConstraintAnnotation( componentType ) || isBuiltinConstraint( componentType ) ) { isMultiValueConstraint = true; } else { isMultiValueConstraint = false; } } } return isMultiValueConstraint; } /** * Returns the constraints which are part of the given multi-value constraint. * <p> * Invoke {@link #isMultiValueConstraint(Class)} prior to calling this method to check whether a given constraint * actually is a multi-value constraint. * * @param multiValueConstraint the multi-value constraint annotation from which to retrieve the contained constraints * @param <A> the type of the annotation * * @return A list of constraint annotations, may be empty but never {@code null}. */ public <A extends Annotation> List<Annotation> getConstraintsFromMultiValueConstraint(A multiValueConstraint) { Annotation[] annotations = run( GetAnnotationParameter.action( multiValueConstraint, "value", Annotation[].class ) ); return Arrays.asList( annotations ); } /** * Checks whether the specified annotation is a valid constraint annotation. A constraint annotation has to * fulfill the following conditions: * <ul> * <li>Must be annotated with {@link Constraint} * <li>Define a message parameter</li> * <li>Define a group parameter</li> * <li>Define a payload parameter</li> * </ul> * * @param annotationType The annotation type to test. * * @return {@code true} if the annotation fulfills the above conditions, {@code false} otherwise. */ public boolean isConstraintAnnotation(Class<? extends Annotation> annotationType) { if ( annotationType.getAnnotation( Constraint.class ) == null ) { return false; } assertMessageParameterExists( annotationType ); assertGroupsParameterExists( annotationType ); assertPayloadParameterExists( annotationType ); assertValidationAppliesToParameterSetUpCorrectly( annotationType ); assertNoParameterStartsWithValid( annotationType ); return true; } private void assertNoParameterStartsWithValid(Class<? extends Annotation> annotationType) { final Method[] methods = run( GetDeclaredMethods.action( annotationType ) ); for ( Method m : methods ) { if ( m.getName().startsWith( "valid" ) && !m.getName().equals( VALIDATION_APPLIES_TO ) ) { throw log.getConstraintParametersCannotStartWithValidException(); } } } private void assertPayloadParameterExists(Class<? extends Annotation> annotationType) { try { final Method method = run( GetMethod.action( annotationType, PAYLOAD ) ); if ( method == null ) { throw log.getConstraintWithoutMandatoryParameterException( PAYLOAD, annotationType.getName() ); } Class<?>[] defaultPayload = (Class<?>[]) method.getDefaultValue(); if ( defaultPayload.length != 0 ) { throw log.getWrongDefaultValueForPayloadParameterException( annotationType.getName() ); } } catch ( ClassCastException e ) { throw log.getWrongTypeForPayloadParameterException( annotationType.getName(), e ); } } private void assertGroupsParameterExists(Class<? extends Annotation> annotationType) { try { final Method method = run( GetMethod.action( annotationType, GROUPS ) ); if ( method == null ) { throw log.getConstraintWithoutMandatoryParameterException( GROUPS, annotationType.getName() ); } Class<?>[] defaultGroups = (Class<?>[]) method.getDefaultValue(); if ( defaultGroups.length != 0 ) { throw log.getWrongDefaultValueForGroupsParameterException( annotationType.getName() ); } } catch ( ClassCastException e ) { throw log.getWrongTypeForGroupsParameterException( annotationType.getName(), e ); } } private void assertMessageParameterExists(Class<? 
extends Annotation> annotationType) { final Method method = run( GetMethod.action( annotationType, MESSAGE ) ); if ( method == null ) { throw log.getConstraintWithoutMandatoryParameterException( MESSAGE, annotationType.getName() ); } if ( method.getReturnType() != String.class ) { throw log.getWrongTypeForMessageParameterException( annotationType.getName() ); } } private void assertValidationAppliesToParameterSetUpCorrectly(Class<? extends Annotation> annotationType) { boolean hasGenericValidators = !findValidatorClasses( annotationType, ValidationTarget.ANNOTATED_ELEMENT ).isEmpty(); boolean hasCrossParameterValidator = !findValidatorClasses( annotationType, ValidationTarget.PARAMETERS ).isEmpty(); final Method method = run( GetMethod.action( annotationType, VALIDATION_APPLIES_TO ) ); if ( hasGenericValidators && hasCrossParameterValidator ) { if ( method == null ) { throw log.getGenericAndCrossParameterConstraintDoesNotDefineValidationAppliesToParameterException( annotationType.getName() ); } if ( method.getReturnType() != ConstraintTarget.class ) { throw log.getValidationAppliesToParameterMustHaveReturnTypeConstraintTargetException( annotationType.getName() ); } ConstraintTarget defaultValue = (ConstraintTarget) method.getDefaultValue(); if ( defaultValue != ConstraintTarget.IMPLICIT ) { throw log.getValidationAppliesToParameterMustHaveDefaultValueImplicitException( annotationType.getName() ); } } else if ( method != null ) { throw log.getValidationAppliesToParameterMustNotBeDefinedForNonGenericAndCrossParameterConstraintException( annotationType.getName() ); } } public boolean isConstraintComposition(Class<? extends Annotation> annotationType) { return annotationType == ConstraintComposition.class; } private static boolean isJodaTimeInClasspath() { return isClassPresent( JODA_TIME_CLASS_NAME ); } /** * Returns the default validators for the given constraint type. * * @param annotationType The constraint annotation type. * * @return A list with the default validators as retrieved from * {@link Constraint#validatedBy()} or the list of validators for * built-in constraints. */ private <A extends Annotation> List<Class<? extends ConstraintValidator<A, ?>>> getDefaultValidatorClasses(Class<A> annotationType) { if ( isBuiltinConstraint( annotationType ) ) { return getBuiltInConstraints( annotationType ); } else { @SuppressWarnings("unchecked") Class<? extends ConstraintValidator<A, ?>>[] validatedBy = (Class<? extends ConstraintValidator<A, ?>>[]) annotationType .getAnnotation( Constraint.class ) .validatedBy(); return Arrays.asList( validatedBy ); } } private static boolean isClassPresent(String className) { try { run( LoadClass.action( className, ConstraintHelper.class ) ); return true; } catch ( ValidationException e ) { return false; } } /** * Runs the given privileged action, using a privileged block if required. * <p> * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary * privileged actions within HV's protection domain. */ private static <T> T run(PrivilegedAction<T> action) { return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run(); } /** * A type-safe wrapper around a concurrent map from constraint types to * associated validator classes. The casts are safe as data is added trough * the typed API only. * * @author Gunnar Morling */ @SuppressWarnings("unchecked") private static class ValidatorClassMap { private final ConcurrentMap<Class<? extends Annotation>, List<? 
extends Class<?>>> constraintValidatorClasses = newConcurrentHashMap(); private <A extends Annotation> List<Class<? extends ConstraintValidator<A, ?>>> get(Class<A> annotationType) { return (List<Class<? extends ConstraintValidator<A, ?>>>) constraintValidatorClasses.get( annotationType ); } private <A extends Annotation> void put(Class<A> annotationType, List<Class<? extends ConstraintValidator<A, ?>>> validatorClasses) { constraintValidatorClasses.put( annotationType, validatorClasses ); } private <A extends Annotation> List<Class<? extends ConstraintValidator<A, ?>>> putIfAbsent(Class<A> annotationType, List<Class<? extends ConstraintValidator<A, ?>>> classes) { return (List<Class<? extends ConstraintValidator<A, ?>>>) constraintValidatorClasses.putIfAbsent( annotationType, classes ); } } }
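// ---------------------------------------------------------------------------
// Exploratory sketch (not part of Hibernate Validator) showing how the public
// resolution methods above behave for a built-in constraint. ConstraintHelper is
// an internal class, so this is illustrative only: @Size resolves to the
// SizeValidatorFor* classes registered in the constructor, and @Size.List is
// reported as a multi-value constraint because its value() returns Size[].
// ---------------------------------------------------------------------------
class ConstraintHelperSketch {
  public static void main(String[] args) {
    org.hibernate.validator.internal.metadata.core.ConstraintHelper helper =
        new org.hibernate.validator.internal.metadata.core.ConstraintHelper();
    // Every validator registered for @Size (built-ins only; nothing added via XML here).
    System.out.println(helper.getAllValidatorClasses(javax.validation.constraints.Size.class));
    // Only the validators targeting the annotated element, which is the default target
    // when a validator class carries no @SupportedValidationTarget annotation.
    System.out.println(helper.findValidatorClasses(
        javax.validation.constraints.Size.class,
        javax.validation.constraintvalidation.ValidationTarget.ANNOTATED_ELEMENT));
    // true: Size.List#value() returns Size[] and Size is itself a constraint annotation.
    System.out.println(
        helper.isMultiValueConstraint(javax.validation.constraints.Size.List.class));
  }
}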
package org.cagrid.dorian; import java.io.Serializable; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import gov.nih.nci.cagrid.metadata.ServiceMetadata; import org.cagrid.dorian.model.federation.TrustedIdentityProviders; import org.cagrid.dorian.policy.DorianPolicy; import org.cagrid.gaards.authentication.AuthenticationProfiles; import org.jvnet.jaxb2_commons.lang.Equals; import org.jvnet.jaxb2_commons.lang.EqualsStrategy; import org.jvnet.jaxb2_commons.lang.HashCode; import org.jvnet.jaxb2_commons.lang.HashCodeStrategy; import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy; import org.jvnet.jaxb2_commons.lang.JAXBHashCodeStrategy; import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy; import org.jvnet.jaxb2_commons.lang.ToString; import org.jvnet.jaxb2_commons.lang.ToStringStrategy; import org.jvnet.jaxb2_commons.locator.ObjectLocator; import org.jvnet.jaxb2_commons.locator.util.LocatorUtils; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element ref="{gme://caGrid.caBIG/1.0/gov.nih.nci.cagrid.metadata}ServiceMetadata"/> * &lt;element ref="{http://gaards.cagrid.org/authentication}AuthenticationProfiles"/> * &lt;element ref="{http://cagrid.nci.nih.gov/1/dorian-ifs}TrustedIdentityProviders"/> * &lt;element ref="{http://cagrid.nci.nih.gov/1/dorian-policy}DorianPolicy"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "serviceMetadata", "authenticationProfiles", "trustedIdentityProviders", "dorianPolicy" }) @XmlRootElement(name = "DorianResourceProperties") public class DorianResourceProperties implements Serializable, Equals, HashCode, ToString { @XmlElement(name = "ServiceMetadata", namespace = "gme://caGrid.caBIG/1.0/gov.nih.nci.cagrid.metadata", required = true) protected ServiceMetadata serviceMetadata; @XmlElement(name = "AuthenticationProfiles", namespace = "http://gaards.cagrid.org/authentication", required = true) protected AuthenticationProfiles authenticationProfiles; @XmlElement(name = "TrustedIdentityProviders", namespace = "http://cagrid.nci.nih.gov/1/dorian-ifs", required = true) protected TrustedIdentityProviders trustedIdentityProviders; @XmlElement(name = "DorianPolicy", namespace = "http://cagrid.nci.nih.gov/1/dorian-policy", required = true) protected DorianPolicy dorianPolicy; /** * Gets the value of the serviceMetadata property. * * @return * possible object is * {@link ServiceMetadata } * */ public ServiceMetadata getServiceMetadata() { return serviceMetadata; } /** * Sets the value of the serviceMetadata property. * * @param value * allowed object is * {@link ServiceMetadata } * */ public void setServiceMetadata(ServiceMetadata value) { this.serviceMetadata = value; } /** * Gets the value of the authenticationProfiles property. * * @return * possible object is * {@link AuthenticationProfiles } * */ public AuthenticationProfiles getAuthenticationProfiles() { return authenticationProfiles; } /** * Sets the value of the authenticationProfiles property. 
* * @param value * allowed object is * {@link AuthenticationProfiles } * */ public void setAuthenticationProfiles(AuthenticationProfiles value) { this.authenticationProfiles = value; } /** * Gets the value of the trustedIdentityProviders property. * * @return * possible object is * {@link TrustedIdentityProviders } * */ public TrustedIdentityProviders getTrustedIdentityProviders() { return trustedIdentityProviders; } /** * Sets the value of the trustedIdentityProviders property. * * @param value * allowed object is * {@link TrustedIdentityProviders } * */ public void setTrustedIdentityProviders(TrustedIdentityProviders value) { this.trustedIdentityProviders = value; } /** * Gets the value of the dorianPolicy property. * * @return * possible object is * {@link DorianPolicy } * */ public DorianPolicy getDorianPolicy() { return dorianPolicy; } /** * Sets the value of the dorianPolicy property. * * @param value * allowed object is * {@link DorianPolicy } * */ public void setDorianPolicy(DorianPolicy value) { this.dorianPolicy = value; } public String toString() { final ToStringStrategy strategy = JAXBToStringStrategy.INSTANCE; final StringBuilder buffer = new StringBuilder(); append(null, buffer, strategy); return buffer.toString(); } public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) { strategy.appendStart(locator, this, buffer); appendFields(locator, buffer, strategy); strategy.appendEnd(locator, this, buffer); return buffer; } public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) { { ServiceMetadata theServiceMetadata; theServiceMetadata = this.getServiceMetadata(); strategy.appendField(locator, this, "serviceMetadata", buffer, theServiceMetadata); } { AuthenticationProfiles theAuthenticationProfiles; theAuthenticationProfiles = this.getAuthenticationProfiles(); strategy.appendField(locator, this, "authenticationProfiles", buffer, theAuthenticationProfiles); } { TrustedIdentityProviders theTrustedIdentityProviders; theTrustedIdentityProviders = this.getTrustedIdentityProviders(); strategy.appendField(locator, this, "trustedIdentityProviders", buffer, theTrustedIdentityProviders); } { DorianPolicy theDorianPolicy; theDorianPolicy = this.getDorianPolicy(); strategy.appendField(locator, this, "dorianPolicy", buffer, theDorianPolicy); } return buffer; } public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) { int currentHashCode = 1; { ServiceMetadata theServiceMetadata; theServiceMetadata = this.getServiceMetadata(); currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "serviceMetadata", theServiceMetadata), currentHashCode, theServiceMetadata); } { AuthenticationProfiles theAuthenticationProfiles; theAuthenticationProfiles = this.getAuthenticationProfiles(); currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "authenticationProfiles", theAuthenticationProfiles), currentHashCode, theAuthenticationProfiles); } { TrustedIdentityProviders theTrustedIdentityProviders; theTrustedIdentityProviders = this.getTrustedIdentityProviders(); currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "trustedIdentityProviders", theTrustedIdentityProviders), currentHashCode, theTrustedIdentityProviders); } { DorianPolicy theDorianPolicy; theDorianPolicy = this.getDorianPolicy(); currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "dorianPolicy", theDorianPolicy), currentHashCode, theDorianPolicy); } return currentHashCode; } public int 
hashCode() { final HashCodeStrategy strategy = JAXBHashCodeStrategy.INSTANCE; return this.hashCode(null, strategy); } public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) { if (!(object instanceof DorianResourceProperties)) { return false; } if (this == object) { return true; } final DorianResourceProperties that = ((DorianResourceProperties) object); { ServiceMetadata lhsServiceMetadata; lhsServiceMetadata = this.getServiceMetadata(); ServiceMetadata rhsServiceMetadata; rhsServiceMetadata = that.getServiceMetadata(); if (!strategy.equals(LocatorUtils.property(thisLocator, "serviceMetadata", lhsServiceMetadata), LocatorUtils.property(thatLocator, "serviceMetadata", rhsServiceMetadata), lhsServiceMetadata, rhsServiceMetadata)) { return false; } } { AuthenticationProfiles lhsAuthenticationProfiles; lhsAuthenticationProfiles = this.getAuthenticationProfiles(); AuthenticationProfiles rhsAuthenticationProfiles; rhsAuthenticationProfiles = that.getAuthenticationProfiles(); if (!strategy.equals(LocatorUtils.property(thisLocator, "authenticationProfiles", lhsAuthenticationProfiles), LocatorUtils.property(thatLocator, "authenticationProfiles", rhsAuthenticationProfiles), lhsAuthenticationProfiles, rhsAuthenticationProfiles)) { return false; } } { TrustedIdentityProviders lhsTrustedIdentityProviders; lhsTrustedIdentityProviders = this.getTrustedIdentityProviders(); TrustedIdentityProviders rhsTrustedIdentityProviders; rhsTrustedIdentityProviders = that.getTrustedIdentityProviders(); if (!strategy.equals(LocatorUtils.property(thisLocator, "trustedIdentityProviders", lhsTrustedIdentityProviders), LocatorUtils.property(thatLocator, "trustedIdentityProviders", rhsTrustedIdentityProviders), lhsTrustedIdentityProviders, rhsTrustedIdentityProviders)) { return false; } } { DorianPolicy lhsDorianPolicy; lhsDorianPolicy = this.getDorianPolicy(); DorianPolicy rhsDorianPolicy; rhsDorianPolicy = that.getDorianPolicy(); if (!strategy.equals(LocatorUtils.property(thisLocator, "dorianPolicy", lhsDorianPolicy), LocatorUtils.property(thatLocator, "dorianPolicy", rhsDorianPolicy), lhsDorianPolicy, rhsDorianPolicy)) { return false; } } return true; } public boolean equals(Object object) { final EqualsStrategy strategy = JAXBEqualsStrategy.INSTANCE; return equals(null, null, object, strategy); } }
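/*
 * Illustrative sketch only (not part of the original sources): marshalling the
 * JAXB-annotated DorianResourceProperties bean above to XML with the standard
 * javax.xml.bind API. Populating the four child properties is elided; with them
 * left null the corresponding elements are simply omitted, even though the
 * schema marks them as required. The package and class name are hypothetical.
 */
package org.cagrid.dorian.example;

import java.io.StringWriter;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;

import org.cagrid.dorian.DorianResourceProperties;

public class DorianResourcePropertiesMarshalExample {

    public static void main(String[] args) throws JAXBException {
        DorianResourceProperties properties = new DorianResourceProperties();
        // properties.setServiceMetadata(...), setAuthenticationProfiles(...),
        // setTrustedIdentityProviders(...) and setDorianPolicy(...) would
        // normally be called here with fully populated metadata beans.

        JAXBContext context = JAXBContext.newInstance(DorianResourceProperties.class);
        Marshaller marshaller = context.createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);

        StringWriter writer = new StringWriter();
        marshaller.marshal(properties, writer);

        // Prints the <DorianResourceProperties> root element produced by JAXB
        System.out.println(writer);
    }
}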
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.framework.service; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.framework.response.ResponseObject; import com.framework.utils.Configuration; public abstract class AbstractService implements Service { private static final Log LOG = LogFactory.getLog(AbstractService.class); private final String name; private final ServiceStateModel stateModel; private long startTime; private volatile Configuration config; private final ServiceOperations.ServiceListeners listeners = new ServiceOperations.ServiceListeners(); private static ServiceOperations.ServiceListeners globalListeners = new ServiceOperations.ServiceListeners(); private Exception failureCause; private STATE failureState = null; private final AtomicBoolean terminationNotification = new AtomicBoolean(false); private final List<LifecycleEvent> lifecycleHistory = new ArrayList<LifecycleEvent>(5); private final Map<String, String> blockerMap = new HashMap<String, String>(); private final Object stateChangeLock = new Object(); public AbstractService(String name) { this.name = name; stateModel = new ServiceStateModel(name); } @Override public final STATE getServiceState() { return stateModel.getState(); } @Override public final synchronized Throwable getFailureCause() { return failureCause; } @Override public synchronized STATE getFailureState() { return failureState; } protected void setConfig(Configuration conf) { this.config = conf; } @Override public void init(Configuration conf) { if (conf == null) { throw new ServiceStateException("Cannot initialize service " + getName() + ": null configuration"); } if (isInState(STATE.INITED)) { return; } synchronized (stateChangeLock) { if (enterState(STATE.INITED) != STATE.INITED) { setConfig(conf); try { serviceInit(config); if (isInState(STATE.INITED)) { // if the service ended up here during init, // notify the listeners notifyListeners(); } } catch (Exception e) { noteFailure(e); ServiceOperations.stopQuietly(LOG, this); throw ServiceStateException.convert(e); } } } } /** * {@inheritDoc} * * @throws ServiceStateException if the current service state does not permit this action */ @Override public void start() { if (isInState(STATE.STARTED)) { return; } // enter the started state synchronized (stateChangeLock) { if (stateModel.enterState(STATE.STARTED) != STATE.STARTED) { try { startTime = System.currentTimeMillis(); serviceStart(); if (isInState(STATE.STARTED)) { // if the service started (and isn't now in a later // state), notify if (LOG.isDebugEnabled()) { LOG.debug("Service " + getName() + " 
is started"); } notifyListeners(); } } catch (Exception e) { noteFailure(e); ServiceOperations.stopQuietly(LOG, this); throw ServiceStateException.convert(e); } } } } /** * {@inheritDoc} */ @Override public void stop() { if (isInState(STATE.STOPPED)) { return; } synchronized (stateChangeLock) { if (enterState(STATE.STOPPED) != STATE.STOPPED) { try { serviceStop(); } catch (Exception e) { // stop-time exceptions are logged if they are the first // one, noteFailure(e); throw ServiceStateException.convert(e); } finally { // report that the service has terminated terminationNotification.set(true); synchronized (terminationNotification) { terminationNotification.notifyAll(); } // notify anything listening for events notifyListeners(); } } else { // already stopped: note it if (LOG.isDebugEnabled()) { LOG.debug("Ignoring re-entrant call to stop()"); } } } } @Override public final void close() throws IOException { stop(); } protected final void noteFailure(Exception exception) { if (LOG.isDebugEnabled()) { LOG.debug("noteFailure " + exception, null); } if (exception == null) { // make sure failure logic doesn't itself cause problems return; } // record the failure details, and log it synchronized (this) { if (failureCause == null) { failureCause = exception; failureState = getServiceState(); LOG.info("Service " + getName() + " failed in state " + failureState + "; cause: " + exception, exception); } } } @Override public final boolean waitForServiceToStop(long timeout) { boolean completed = terminationNotification.get(); while (!completed) { try { synchronized (terminationNotification) { terminationNotification.wait(timeout); } // here there has been a timeout, the object has terminated, // or there has been a spurious wakeup (which we ignore) completed = true; } catch (InterruptedException e) { // interrupted; have another look at the flag completed = terminationNotification.get(); } } return terminationNotification.get(); } protected void serviceInit(Configuration conf) throws Exception { if (conf != config) { LOG.debug("Config has been overridden during init"); setConfig(conf); } } protected void serviceStart() throws Exception { } protected void serviceStop() throws Exception { } @Override public void registerServiceListener(ServiceStateChangeListener l) { listeners.add(l); } @Override public void unregisterServiceListener(ServiceStateChangeListener l) { listeners.remove(l); } public static void registerGlobalListener(ServiceStateChangeListener l) { globalListeners.add(l); } public static boolean unregisterGlobalListener(ServiceStateChangeListener l) { return globalListeners.remove(l); } static void resetGlobalListeners() { globalListeners.reset(); } @Override public String getName() { return name; } @Override public synchronized Configuration getConfig() { return config; } @Override public long getStartTime() { return startTime; } private void notifyListeners() { try { listeners.notifyListeners(this); globalListeners.notifyListeners(this); } catch (Throwable e) { LOG.warn("Exception while notifying listeners of " + this + ": " + e, e); } } private void recordLifecycleEvent() { LifecycleEvent event = new LifecycleEvent(); event.time = System.currentTimeMillis(); event.state = getServiceState(); lifecycleHistory.add(event); } @Override public synchronized List<LifecycleEvent> getLifecycleHistory() { return new ArrayList<LifecycleEvent>(lifecycleHistory); } private STATE enterState(STATE newState) { assert stateModel != null : "null state in " + name + " " + this.getClass(); STATE oldState = 
stateModel.enterState(newState); if (oldState != newState) { if (LOG.isDebugEnabled()) { LOG.debug("Service: " + getName() + " entered state " + getServiceState()); } recordLifecycleEvent(); } return oldState; } @Override public final boolean isInState(Service.STATE expected) { return stateModel.isInState(expected); } @Override public String toString() { return "Service " + name + " in state " + stateModel; } protected void putBlocker(String name, String details) { synchronized (blockerMap) { blockerMap.put(name, details); } } public void removeBlocker(String name) { synchronized (blockerMap) { blockerMap.remove(name); } } @Override public Map<String, String> getBlockers() { synchronized (blockerMap) { Map<String, String> map = new HashMap<String, String>(blockerMap); return map; } } }
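/*
 * Illustrative sketch only (not part of the original sources): a minimal concrete
 * Service built on the AbstractService above, overriding the serviceInit /
 * serviceStart / serviceStop hooks and driving the INITED -> STARTED -> STOPPED
 * lifecycle. It assumes com.framework.utils.Configuration has a no-arg
 * constructor; the package and class name are hypothetical.
 */
package com.framework.service.example;

import com.framework.service.AbstractService;
import com.framework.utils.Configuration;

public class EchoService extends AbstractService {

    public EchoService() {
        super("EchoService");
    }

    @Override
    protected void serviceInit(Configuration conf) throws Exception {
        // Read settings from conf here; delegate to the superclass so the
        // stored configuration stays in sync.
        super.serviceInit(conf);
    }

    @Override
    protected void serviceStart() throws Exception {
        // Acquire resources / start worker threads here.
        System.out.println(getName() + " starting");
    }

    @Override
    protected void serviceStop() throws Exception {
        // Release resources; stop() guarantees this runs at most once.
        System.out.println(getName() + " stopping");
    }

    public static void main(String[] args) {
        EchoService service = new EchoService();
        service.init(new Configuration());   // assumption: no-arg constructor
        service.start();
        System.out.println(service);          // "Service EchoService in state ..."
        service.stop();
        service.waitForServiceToStop(0);      // returns once termination has been signalled
    }
}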
/* * The MIT License (MIT) * * Copyright (c) 2015 Reinventing Geospatial, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.rgi.geopackage.features.geometry.m; import com.rgi.geopackage.features.ByteOutputStream; import com.rgi.geopackage.features.Contents; import org.junit.Test; import java.nio.ByteBuffer; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; /** * @author Luke Lambert */ public class CoordinateMTest { /** * Test the constructor */ @Test @SuppressWarnings("JUnitTestMethodWithNoAssertions") public void constructor() { new CoordinateM(0.0, 0.0, 0.0); } /** * Test equals */ @Test public void testEquals() { final CoordinateM coord = new CoordinateM(0.0, 0.0, 0.0); //noinspection EqualsWithItself,SimplifiableJUnitAssertion assertTrue("Equals failed on self reference", coord.equals(coord)); } /** * Test equals with null */ @Test @SuppressWarnings("ObjectEqualsNull") public void testEqualsWithNull() { final CoordinateM coord = new CoordinateM(0.0, 0.0, 0.0); //noinspection EqualsWithItself assertFalse("Equals should have failed on null comparison", coord.equals(null)); } /** * Test equals with a different object type */ @Test public void testEqualsWithDifferentObjectType() { final CoordinateM coord = new CoordinateM(0.0, 0.0, 0.0); //noinspection EqualsWithItself,UnnecessaryBoxing,EqualsBetweenInconvertibleTypes assertFalse("Equals should fail on a different object type", coord.equals(Integer.valueOf(0))); } /** * Test equals */ @Test public void testEqualsTrue() { final CoordinateM coord1 = new CoordinateM(0.0, 0.0, 0.0); final CoordinateM coord2 = new CoordinateM(0.0, 0.0, 0.0); //noinspection SimplifiableJUnitAssertion assertTrue("Equals failed to return true", coord1.equals(coord2)); } /** * Test hashCode() */ @Test public void testHashCode() { final CoordinateM coord = new CoordinateM(1.0, 1.0, 1.0); assertEquals("Hash code failed", 32505856, coord.hashCode()); } /** * Test toString() */ @Test public void testToString() { final CoordinateM coord = new CoordinateM(1.0, 1.0, 1.0); assertEquals("To string failed", String.format("(%f, %f, %f m)", coord.getX(), coord.getY(), coord.getM()), coord.toString()); } /** * Test isEmpty() */ @Test public void testIsEmpty() { assertFalse("isEmpty failed", new CoordinateM(1.0, 1.0, 1.0).isEmpty()); assertTrue("isEmpty failed", new CoordinateM(Double.NaN, Double.NaN, Double.NaN).isEmpty()); } /** * Test 
getContents() */ @Test public void getContents() { assertSame("getContents failed", Contents.NotEmpty, new CoordinateM(1.0, 1.0, 1.0).getContents()); assertSame("getContents failed", Contents.Empty, new CoordinateM(Double.NaN, Double.NaN, Double.NaN).getContents()); } /** * Test createEnvelope() */ @Test public void createEnvelope() { final double x = 1.0; final double y = 1.0; final double m = 1.0; final EnvelopeM envelope = new CoordinateM(x, y, m).createEnvelope(); assertEquals("createEnvelope failed", x, envelope.getMinimumX(), 0.0); assertEquals("createEnvelope failed", x, envelope.getMaximumX(), 0.0); assertEquals("createEnvelope failed", y, envelope.getMinimumY(), 0.0); assertEquals("createEnvelope failed", y, envelope.getMaximumY(), 0.0); assertEquals("createEnvelope failed", m, envelope.getMinimumM(), 0.0); assertEquals("createEnvelope failed", m, envelope.getMaximumM(), 0.0); final EnvelopeM emptyEnvelope = new CoordinateM(Double.NaN, Double.NaN, Double.NaN).createEnvelope(); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMinimumX())); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMaximumX())); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMinimumY())); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMaximumY())); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMinimumM())); assertTrue("createEnvelope failed for empty coordinate", Double.isNaN(emptyEnvelope.getMaximumM())); } /** * Test writeWellKnownBinary */ @Test public void writeWellKnownBinary() { try(final ByteOutputStream output = new ByteOutputStream()) { final double x = 1.0; final double y = 1.0; final double m = 1.0; new CoordinateM(x, y, m).writeWellKnownBinary(output); final ByteBuffer byteBuffer = ByteBuffer.wrap(output.array()); assertEquals("writeWellKnownBinary incorrectly wrote x", x, byteBuffer.getDouble(), 0.0); assertEquals("writeWellKnownBinary incorrectly wrote y", y, byteBuffer.getDouble(), 0.0); assertEquals("writeWellKnownBinary incorrectly wrote m", m, byteBuffer.getDouble(), 0.0); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.deploy; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.Serializable; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; import java.util.HashSet; import java.util.Set; import javax.naming.NamingException; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.JmxEnabled; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Server; import org.apache.catalina.mbeans.MBeanUtils; import org.apache.catalina.util.Introspection; import org.apache.catalina.util.LifecycleMBeanBase; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.apache.naming.ContextBindings; import org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.descriptor.web.ContextEjb; import org.apache.tomcat.util.descriptor.web.ContextEnvironment; import org.apache.tomcat.util.descriptor.web.ContextLocalEjb; import org.apache.tomcat.util.descriptor.web.ContextResource; import org.apache.tomcat.util.descriptor.web.ContextResourceEnvRef; import org.apache.tomcat.util.descriptor.web.ContextResourceLink; import org.apache.tomcat.util.descriptor.web.ContextService; import org.apache.tomcat.util.descriptor.web.ContextTransaction; import org.apache.tomcat.util.descriptor.web.InjectionTarget; import org.apache.tomcat.util.descriptor.web.MessageDestinationRef; import org.apache.tomcat.util.descriptor.web.NamingResources; import org.apache.tomcat.util.descriptor.web.ResourceBase; import org.apache.tomcat.util.res.StringManager; /** * Holds and manages the naming resources defined in the J2EE Enterprise * Naming Context and their associated JNDI context. * * @author Remy Maucherat */ public class NamingResourcesImpl extends LifecycleMBeanBase implements Serializable, NamingResources { private static final long serialVersionUID = 1L; private static final Log log = LogFactory.getLog(NamingResourcesImpl.class); private static final StringManager sm = StringManager.getManager(Constants.Package); private volatile boolean resourceRequireExplicitRegistration = false; // ----------------------------------------------------------- Constructors /** * Create a new NamingResources instance. */ public NamingResourcesImpl() { // NOOP } // ----------------------------------------------------- Instance Variables /** * Associated container object. */ private Object container = null; /** * Set of naming entries, keyed by name. 
*/ private final Set<String> entries = new HashSet<>(); /** * The EJB resource references for this web application, keyed by name. */ private final HashMap<String, ContextEjb> ejbs = new HashMap<>(); /** * The environment entries for this web application, keyed by name. */ private final HashMap<String, ContextEnvironment> envs = new HashMap<>(); /** * The local EJB resource references for this web application, keyed by * name. */ private final HashMap<String, ContextLocalEjb> localEjbs = new HashMap<>(); /** * The message destination referencess for this web application, * keyed by name. */ private final HashMap<String, MessageDestinationRef> mdrs = new HashMap<>(); /** * The resource environment references for this web application, * keyed by name. */ private final HashMap<String, ContextResourceEnvRef> resourceEnvRefs = new HashMap<>(); /** * The resource references for this web application, keyed by name. */ private final HashMap<String, ContextResource> resources = new HashMap<>(); /** * The resource links for this web application, keyed by name. */ private final HashMap<String, ContextResourceLink> resourceLinks = new HashMap<>(); /** * The web service references for this web application, keyed by name. */ private final HashMap<String, ContextService> services = new HashMap<>(); /** * The transaction for this webapp. */ private ContextTransaction transaction = null; /** * The property change support for this component. */ protected final PropertyChangeSupport support = new PropertyChangeSupport(this); // ------------------------------------------------------------- Properties /** * Get the container with which the naming resources are associated. */ @Override public Object getContainer() { return container; } /** * Set the container with which the naming resources are associated. */ public void setContainer(Object container) { this.container = container; } /** * Set the transaction object. */ public void setTransaction(ContextTransaction transaction) { this.transaction = transaction; } /** * Get the transaction object. */ public ContextTransaction getTransaction() { return transaction; } /** * Add an EJB resource reference for this web application. * * @param ejb New EJB resource reference */ public void addEjb(ContextEjb ejb) { if (entries.contains(ejb.getName())) { return; } else { entries.add(ejb.getName()); } synchronized (ejbs) { ejb.setNamingResources(this); ejbs.put(ejb.getName(), ejb); } support.firePropertyChange("ejb", null, ejb); } /** * Add an environment entry for this web application. * * @param environment New environment entry */ @Override public void addEnvironment(ContextEnvironment environment) { if (entries.contains(environment.getName())) { ContextEnvironment ce = findEnvironment(environment.getName()); ContextResourceLink rl = findResourceLink(environment.getName()); if (ce != null) { if (ce.getOverride()) { removeEnvironment(environment.getName()); } else { return; } } else if (rl != null) { // Link. Need to look at the global resources NamingResourcesImpl global = getServer().getGlobalNamingResources(); if (global.findEnvironment(rl.getGlobal()) != null) { if (global.findEnvironment(rl.getGlobal()).getOverride()) { removeResourceLink(environment.getName()); } else { return; } } } else { // It exists but it isn't an env or a res link... 
return; } } if (!checkResourceType(environment)) { throw new IllegalArgumentException(sm.getString( "namingResources.resourceTypeFail", environment.getName(), environment.getType())); } entries.add(environment.getName()); synchronized (envs) { environment.setNamingResources(this); envs.put(environment.getName(), environment); } support.firePropertyChange("environment", null, environment); // Register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.createMBean(environment); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanCreateFail", environment.getName()), e); } } } // Container should be an instance of Server or Context. If it is anything // else, return null which will trigger a NPE. private Server getServer() { if (container instanceof Server) { return (Server) container; } if (container instanceof Context) { // Could do this in one go. Lots of casts so split out for clarity Engine engine = (Engine) ((Context) container).getParent().getParent(); return engine.getService().getServer(); } return null; } /** * Add a local EJB resource reference for this web application. * * @param ejb New EJB resource reference */ public void addLocalEjb(ContextLocalEjb ejb) { if (entries.contains(ejb.getName())) { return; } else { entries.add(ejb.getName()); } synchronized (localEjbs) { ejb.setNamingResources(this); localEjbs.put(ejb.getName(), ejb); } support.firePropertyChange("localEjb", null, ejb); } /** * Add a message destination reference for this web application. * * @param mdr New message destination reference */ public void addMessageDestinationRef(MessageDestinationRef mdr) { if (entries.contains(mdr.getName())) { return; } else { if (!checkResourceType(mdr)) { throw new IllegalArgumentException(sm.getString( "namingResources.resourceTypeFail", mdr.getName(), mdr.getType())); } entries.add(mdr.getName()); } synchronized (mdrs) { mdr.setNamingResources(this); mdrs.put(mdr.getName(), mdr); } support.firePropertyChange("messageDestinationRef", null, mdr); } /** * Add a property change listener to this component. * * @param listener The listener to add */ public void addPropertyChangeListener(PropertyChangeListener listener) { support.addPropertyChangeListener(listener); } /** * Add a resource reference for this web application. * * @param resource New resource reference */ @Override public void addResource(ContextResource resource) { if (entries.contains(resource.getName())) { return; } else { if (!checkResourceType(resource)) { throw new IllegalArgumentException(sm.getString( "namingResources.resourceTypeFail", resource.getName(), resource.getType())); } entries.add(resource.getName()); } synchronized (resources) { resource.setNamingResources(this); resources.put(resource.getName(), resource); } support.firePropertyChange("resource", null, resource); // Register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.createMBean(resource); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanCreateFail", resource.getName()), e); } } } /** * Add a resource environment reference for this web application. 
* * @param resource The resource */ public void addResourceEnvRef(ContextResourceEnvRef resource) { if (entries.contains(resource.getName())) { return; } else { if (!checkResourceType(resource)) { throw new IllegalArgumentException(sm.getString( "namingResources.resourceTypeFail", resource.getName(), resource.getType())); } entries.add(resource.getName()); } synchronized (resourceEnvRefs) { resource.setNamingResources(this); resourceEnvRefs.put(resource.getName(), resource); } support.firePropertyChange("resourceEnvRef", null, resource); } /** * Add a resource link for this web application. * * @param resourceLink New resource link */ @Override public void addResourceLink(ContextResourceLink resourceLink) { if (entries.contains(resourceLink.getName())) { return; } else { entries.add(resourceLink.getName()); } synchronized (resourceLinks) { resourceLink.setNamingResources(this); resourceLinks.put(resourceLink.getName(), resourceLink); } support.firePropertyChange("resourceLink", null, resourceLink); // Register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.createMBean(resourceLink); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanCreateFail", resourceLink.getName()), e); } } } /** * Add a web service reference for this web application. * * @param service New web service reference */ public void addService(ContextService service) { if (entries.contains(service.getName())) { return; } else { entries.add(service.getName()); } synchronized (services) { service.setNamingResources(this); services.put(service.getName(), service); } support.firePropertyChange("service", null, service); } /** * Return the EJB resource reference with the specified name, if any; * otherwise, return <code>null</code>. * * @param name Name of the desired EJB resource reference */ public ContextEjb findEjb(String name) { synchronized (ejbs) { return ejbs.get(name); } } /** * Return the defined EJB resource references for this application. * If there are none, a zero-length array is returned. */ public ContextEjb[] findEjbs() { synchronized (ejbs) { ContextEjb results[] = new ContextEjb[ejbs.size()]; return ejbs.values().toArray(results); } } /** * Return the environment entry with the specified name, if any; * otherwise, return <code>null</code>. * * @param name Name of the desired environment entry */ public ContextEnvironment findEnvironment(String name) { synchronized (envs) { return envs.get(name); } } /** * Return the set of defined environment entries for this web * application. If none have been defined, a zero-length array * is returned. */ public ContextEnvironment[] findEnvironments() { synchronized (envs) { ContextEnvironment results[] = new ContextEnvironment[envs.size()]; return envs.values().toArray(results); } } /** * Return the local EJB resource reference with the specified name, if any; * otherwise, return <code>null</code>. * * @param name Name of the desired EJB resource reference */ public ContextLocalEjb findLocalEjb(String name) { synchronized (localEjbs) { return localEjbs.get(name); } } /** * Return the defined local EJB resource references for this application. * If there are none, a zero-length array is returned. */ public ContextLocalEjb[] findLocalEjbs() { synchronized (localEjbs) { ContextLocalEjb results[] = new ContextLocalEjb[localEjbs.size()]; return localEjbs.values().toArray(results); } } /** * Return the message destination reference with the specified name, * if any; otherwise, return <code>null</code>. 
* * @param name Name of the desired message destination reference */ public MessageDestinationRef findMessageDestinationRef(String name) { synchronized (mdrs) { return mdrs.get(name); } } /** * Return the defined message destination references for this application. * If there are none, a zero-length array is returned. */ public MessageDestinationRef[] findMessageDestinationRefs() { synchronized (mdrs) { MessageDestinationRef results[] = new MessageDestinationRef[mdrs.size()]; return mdrs.values().toArray(results); } } /** * Return the resource reference with the specified name, if any; * otherwise return <code>null</code>. * * @param name Name of the desired resource reference */ public ContextResource findResource(String name) { synchronized (resources) { return resources.get(name); } } /** * Return the resource link with the specified name, if any; * otherwise return <code>null</code>. * * @param name Name of the desired resource link */ public ContextResourceLink findResourceLink(String name) { synchronized (resourceLinks) { return resourceLinks.get(name); } } /** * Return the defined resource links for this application. If * none have been defined, a zero-length array is returned. */ public ContextResourceLink[] findResourceLinks() { synchronized (resourceLinks) { ContextResourceLink results[] = new ContextResourceLink[resourceLinks.size()]; return resourceLinks.values().toArray(results); } } /** * Return the defined resource references for this application. If * none have been defined, a zero-length array is returned. */ public ContextResource[] findResources() { synchronized (resources) { ContextResource results[] = new ContextResource[resources.size()]; return resources.values().toArray(results); } } /** * Return the resource environment reference type for the specified * name, if any; otherwise return <code>null</code>. * * @param name Name of the desired resource environment reference */ public ContextResourceEnvRef findResourceEnvRef(String name) { synchronized (resourceEnvRefs) { return resourceEnvRefs.get(name); } } /** * Return the set of resource environment reference names for this * web application. If none have been specified, a zero-length * array is returned. */ public ContextResourceEnvRef[] findResourceEnvRefs() { synchronized (resourceEnvRefs) { ContextResourceEnvRef results[] = new ContextResourceEnvRef[resourceEnvRefs.size()]; return resourceEnvRefs.values().toArray(results); } } /** * Return the web service reference for the specified * name, if any; otherwise return <code>null</code>. * * @param name Name of the desired web service */ public ContextService findService(String name) { synchronized (services) { return services.get(name); } } /** * Return the defined web service references for this application. If * none have been defined, a zero-length array is returned. */ public ContextService[] findServices() { synchronized (services) { ContextService results[] = new ContextService[services.size()]; return services.values().toArray(results); } } /** * Remove any EJB resource reference with the specified name. * * @param name Name of the EJB resource reference to remove */ public void removeEjb(String name) { entries.remove(name); ContextEjb ejb = null; synchronized (ejbs) { ejb = ejbs.remove(name); } if (ejb != null) { support.firePropertyChange("ejb", ejb, null); ejb.setNamingResources(null); } } /** * Remove any environment entry with the specified name. 
* * @param name Name of the environment entry to remove */ @Override public void removeEnvironment(String name) { entries.remove(name); ContextEnvironment environment = null; synchronized (envs) { environment = envs.remove(name); } if (environment != null) { support.firePropertyChange("environment", environment, null); // De-register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.destroyMBean(environment); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanDestroyFail", environment.getName()), e); } } environment.setNamingResources(null); } } /** * Remove any local EJB resource reference with the specified name. * * @param name Name of the EJB resource reference to remove */ public void removeLocalEjb(String name) { entries.remove(name); ContextLocalEjb localEjb = null; synchronized (localEjbs) { localEjb = localEjbs.remove(name); } if (localEjb != null) { support.firePropertyChange("localEjb", localEjb, null); localEjb.setNamingResources(null); } } /** * Remove any message destination reference with the specified name. * * @param name Name of the message destination resource reference to remove */ public void removeMessageDestinationRef(String name) { entries.remove(name); MessageDestinationRef mdr = null; synchronized (mdrs) { mdr = mdrs.remove(name); } if (mdr != null) { support.firePropertyChange("messageDestinationRef", mdr, null); mdr.setNamingResources(null); } } /** * Remove a property change listener from this component. * * @param listener The listener to remove */ public void removePropertyChangeListener(PropertyChangeListener listener) { support.removePropertyChangeListener(listener); } /** * Remove any resource reference with the specified name. * * @param name Name of the resource reference to remove */ @Override public void removeResource(String name) { entries.remove(name); ContextResource resource = null; synchronized (resources) { resource = resources.remove(name); } if (resource != null) { support.firePropertyChange("resource", resource, null); // De-register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.destroyMBean(resource); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanDestroyFail", resource.getName()), e); } } resource.setNamingResources(null); } } /** * Remove any resource environment reference with the specified name. * * @param name Name of the resource environment reference to remove */ public void removeResourceEnvRef(String name) { entries.remove(name); ContextResourceEnvRef resourceEnvRef = null; synchronized (resourceEnvRefs) { resourceEnvRef = resourceEnvRefs.remove(name); } if (resourceEnvRef != null) { support.firePropertyChange("resourceEnvRef", resourceEnvRef, null); resourceEnvRef.setNamingResources(null); } } /** * Remove any resource link with the specified name. * * @param name Name of the resource link to remove */ @Override public void removeResourceLink(String name) { entries.remove(name); ContextResourceLink resourceLink = null; synchronized (resourceLinks) { resourceLink = resourceLinks.remove(name); } if (resourceLink != null) { support.firePropertyChange("resourceLink", resourceLink, null); // De-register with JMX if (resourceRequireExplicitRegistration) { try { MBeanUtils.destroyMBean(resourceLink); } catch (Exception e) { log.warn(sm.getString("namingResources.mbeanDestroyFail", resourceLink.getName()), e); } } resourceLink.setNamingResources(null); } } /** * Remove any web service reference with the specified name. 
* * @param name Name of the web service reference to remove */ public void removeService(String name) { entries.remove(name); ContextService service = null; synchronized (services) { service = services.remove(name); } if (service != null) { support.firePropertyChange("service", service, null); service.setNamingResources(null); } } // ------------------------------------------------------- Lifecycle methods @Override protected void initInternal() throws LifecycleException { super.initInternal(); // Set this before we register currently known naming resources to avoid // timing issues. Duplication registration is not an issue. resourceRequireExplicitRegistration = true; for (ContextResource cr : resources.values()) { try { MBeanUtils.createMBean(cr); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanCreateFail", cr.getName()), e); } } for (ContextEnvironment ce : envs.values()) { try { MBeanUtils.createMBean(ce); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanCreateFail", ce.getName()), e); } } for (ContextResourceLink crl : resourceLinks.values()) { try { MBeanUtils.createMBean(crl); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanCreateFail", crl.getName()), e); } } } @Override protected void startInternal() throws LifecycleException { fireLifecycleEvent(CONFIGURE_START_EVENT, null); setState(LifecycleState.STARTING); } @Override protected void stopInternal() throws LifecycleException { cleanUp(); setState(LifecycleState.STOPPING); fireLifecycleEvent(CONFIGURE_STOP_EVENT, null); } /** * Close those resources that an explicit close may help clean-up faster. */ private void cleanUp() { if (resources.size() == 0) { return; } javax.naming.Context ctxt; try { if (container instanceof Server) { ctxt = ((Server) container).getGlobalNamingContext(); } else { ctxt = ContextBindings.getClassLoader(); ctxt = (javax.naming.Context) ctxt.lookup("comp/env"); } } catch (NamingException e) { log.warn(sm.getString("namingResources.cleanupNoContext", container), e); return; } for (ContextResource cr: resources.values()) { if (cr.getSingleton()) { String closeMethod = cr.getCloseMethod(); if (closeMethod != null && closeMethod.length() > 0) { String name = cr.getName(); Object resource; try { resource = ctxt.lookup(name); } catch (NamingException e) { log.warn(sm.getString( "namingResources.cleanupNoResource", cr.getName(), container), e); continue; } cleanUp(resource, name, closeMethod); } } } } /** * Clean up a resource by calling the defined close method. For example, * closing a database connection pool will close it's open connections. This * will happen on GC but that leaves db connections open that may cause * issues. * * @param resource The resource to close. 
*/ private void cleanUp(Object resource, String name, String closeMethod) { // Look for a zero-arg close() method Method m = null; try { m = resource.getClass().getMethod(closeMethod, (Class<?>[]) null); } catch (SecurityException e) { log.debug(sm.getString("namingResources.cleanupCloseSecurity", closeMethod, name, container)); return; } catch (NoSuchMethodException e) { log.debug(sm.getString("namingResources.cleanupNoClose", name, container, closeMethod)); return; } if (m != null) { try { m.invoke(resource, (Object[]) null); } catch (IllegalArgumentException | IllegalAccessException e) { log.warn(sm.getString("namingResources.cleanupCloseFailed", closeMethod, name, container), e); } catch (InvocationTargetException e) { Throwable t = ExceptionUtils.unwrapInvocationTargetException(e); ExceptionUtils.handleThrowable(t); log.warn(sm.getString("namingResources.cleanupCloseFailed", closeMethod, name, container), t); } } } @Override protected void destroyInternal() throws LifecycleException { // Set this before we de-register currently known naming resources to // avoid timing issues. Duplication de-registration is not an issue. resourceRequireExplicitRegistration = false; // Destroy in reverse order to create, although it should not matter for (ContextResourceLink crl : resourceLinks.values()) { try { MBeanUtils.destroyMBean(crl); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanDestroyFail", crl.getName()), e); } } for (ContextEnvironment ce : envs.values()) { try { MBeanUtils.destroyMBean(ce); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanDestroyFail", ce.getName()), e); } } for (ContextResource cr : resources.values()) { try { MBeanUtils.destroyMBean(cr); } catch (Exception e) { log.warn(sm.getString( "namingResources.mbeanDestroyFail", cr.getName()), e); } } super.destroyInternal(); } @Override protected String getDomainInternal() { // Use the same domain as our associated container if we have one Object c = getContainer(); if (c instanceof JmxEnabled) { return ((JmxEnabled) c).getDomain(); } return null; } @Override protected String getObjectNameKeyProperties() { Object c = getContainer(); if (c instanceof Container) { return "type=NamingResources" + ((Container) c).getMBeanKeyProperties(); } // Server or just unknown return "type=NamingResources"; } /** * Checks that the configuration of the type for the specified resource is * consistent with any injection targets and if the type is not specified, * tries to configure the type based on the injection targets * * @param resource The resource to check * * @return <code>true</code> if the type for the resource is now valid (if * previously <code>null</code> this means it is now set) or * <code>false</code> if the current resource type is inconsistent * with the injection targets and/or cannot be determined */ private boolean checkResourceType(ResourceBase resource) { if (!(container instanceof Context)) { // Only Context's will have injection targets return true; } if (resource.getInjectionTargets() == null || resource.getInjectionTargets().size() == 0) { // No injection targets so use the defined type for the resource return true; } Context context = (Context) container; String typeName = resource.getType(); Class<?> typeClass = null; if (typeName != null) { typeClass = Introspection.loadClass(context, typeName); if (typeClass == null) { // Can't load the type - will trigger a failure later so don't // fail here return true; } } Class<?> compatibleClass = getCompatibleType(context, resource, 
typeClass); if (compatibleClass == null) { // Indicates that a compatible type could not be identified that // worked for all injection targets return false; } resource.setType(compatibleClass.getCanonicalName()); return true; } private Class<?> getCompatibleType(Context context, ResourceBase resource, Class<?> typeClass) { Class<?> result = null; for (InjectionTarget injectionTarget : resource.getInjectionTargets()) { Class<?> clazz = Introspection.loadClass( context, injectionTarget.getTargetClass()); if (clazz == null) { // Can't load class - therefore ignore this target continue; } // Look for a match String targetName = injectionTarget.getTargetName(); // Look for a setter match first Class<?> targetType = getSetterType(clazz, targetName); if (targetType == null) { // Try a field match if no setter match targetType = getFieldType(clazz,targetName); } if (targetType == null) { // No match - ignore this injection target continue; } targetType = Introspection.convertPrimitiveType(targetType); if (typeClass == null) { // Need to find a common type amongst the injection targets if (result == null) { result = targetType; } else if (targetType.isAssignableFrom(result)) { // NO-OP - This will work } else if (result.isAssignableFrom(targetType)) { // Need to use more specific type result = targetType; } else { // Incompatible types return null; } } else { // Each injection target needs to be consistent with the defined // type if (targetType.isAssignableFrom(typeClass)) { result = typeClass; } else { // Incompatible types return null; } } } return result; } private Class<?> getSetterType(Class<?> clazz, String name) { Method[] methods = Introspection.getDeclaredMethods(clazz); if (methods != null && methods.length > 0) { for (Method method : methods) { if (Introspection.isValidSetter(method) && Introspection.getPropertyName(method).equals(name)) { return method.getParameterTypes()[0]; } } } return null; } private Class<?> getFieldType(Class<?> clazz, String name) { Field[] fields = Introspection.getDeclaredFields(clazz); if (fields != null && fields.length > 0) { for (Field field : fields) { if (field.getName().equals(name)) { return field.getType(); } } } return null; } }
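/*
 * Illustrative sketch only (not part of the original sources): registering naming
 * entries with the NamingResourcesImpl above, as is typically done for a Context
 * in an embedded Tomcat setup. The entry names, types and property keys below are
 * example values, not anything mandated by Tomcat; the package is hypothetical.
 */
package org.apache.catalina.deploy.example;

import org.apache.catalina.deploy.NamingResourcesImpl;
import org.apache.tomcat.util.descriptor.web.ContextEnvironment;
import org.apache.tomcat.util.descriptor.web.ContextResource;

public class NamingResourcesExample {

    public static void main(String[] args) {
        NamingResourcesImpl namingResources = new NamingResourcesImpl();

        // <env-entry> equivalent: a simple typed environment value
        ContextEnvironment maxItems = new ContextEnvironment();
        maxItems.setName("app/maxItems");
        maxItems.setType("java.lang.Integer");
        maxItems.setValue("25");
        maxItems.setOverride(false);
        namingResources.addEnvironment(maxItems);

        // <resource-ref> equivalent: a container-managed resource such as a DataSource
        ContextResource dataSource = new ContextResource();
        dataSource.setName("jdbc/ExampleDB");
        dataSource.setType("javax.sql.DataSource");
        dataSource.setAuth("Container");
        dataSource.setProperty("maxTotal", "10");
        namingResources.addResource(dataSource);

        // Entries are de-duplicated by name and retrievable via the find* methods
        System.out.println(namingResources.findEnvironment("app/maxItems").getValue());
        System.out.println(namingResources.findResource("jdbc/ExampleDB").getType());
    }
}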
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.avs.v2019_08_09_preview.implementation; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceFuture; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.management.avs.v2019_08_09_preview.ApiErrorException; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceFuture; import com.microsoft.rest.ServiceResponse; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in Operations. */ public class OperationsInner { /** The Retrofit service to perform REST calls. */ private OperationsService service; /** The service client containing this operation class. */ private AvsClientImpl client; /** * Initializes an instance of OperationsInner. * * @param retrofit the Retrofit instance built from a Retrofit Builder. * @param client the instance of the service client containing this operation class. */ public OperationsInner(Retrofit retrofit, AvsClientImpl client) { this.service = retrofit.create(OperationsService.class); this.client = client; } /** * The interface defining all the services for Operations to be * used by Retrofit to perform actually REST calls. */ interface OperationsService { @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.avs.v2019_08_09_preview.Operations list" }) @GET("providers/Microsoft.AVS/operations") Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.avs.v2019_08_09_preview.Operations listNext" }) @GET Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); } /** * Lists all of the available operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @throws ApiErrorException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;OperationInner&gt; object if successful. */ public PagedList<OperationInner> list() { ServiceResponse<Page<OperationInner>> response = listSinglePageAsync().toBlocking().single(); return new PagedList<OperationInner>(response.body()) { @Override public Page<OperationInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Lists all of the available operations. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<OperationInner>> listAsync(final ListOperationCallback<OperationInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listSinglePageAsync(), new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Lists all of the available operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;OperationInner&gt; object */ public Observable<Page<OperationInner>> listAsync() { return listWithServiceResponseAsync() .map(new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() { @Override public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) { return response.body(); } }); } /** * Lists all of the available operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;OperationInner&gt; object */ public Observable<ServiceResponse<Page<OperationInner>>> listWithServiceResponseAsync() { return listSinglePageAsync() .concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Lists all of the available operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;OperationInner&gt; object wrapped in {@link ServiceResponse} if successful. */ public Observable<ServiceResponse<Page<OperationInner>>> listSinglePageAsync() { if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.list(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<OperationInner>> result = listDelegate(response); return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<OperationInner>> listDelegate(Response<ResponseBody> response) throws ApiErrorException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, ApiErrorException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType()) .registerError(ApiErrorException.class) .build(response); } /** * Lists all of the available operations. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @throws ApiErrorException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;OperationInner&gt; object if successful. */ public PagedList<OperationInner> listNext(final String nextPageLink) { ServiceResponse<Page<OperationInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<OperationInner>(response.body()) { @Override public Page<OperationInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Lists all of the available operations. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<OperationInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<OperationInner>> serviceFuture, final ListOperationCallback<OperationInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Lists all of the available operations. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;OperationInner&gt; object */ public Observable<Page<OperationInner>> listNextAsync(final String nextPageLink) { return listNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() { @Override public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) { return response.body(); } }); } /** * Lists all of the available operations. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;OperationInner&gt; object */ public Observable<ServiceResponse<Page<OperationInner>>> listNextWithServiceResponseAsync(final String nextPageLink) { return listNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Lists all of the available operations. * ServiceResponse<PageImpl<OperationInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;OperationInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<OperationInner>>> listNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<OperationInner>> result = listNextDelegate(response); return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<OperationInner>> listNextDelegate(Response<ResponseBody> response) throws ApiErrorException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, ApiErrorException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType()) .registerError(ApiErrorException.class) .build(response); } }
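// Hedged usage sketch (not part of the generated client above): it shows how a caller might
// consume the paged operations list. The sample class and method names are illustrative, the
// class is assumed to sit in the same package as OperationsInner, and the OperationsInner
// instance is expected to come from an already-built, authenticated AvsClientImpl.
package com.microsoft.azure.management.avs.v2019_08_09_preview.implementation;

import com.microsoft.azure.PagedList;

public final class OperationsListSample {
    private OperationsListSample() { }

    /** Blocking variant: PagedList implements List and lazily fetches follow-up pages while iterating. */
    public static void printOperations(OperationsInner operations) {
        PagedList<OperationInner> operationList = operations.list();
        for (OperationInner operation : operationList) {
            System.out.println(operation);
        }
    }

    /** Reactive variant: each emitted Page carries one page of results. */
    public static void printOperationsAsync(OperationsInner operations) {
        operations.listAsync()
            .subscribe(
                page -> page.items().forEach(System.out::println),
                error -> System.err.println("listing operations failed: " + error));
    }
}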
/* * Copyright 2016 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.driver.optical.handshaker; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import org.onosproject.net.AnnotationKeys; import org.onosproject.net.DefaultAnnotations; import org.onosproject.net.Device; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.device.DefaultPortDescription; import org.onosproject.net.device.DeviceService; import org.onosproject.net.device.PortDescription; import org.onosproject.openflow.controller.OpenFlowOpticalSwitch; import org.onosproject.openflow.controller.PortDescPropertyType; import org.onosproject.openflow.controller.driver.AbstractOpenFlowSwitch; import org.onosproject.openflow.controller.driver.SwitchDriverSubHandshakeAlreadyStarted; import org.onosproject.openflow.controller.driver.SwitchDriverSubHandshakeCompleted; import org.onosproject.openflow.controller.driver.SwitchDriverSubHandshakeNotStarted; import org.projectfloodlight.openflow.protocol.OFCircuitPortStatus; import org.projectfloodlight.openflow.protocol.OFCircuitPortsReply; import org.projectfloodlight.openflow.protocol.OFCircuitPortsRequest; import org.projectfloodlight.openflow.protocol.OFMessage; import org.projectfloodlight.openflow.protocol.OFObject; import org.projectfloodlight.openflow.protocol.OFOplinkPortPower; import org.projectfloodlight.openflow.protocol.OFPortDesc; import org.projectfloodlight.openflow.protocol.OFPortOptical; import org.projectfloodlight.openflow.protocol.OFStatsReply; import org.projectfloodlight.openflow.protocol.OFStatsRequest; import org.projectfloodlight.openflow.protocol.OFStatsType; import org.projectfloodlight.openflow.protocol.OFType; import org.projectfloodlight.openflow.protocol.OFOplinkPortPowerRequest; import org.projectfloodlight.openflow.protocol.OFOplinkPortPowerReply; /** * Driver for Oplink single WSS 8D ROADM. * * Driver implements custom handshaker and supports for Optical channel Port based on OpenFlow OTN extension. * The device consists of Och ports, and performances wavelength cross-connect among the ports. */ public class OplinkRoadmHandshaker extends AbstractOpenFlowSwitch implements OpenFlowOpticalSwitch { private final AtomicBoolean driverHandshakeComplete = new AtomicBoolean(false); private List<OFPortOptical> opticalPorts; @Override public List<? extends OFObject> getPortsOf(PortDescPropertyType type) { return ImmutableList.copyOf(opticalPorts); } @Override /** * Returns a list of standard (Ethernet) ports. 
* * @return List of ports */ public List<OFPortDesc> getPorts() { return Collections.EMPTY_LIST; } @Override public Set<PortDescPropertyType> getPortTypes() { return ImmutableSet.of(PortDescPropertyType.OPTICAL_TRANSPORT); } @Override public Boolean supportNxRole() { return false; } @Override public void startDriverHandshake() { log.warn("Starting driver handshake for sw {}", getStringId()); if (startDriverHandshakeCalled) { throw new SwitchDriverSubHandshakeAlreadyStarted(); } startDriverHandshakeCalled = true; try { sendHandshakeOFExperimenterPortDescRequest(); } catch (IOException e) { log.error("OPLK ROADM exception while sending experimenter port desc:", e); } } @Override public boolean isDriverHandshakeComplete() { return driverHandshakeComplete.get(); } @Override public void processDriverHandshakeMessage(OFMessage m) { if (!startDriverHandshakeCalled) { throw new SwitchDriverSubHandshakeNotStarted(); } if (driverHandshakeComplete.get()) { throw new SwitchDriverSubHandshakeCompleted(m); } switch (m.getType()) { case BARRIER_REPLY: log.debug("OPLK ROADM Received barrier response"); break; case ERROR: log.error("Switch {} Error {}", getStringId(), m); break; case FEATURES_REPLY: break; case FLOW_REMOVED: break; case GET_ASYNC_REPLY: break; case PACKET_IN: break; case PORT_STATUS: processOFPortStatus((OFCircuitPortStatus) m); break; case QUEUE_GET_CONFIG_REPLY: break; case ROLE_REPLY: break; case STATS_REPLY: OFStatsReply stats = (OFStatsReply) m; if (stats.getStatsType() == OFStatsType.EXPERIMENTER) { log.warn("OPLK ROADM : Received multipart (port desc) reply message {}", m); //OTN Optical extension 1.0 port-desc createOpticalPortList((OFCircuitPortsReply) m); driverHandshakeComplete.set(true); } break; default: log.warn("Received message {} during switch-driver " + "subhandshake " + "from switch {} ... " + "Ignoring message", m, getStringId()); } } private void processOFPortStatus(OFCircuitPortStatus ps) { log.debug("OPLK ROADM ..OF Port Status :", ps); } @Override public Device.Type deviceType() { return Device.Type.ROADM; } @Override public final void sendMsg(OFMessage m) { OFMessage newMsg = m; if (m.getType() == OFType.STATS_REQUEST) { OFStatsRequest sr = (OFStatsRequest) m; log.debug("OPLK ROADM rebuilding stats request type {}", sr.getStatsType()); switch (sr.getStatsType()) { case PORT: //replace with Oplink experiment stats message to get the port current power OFOplinkPortPowerRequest pRequest = this.factory().buildOplinkPortPowerRequest() .setXid(sr.getXid()) .setFlags(sr.getFlags()) .build(); newMsg = pRequest; break; default: break; } } else { log.debug("OPLK ROADM sends msg:{}, as is", m.getType()); } super.sendMsg(newMsg); } private void sendHandshakeOFExperimenterPortDescRequest() throws IOException { // send multi part message for port description for optical switches OFCircuitPortsRequest circuitPortsRequest = factory() .buildCircuitPortsRequest().setXid(getNextTransactionId()) .build(); log.info("OPLK ROADM : Sending experimented circuit port stats " + "message " + "{}", circuitPortsRequest); this.sendHandshakeMessage(circuitPortsRequest); } /** * Builds list of OFPortOptical ports based on the multi-part circuit ports reply. * Ensure the optical transport port's signal type is configured correctly. 
* * @param wPorts OF reply with circuit ports */ private void createOpticalPortList(OFCircuitPortsReply wPorts) { opticalPorts = new ArrayList<>(); opticalPorts.addAll(wPorts.getEntries()); } @Override public List<PortDescription> processExpPortStats(OFMessage msg) { if (msg instanceof OFOplinkPortPowerReply) { return buildPortPowerDescriptions(((OFOplinkPortPowerReply) msg).getEntries()); } return Collections.emptyList(); } private OFOplinkPortPower getPortPower(List<OFOplinkPortPower> portPowers, PortNumber portNum) { for (OFOplinkPortPower power : portPowers) { if (power.getPort() == portNum.toLong()) { return power; } } return null; } private List<PortDescription> buildPortPowerDescriptions(List<OFOplinkPortPower> portPowers) { DeviceService deviceService = this.handler().get(DeviceService.class); List<Port> ports = deviceService.getPorts(this.data().deviceId()); final List<PortDescription> portDescs = new ArrayList<>(); for (Port port : ports) { DefaultAnnotations.Builder builder = DefaultAnnotations.builder(); builder.putAll(port.annotations()); OFOplinkPortPower power = getPortPower(portPowers, port.number()); if (power != null) { builder.set(AnnotationKeys.CURRENT_POWER, Long.toString(power.getPowerValue())); } portDescs.add(new DefaultPortDescription(port.number(), port.isEnabled(), port.type(), port.portSpeed(), builder.build())); } return portDescs; } }
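// Hedged sketch (not part of the ONOS driver above): it reads back the per-port optical power
// annotation that buildPortPowerDescriptions() stores under AnnotationKeys.CURRENT_POWER.
// The helper name and the way the DeviceService and DeviceId are obtained are assumptions
// about the calling application, not something defined by OplinkRoadmHandshaker itself.
package org.onosproject.driver.optical.handshaker;

import org.onosproject.net.AnnotationKeys;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Port;
import org.onosproject.net.device.DeviceService;

public final class OplinkPortPowerSample {
    private OplinkPortPowerSample() { }

    /** Prints the last reported power for every port carrying the CURRENT_POWER annotation. */
    public static void printPortPowers(DeviceService deviceService, DeviceId deviceId) {
        for (Port port : deviceService.getPorts(deviceId)) {
            String power = port.annotations().value(AnnotationKeys.CURRENT_POWER);
            if (power != null) {
                System.out.println("port " + port.number() + " current power: " + power);
            }
        }
    }
}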
/* * $Id$ * This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc * * Copyright (c) 2000-2012 Stephane GALLAND. * Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports, * Universite de Technologie de Belfort-Montbeliard. * Copyright (c) 2013-2016 The original authors, and other authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.arakhne.afc.math.continous.object3d; import org.arakhne.afc.math.generic.Point3D; import org.arakhne.afc.math.generic.Tuple3D; import org.arakhne.afc.math.generic.Vector3D; import org.arakhne.afc.math.matrix.Matrix3d; import org.arakhne.afc.math.matrix.Transform3D; /** 3D Vector with 3 floating-point values. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ * @deprecated Replacement will be provided in Version 14.0 */ @Deprecated @SuppressWarnings("all") public class Vector3f extends Tuple3f<Vector3D> implements Vector3D { private static final long serialVersionUID = -1222875298451525734L; /** */ public Vector3f() { // } /** * @param tuple is the tuple to copy. */ public Vector3f(Tuple3D<?> tuple) { super(tuple); } /** * @param tuple is the tuple to copy. */ public Vector3f(int[] tuple) { super(tuple); } /** * @param tuple is the tuple to copy. */ public Vector3f(float[] tuple) { super(tuple); } /** * @param x * @param y * @param z */ public Vector3f(int x, int y, int z) { super(x,y,z); } /** * @param x * @param y * @param z */ public Vector3f(float x, float y, float z) { super(x,y,z); } /** * @param x * @param y * @param z */ public Vector3f(double x, double y, double z) { super((float)x,(float)y,(float)z); } /** * @param x * @param y * @param z */ public Vector3f(long x, long y, long z) { super(x,y,z); } /** {@inheritDoc} */ @Override public Vector3f clone() { return (Vector3f)super.clone(); } /** * {@inheritDoc} */ @Override public float angle(Vector3D v1) { double vDot = dot(v1) / ( length()*v1.length() ); if( vDot < -1.) vDot = -1.; if( vDot > 1.) vDot = 1.; return((float) (Math.acos( vDot ))); } /** * {@inheritDoc} */ @Override public float dot(Vector3D v1) { return (this.x*v1.getX() + this.y*v1.getY() + this.z*v1.getZ()); } /** * Multiply this vector, transposed, by the given matrix and replies the resulting vector. 
* * @param m * @return transpose(this * m) */ public final Vector3f mul(Matrix3d m) { Vector3f r = new Vector3f(); r.x = (float)(this.getX() * m.getM00() + this.getY() * m.getM01() + this.getZ() * m.getM02()); r.y = (float)(this.getX() * m.getM10() + this.getY() * m.getM11() + this.getZ() * m.getM12()); r.z = (float)(this.getX() * m.getM20() + this.getY() * m.getM21() + this.getZ() * m.getM22()); return r; } @Override public Vector3D cross(Vector3D v1) { return crossLeftHand(v1); } @Override public void cross(Vector3D v1, Vector3D v2) { crossLeftHand(v1, v2); } @Override public Vector3D crossLeftHand(Vector3D v1) { float x = v1.getY()*getZ() - v1.getZ()*getY(); float y = v1.getZ()*getX() - v1.getX()*getZ(); float z = v1.getX()*getY() - v1.getY()*getX(); return new Vector3f(x,y,z); } @Override public void crossLeftHand(Vector3D v1, Vector3D v2) { float x = v2.getY()*v1.getZ() - v2.getZ()*v1.getY(); float y = v2.getZ()*v1.getX() - v2.getX()*v1.getZ(); float z = v2.getX()*v1.getY() - v2.getY()*v1.getX(); set(x,y,z); } @Override public Vector3D crossRightHand(Vector3D v1) { float x = getY()*v1.getZ() - getZ()*v1.getY(); float y = getZ()*v1.getX() - getX()*v1.getZ(); float z = getX()*v1.getY() - getY()*v1.getX(); return new Vector3f(x,y,z); } @Override public void crossRightHand(Vector3D v1, Vector3D v2) { float x = v1.getY()*v2.getZ() - v1.getZ()*v2.getY(); float y = v1.getZ()*v2.getX() - v1.getX()*v2.getZ(); float z = v1.getX()*v2.getY() - v1.getY()*v2.getX(); set(x,y,z); } /** * {@inheritDoc} */ @Override public float length() { return (float) Math.sqrt(this.x*this.x + this.y*this.y + this.z*this.z); } /** * {@inheritDoc} */ @Override public float lengthSquared() { return (this.x*this.x + this.y*this.y + this.z*this.z); } /** * {@inheritDoc} */ @Override public void normalize(Vector3D v1) { float norm = 1f / v1.length(); this.x = v1.getX()*norm; this.y = v1.getY()*norm; this.z = v1.getZ()*norm; } /** * {@inheritDoc} */ @Override public void normalize() { float norm; norm = (float)(1./Math.sqrt(this.x*this.x + this.y*this.y + this.z*this.z)); this.x *= norm; this.y *= norm; this.z *= norm; } /** * {@inheritDoc} */ @Override public void turnVector(Vector3D axis, float angle) { Transform3D mat = new Transform3D(); mat.setRotation(new Quaternion(axis, angle)); mat.transform(this); } /** * {@inheritDoc} */ @Override public void add(Vector3D t1, Vector3D t2) { this.x = t1.getX() + t2.getX(); this.y = t1.getY() + t2.getY(); this.z = t1.getZ() + t2.getZ(); } /** * {@inheritDoc} */ @Override public void add(Vector3D t1) { this.x += t1.getX(); this.y += t1.getY(); this.z += t1.getZ(); } /** * {@inheritDoc} */ @Override public void scaleAdd(int s, Vector3D t1, Vector3D t2) { this.x = s * t1.getX() + t2.getX(); this.y = s * t1.getY() + t2.getY(); this.z = s * t1.getZ() + t2.getZ(); } /** * {@inheritDoc} */ @Override public void scaleAdd(float s, Vector3D t1, Vector3D t2) { this.x = s * t1.getX() + t2.getX(); this.y = s * t1.getY() + t2.getY(); this.z = s * t1.getZ() + t2.getZ(); } /** * {@inheritDoc} */ @Override public void scaleAdd(int s, Vector3D t1) { this.x = s * this.x + t1.getX(); this.y = s * this.y + t1.getY(); this.z = s * this.z + t1.getZ(); } /** * {@inheritDoc} */ @Override public void scaleAdd(float s, Vector3D t1) { this.x = s * this.x + t1.getX(); this.y = s * this.y + t1.getY(); this.z = s * this.z + t1.getZ(); } /** * {@inheritDoc} */ @Override public void sub(Vector3D t1, Vector3D t2) { this.x = t1.getX() - t2.getX(); this.y = t1.getY() - t2.getY(); this.z =
t1.getZ() - t2.getZ(); } @Override public void sub(Point3D t1, Point3D t2) { this.x = t1.getX() - t2.getX(); this.y = t1.getY() - t2.getY(); this.z = t1.getZ() - t2.getZ(); } /** * {@inheritDoc} */ @Override public void sub(Vector3D t1) { this.x -= t1.getX(); this.y -= t1.getY(); this.z -= t1.getZ(); } }
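// Illustrative only: a tiny demonstration of the left-handed vs right-handed cross products
// implemented above, using nothing beyond the Vector3f API in this file. The expected values
// in the comments follow directly from the component formulas of crossLeftHand/crossRightHand.
package org.arakhne.afc.math.continous.object3d;

public final class Vector3fCrossDemo {
    private Vector3fCrossDemo() { }

    public static void main(String[] args) {
        Vector3f x = new Vector3f(1f, 0f, 0f);
        Vector3f y = new Vector3f(0f, 1f, 0f);

        System.out.println(x.dot(y));    // 0.0 : the axes are orthogonal
        System.out.println(x.angle(y));  // ~1.5708 : PI/2, via the clamped acos above

        // Right-handed convention: X x Y points along +Z, i.e. components (0, 0, 1).
        System.out.println(x.crossRightHand(y));
        // Left-handed convention (what cross() delegates to): components (0, 0, -1).
        System.out.println(x.crossLeftHand(y));
    }
}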
package scaffold.modeling.uml.basic.cm; import scaffold.modeling.uml.basic.UmlType; import scaffold.modeling.uml.basic.UmlPackage; import scaffold.modeling.uml.UmlIdentifiable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Observable; import java.util.Observer; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import javax.xml.bind.annotation.XmlType; @XmlRootElement(namespace = "test.modeling.uml.basic.cm") @XmlType(propOrder = { "isInterface", "abstract", "finalSpecialization", "superClassIDs", "ownedOperations", "ownedAttributes" }) public class CmClass extends UmlType implements Observer, UmlIdentifiable { public static void initialize() { classes.clear(); } public static CmClass create(String name, boolean isInterface) { if (!isClassNameUndefined(name)) { return null; //not an exception. this is a possible user error case } CmClass clazz = new CmClass(name, isInterface); UmlType.registerType(clazz); UmlPackage.getDefaultPackage().addOwnedType(clazz.getID()); classes.put(clazz.getID(), clazz); return clazz; } public static boolean isClassNameUndefined(String name) { for (CmClass c: classes.values()) { if (c.getName().equals(name)) { return false; } } return true; } public static Map<String,CmClass> getAllClasses() { return classes; } public static CmClass getClass(String id) { return classes.get(id); } public static CmClass removeClass(String id) { return classes.remove(id); } public static CmClass getClassByName(String name) { //ensure the class exists if (isClassNameUndefined(name)) { return null; //not an exception. this is a possible user error case } CmClass clazz = null; for (CmClass c: classes.values()) { if (c.getName().equals(name)) { clazz = c; } } return clazz; } private static Map<String,CmClass> classes = new HashMap<String,CmClass>(); //*** INSTANCE BEGINS HERE *********************************************** //for internalization only public CmClass() { } private CmClass(String name, boolean isInterface) { super(name); this.isInterface = isInterface; this.isAbstract = false; this.isFinalSpecialization = false; } @Override public void update(Observable o, Object obj) { setChanged(); notifyObservers(obj); } public boolean isInterface() { return this.isInterface; } public boolean isAbstract() { return isAbstract; } public void setAbstract(boolean isAbstract) { this.isAbstract = isAbstract; setChanged(); notifyObservers(); } public boolean isFinalSpecialization() { return isFinalSpecialization; } public void setFinalSpecialization(boolean isFinalSpecialization) { this.isFinalSpecialization = isFinalSpecialization; setChanged(); notifyObservers(); } //*** CONTAINED PROPERTY LISTS ******************************************* public List<CmClass> getSuperClassesThatAreInterfaces() { List<CmClass> interfaces = new ArrayList<CmClass>(); for (CmClass c: this.superClasses) { if (c.isInterface()) { interfaces.add(c); } } return interfaces; } public List<CmClass> getSuperClassesThatAreNotInterfaces() { List<CmClass> supers = new ArrayList<CmClass>(); for (CmClass c: this.superClasses) { if (!c.isInterface()) { supers.add(c); } } return supers; } public List<CmClass> getSuperClasses() { return this.superClasses; } public List<String> getSuperClassIDs() { return this.superClassIDs; } public List<CmOperation> getOwnedOperations() { return this.ownedOperations; } public 
List<CmProperty> getOwnedAttributes() { return this.ownedAttributes; } public List<CmAssocEnd> getAllAssociates() { List<CmAssocEnd> assocs = new ArrayList<CmAssocEnd>(); for (CmAssocEnd ep: this.endPoints) { CmAssociation rel = ep.getAssociation(); CmAssocEnd otherEP = rel.getOtherEndPoint(ep); assocs.add(otherEP); } return assocs; } public List<CmAssocEnd> getNavigableAssociates() { List<CmAssocEnd> assocs = new ArrayList<CmAssocEnd>(); for (CmAssocEnd ep: this.endPoints) { CmAssociation assoc = ep.getAssociation(); CmAssocEnd otherEP = assoc.getOtherEndPoint(ep); //if assoc.isNavigabilityEnabled() then bi-directional navigability is assumed if (assoc.isNavigabilityEnabled() || otherEP.isNavigable()) { assocs.add(otherEP); } } return assocs; } //*** GET PROPERTY BY NAME *********************************************** public CmClass getInterfaceByName(String name) { List<CmClass> interfaces = getSuperClassesThatAreInterfaces(); for (CmClass intrfce: interfaces) { if (intrfce.getName().equals(name)) { return intrfce; } } return null; } public CmClass getSuperClassByName(String name) { List<CmClass> supers = getSuperClassesThatAreNotInterfaces(); for (CmClass superClass: supers) { if (superClass.getName().equals(name)) { return superClass; } } return null; } public CmOperation getOwnedOperationByName(String name) { for (CmOperation op: this.ownedOperations) { if (op.getName().equals(name)) { return op; } } return null; } public CmProperty getOwnedAttributeByName(String name) { for (CmProperty attrib: this.ownedAttributes) { if (attrib.getName().equals(name)) { return attrib; } } return null; } //*** ADD/REMOVE METHODS ************************************************* public boolean addSuperClass(CmClass c) { if (c == null || c == this || this.superClasses.contains(c)) { return false; } boolean success = this.superClasses.add(c); if (success) { this.superClassIDs.add(c.getID()); setChanged(); notifyObservers(); c.addObserver(this); } return success; } public boolean removeSuperClass(CmClass c) { if (c == null) { return false; } boolean success = this.superClasses.remove(c); if (success) { this.superClassIDs.remove(c.getID()); setChanged(); notifyObservers(); c.deleteObserver(this); } return success; } public CmOperation addOwnedOperation(String name) { if (name == null) { return null; } CmOperation o = new CmOperation(name); if (this.ownedOperations.add(o)) { setChanged(); notifyObservers(); o.addObserver(this); return o; } return null; } public CmOperation removeOwnedOperation(String name) { if (name == null) { return null; } CmOperation o = getOwnedOperationByName(name); boolean success = this.ownedOperations.remove(o); if (success) { setChanged(); notifyObservers(); o.deleteObserver(this); } return o; } public CmProperty addOwnedAttribute(String name) { if (name == null) { return null; } CmProperty a = new CmProperty(name); boolean success = this.ownedAttributes.add(a); //make sure we add before we notify if (success) { setChanged(); notifyObservers(); a.addObserver(this); return a; } return null; } public CmProperty removeOwnedAttribute(String name) { if (name == null) { return null; } CmProperty a = getOwnedAttributeByName(name); boolean success = this.ownedAttributes.remove(a); if (success) { setChanged(); notifyObservers(); a.deleteObserver(this); } return a; } //only used in this package; use getSuperClassesThatAreNotInterfaces(), getSuperClassesThatAreInterfaces(), getAssociates() //to derive relationships List<CmAssocEnd> getEndPoints() { return this.endPoints; } public boolean 
addEndPoint(CmAssocEnd endPoint) { if (endPoint == null) { return false; } boolean success = this.endPoints.add(endPoint); if (success) { endPoint.setClassID(this.getID()); setChanged(); notifyObservers(); } return success; } public boolean removeEndPoint(CmAssocEnd endPoint) { if (endPoint == null) { return false; } boolean success = this.endPoints.remove(endPoint); if (success) { setChanged(); notifyObservers(); } return success; } // the field does not follow the bean convention, so it is mapped explicitly; the XML element keeps the name 'isInterface', matching the propOrder declaration above @XmlElement(name = "isInterface") private boolean isInterface; //these use a bean convention (get/set) and so, are XmlElements by default private boolean isAbstract; private boolean isFinalSpecialization; @XmlTransient private List<CmClass> superClasses = new ArrayList<CmClass>(); @XmlElementWrapper(name = "superClassIDs") @XmlElement(name = "superClassID") private List<String> superClassIDs = new ArrayList<String>(); @XmlElementWrapper(name = "ownedOperations") @XmlElement(name = "operation") private List<CmOperation> ownedOperations = new ArrayList<CmOperation>(); @XmlElementWrapper(name = "ownedAttributes") //@XmlElement(name = "attribute") private List<CmProperty> ownedAttributes = new ArrayList<CmProperty>(); //this covers association and aggregation @XmlTransient private List<CmAssocEnd> endPoints = new ArrayList<CmAssocEnd>(); }
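// Hypothetical usage sketch based solely on the static factory and the add/remove methods of
// CmClass above; it is not part of the model. It assumes the surrounding UML bootstrap
// (UmlType registration and UmlPackage.getDefaultPackage()) is already usable, since create()
// relies on both.
package scaffold.modeling.uml.basic.cm;

public final class CmClassSample {
    private CmClassSample() { }

    public static void main(String[] args) {
        CmClass.initialize();

        CmClass serializable = CmClass.create("Serializable", true); // an interface
        CmClass person = CmClass.create("Person", false);            // a concrete class
        person.setAbstract(true);
        person.addSuperClass(serializable);
        person.addOwnedAttribute("name");
        person.addOwnedOperation("rename");

        // The derived views split interface parents from class parents.
        System.out.println(person.getSuperClassesThatAreInterfaces().size());    // 1
        System.out.println(person.getSuperClassesThatAreNotInterfaces().size()); // 0

        // create() refuses duplicate names and reports the clash by returning null.
        System.out.println(CmClass.create("Person", false)); // null
    }
}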
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.speech.spi.v1beta1; import com.google.api.gax.core.ConnectionSettings; import com.google.api.gax.core.RetrySettings; import com.google.api.gax.grpc.ApiCallSettings; import com.google.api.gax.grpc.PageStreamingDescriptor; import com.google.api.gax.grpc.ServiceApiSettings; import com.google.api.gax.grpc.SimpleCallSettings; import com.google.auth.Credentials; import com.google.cloud.speech.v1beta1.AsyncRecognizeRequest; import com.google.cloud.speech.v1beta1.SpeechGrpc; import com.google.cloud.speech.v1beta1.SyncRecognizeRequest; import com.google.cloud.speech.v1beta1.SyncRecognizeResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.longrunning.Operation; import io.grpc.ManagedChannel; import io.grpc.Status; import java.io.IOException; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import org.joda.time.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link SpeechApi}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (speech.googleapis.com) and default port (443) * are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. * When build() is called, the tree of builders is called to create the complete settings * object. For example, to set the total timeout of syncRecognize to 30 seconds: * * <pre> * <code> * SpeechSettings.Builder speechSettingsBuilder = * SpeechSettings.defaultBuilder(); * speechSettingsBuilder.syncRecognizeSettings().getRetrySettingsBuilder() * .setTotalTimeout(Duration.standardSeconds(30)); * SpeechSettings speechSettings = speechSettingsBuilder.build(); * </code> * </pre> */ @javax.annotation.Generated("by GAPIC") public class SpeechSettings extends ServiceApiSettings { /** * The default address of the service. */ private static final String DEFAULT_SERVICE_ADDRESS = "speech.googleapis.com"; /** * The default port of the service. */ private static final int DEFAULT_SERVICE_PORT = 443; /** * The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); /** * The default connection settings of the service. 
*/ public static final ConnectionSettings DEFAULT_CONNECTION_SETTINGS = ConnectionSettings.newBuilder() .setServiceAddress(DEFAULT_SERVICE_ADDRESS) .setPort(DEFAULT_SERVICE_PORT) .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) .build(); private final SimpleCallSettings<SyncRecognizeRequest, SyncRecognizeResponse> syncRecognizeSettings; private final SimpleCallSettings<AsyncRecognizeRequest, Operation> asyncRecognizeSettings; /** * Returns the object with the settings used for calls to syncRecognize. */ public SimpleCallSettings<SyncRecognizeRequest, SyncRecognizeResponse> syncRecognizeSettings() { return syncRecognizeSettings; } /** * Returns the object with the settings used for calls to asyncRecognize. */ public SimpleCallSettings<AsyncRecognizeRequest, Operation> asyncRecognizeSettings() { return asyncRecognizeSettings; } /** * Returns the default service address. */ public static String getDefaultServiceAddress() { return DEFAULT_SERVICE_ADDRESS; } /** * Returns the default service port. */ public static int getDefaultServicePort() { return DEFAULT_SERVICE_PORT; } /** * Returns the default service scopes. */ public static ImmutableList<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** * Returns a builder for this class with recommended defaults. */ public static Builder defaultBuilder() { return Builder.createDefault(); } /** * Returns a new builder for this class. */ public static Builder newBuilder() { return new Builder(); } /** * Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } private SpeechSettings(Builder settingsBuilder) throws IOException { super( settingsBuilder.getChannelProvider(), settingsBuilder.getExecutorProvider(), settingsBuilder.getGeneratorName(), settingsBuilder.getGeneratorVersion(), settingsBuilder.getClientLibName(), settingsBuilder.getClientLibVersion()); syncRecognizeSettings = settingsBuilder.syncRecognizeSettings().build(); asyncRecognizeSettings = settingsBuilder.asyncRecognizeSettings().build(); } /** * Builder for SpeechSettings. 
*/ public static class Builder extends ServiceApiSettings.Builder { private final ImmutableList<ApiCallSettings.Builder> methodSettingsBuilders; private SimpleCallSettings.Builder<SyncRecognizeRequest, SyncRecognizeResponse> syncRecognizeSettings; private SimpleCallSettings.Builder<AsyncRecognizeRequest, Operation> asyncRecognizeSettings; private static final ImmutableMap<String, ImmutableSet<Status.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<Status.Code>> definitions = ImmutableMap.builder(); definitions.put( "idempotent", Sets.immutableEnumSet( Lists.<Status.Code>newArrayList( Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.<Status.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings.Builder> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings.Builder> definitions = ImmutableMap.builder(); RetrySettings.Builder settingsBuilder = null; settingsBuilder = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.millis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelay(Duration.millis(60000L)) .setInitialRpcTimeout(Duration.millis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeout(Duration.millis(60000L)) .setTotalTimeout(Duration.millis(600000L)); definitions.put("default", settingsBuilder); RETRY_PARAM_DEFINITIONS = definitions.build(); } private Builder() { super(DEFAULT_CONNECTION_SETTINGS); syncRecognizeSettings = SimpleCallSettings.newBuilder(SpeechGrpc.METHOD_SYNC_RECOGNIZE); asyncRecognizeSettings = SimpleCallSettings.newBuilder(SpeechGrpc.METHOD_ASYNC_RECOGNIZE); methodSettingsBuilders = ImmutableList.<ApiCallSettings.Builder>of(syncRecognizeSettings, asyncRecognizeSettings); } private static Builder createDefault() { Builder builder = new Builder(); builder .syncRecognizeSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default")); builder .asyncRecognizeSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default")); return builder; } private Builder(SpeechSettings settings) { super(settings); syncRecognizeSettings = settings.syncRecognizeSettings.toBuilder(); asyncRecognizeSettings = settings.asyncRecognizeSettings.toBuilder(); methodSettingsBuilders = ImmutableList.<ApiCallSettings.Builder>of(syncRecognizeSettings, asyncRecognizeSettings); } @Override protected ConnectionSettings getDefaultConnectionSettings() { return DEFAULT_CONNECTION_SETTINGS; } @Override public Builder provideExecutorWith(ScheduledExecutorService executor, boolean shouldAutoClose) { super.provideExecutorWith(executor, shouldAutoClose); return this; } @Override public Builder provideChannelWith(ManagedChannel channel, boolean shouldAutoClose) { super.provideChannelWith(channel, shouldAutoClose); return this; } @Override public Builder provideChannelWith(ConnectionSettings settings) { super.provideChannelWith(settings); return this; } @Override public Builder provideChannelWith(Credentials credentials) { super.provideChannelWith(credentials); return this; } @Override public Builder provideChannelWith(List<String> scopes) { super.provideChannelWith(scopes); return this; } @Override public Builder setGeneratorHeader(String name, String version) { super.setGeneratorHeader(name, version); return this; } @Override public Builder 
setClientLibHeader(String name, String version) { super.setClientLibHeader(name, version); return this; } /** * Applies the given settings to all of the API methods in this service. Only * values that are non-null will be applied, so this method is not capable * of un-setting any values. */ public Builder applyToAllApiMethods(ApiCallSettings.Builder apiCallSettings) throws Exception { super.applyToAllApiMethods(methodSettingsBuilders, apiCallSettings); return this; } /** * Returns the builder for the settings used for calls to syncRecognize. */ public SimpleCallSettings.Builder<SyncRecognizeRequest, SyncRecognizeResponse> syncRecognizeSettings() { return syncRecognizeSettings; } /** * Returns the builder for the settings used for calls to asyncRecognize. */ public SimpleCallSettings.Builder<AsyncRecognizeRequest, Operation> asyncRecognizeSettings() { return asyncRecognizeSettings; } @Override public SpeechSettings build() throws IOException { return new SpeechSettings(this); } } }
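// Hedged sketch mirroring the builder example in the SpeechSettings class Javadoc above: it
// tightens the retry budget of asyncRecognize only and leaves syncRecognize at its defaults.
// Only builder methods that appear in this file are used; handing the built settings to the
// generated SpeechApi client is assumed to happen elsewhere.
package com.google.cloud.speech.spi.v1beta1;

import java.io.IOException;
import org.joda.time.Duration;

public final class SpeechSettingsSample {
    private SpeechSettingsSample() { }

    public static SpeechSettings buildCustomSettings() throws IOException {
        SpeechSettings.Builder builder = SpeechSettings.defaultBuilder();
        builder.asyncRecognizeSettings()
            .getRetrySettingsBuilder()
            .setInitialRpcTimeout(Duration.standardSeconds(20))
            .setTotalTimeout(Duration.standardMinutes(5));
        return builder.build();
    }
}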
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.monitoring.exporter; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.monitoring.exporter.http.HttpExporter; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; public class Exporters extends AbstractLifecycleComponent { private static final Logger logger = LogManager.getLogger(Exporters.class); private final Settings settings; private final Map<String, Exporter.Factory> factories; private final AtomicReference<Map<String, Exporter>> exporters; private final ClusterService clusterService; private final XPackLicenseState licenseState; private final ThreadContext threadContext; public Exporters(Settings settings, Map<String, Exporter.Factory> factories, ClusterService clusterService, XPackLicenseState licenseState, ThreadContext threadContext, SSLService sslService) { this.settings = settings; this.factories = factories; this.exporters = new AtomicReference<>(emptyMap()); this.threadContext = Objects.requireNonNull(threadContext); this.clusterService = Objects.requireNonNull(clusterService); this.licenseState = Objects.requireNonNull(licenseState); final List<Setting.AffixSetting<?>> dynamicSettings = getSettings().stream().filter(Setting::isDynamic).collect(Collectors.toList()); clusterService.getClusterSettings().addSettingsUpdateConsumer(this::setExportersSetting, dynamicSettings); HttpExporter.registerSettingValidators(clusterService, sslService); // this ensures that logging is happening by adding an empty consumer per affix setting for (Setting.AffixSetting<?> affixSetting : dynamicSettings) { clusterService.getClusterSettings().addAffixUpdateConsumer(affixSetting, (s, o) -> {}, (s, o) -> {}); } } public void setExportersSetting(Settings exportersSetting) { if (this.lifecycle.started()) { Map<String, Exporter> updated = initExporters(exportersSetting); closeExporters(logger, this.exporters.getAndSet(updated)); } } @Override protected void doStart() { 
exporters.set(initExporters(settings)); } @Override protected void doStop() { closeExporters(logger, exporters.get()); } @Override protected void doClose() { } public Exporter getExporter(String name) { return exporters.get().get(name); } /** * Get all enabled {@linkplain Exporter}s. * * @return Never {@code null}. Can be empty if none are enabled. */ public Collection<Exporter> getEnabledExporters() { return exporters.get().values(); } static void closeExporters(Logger logger, Map<String, Exporter> exporters) { for (Exporter exporter : exporters.values()) { try { exporter.close(); } catch (Exception e) { logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e); } } } Map<String, Exporter> initExporters(Settings settings) { Set<String> singletons = new HashSet<>(); Map<String, Exporter> exporters = new HashMap<>(); boolean hasDisabled = false; Settings exportersSettings = settings.getByPrefix("xpack.monitoring.exporters."); for (String name : exportersSettings.names()) { Settings exporterSettings = exportersSettings.getAsSettings(name); String type = exporterSettings.get("type"); if (type == null) { throw new SettingsException("missing exporter type for [" + name + "] exporter"); } Exporter.Factory factory = factories.get(type); if (factory == null) { throw new SettingsException("unknown exporter type [" + type + "] set for exporter [" + name + "]"); } Exporter.Config config = new Exporter.Config(name, type, settings, clusterService, licenseState); if (!config.enabled()) { hasDisabled = true; if (logger.isDebugEnabled()) { logger.debug("exporter [{}/{}] is disabled", type, name); } continue; } Exporter exporter = factory.create(config); if (exporter.isSingleton()) { // this is a singleton exporter, let's make sure we didn't already create one // (there can only be one instance of a singleton exporter) if (singletons.contains(type)) { throw new SettingsException("multiple [" + type + "] exporters are configured. there can " + "only be one [" + type + "] exporter configured"); } singletons.add(type); } exporters.put(config.name(), exporter); } // no exporters are configured, lets create a default local one. // // NOTE: if there are exporters configured and they're all disabled, we don't // fallback on the default // if (exporters.isEmpty() && !hasDisabled) { Exporter.Config config = new Exporter.Config("default_" + LocalExporter.TYPE, LocalExporter.TYPE, settings, clusterService, licenseState); exporters.put(config.name(), factories.get(LocalExporter.TYPE).create(config)); } return exporters; } /** * Wrap every {@linkplain Exporter}'s {@linkplain ExportBulk} in a {@linkplain ExportBulk.Compound}. * * @param listener {@code null} if no exporters are ready or available. 
*/ void wrapExportBulk(final ActionListener<ExportBulk> listener) { final ClusterState state = clusterService.state(); // wait until we have a usable cluster state if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) || ClusterState.UNKNOWN_UUID.equals(state.metadata().clusterUUID()) || state.version() == ClusterState.UNKNOWN_VERSION) { logger.trace("skipping exporters because the cluster state is not loaded"); listener.onResponse(null); return; } final Map<String, Exporter> exporterMap = exporters.get(); // if no exporters are defined (which is only possible if all are defined explicitly disabled), // then ignore the request immediately if (exporterMap.isEmpty()) { listener.onResponse(null); return; } final AtomicArray<ExportBulk> accumulatedBulks = new AtomicArray<>(exporterMap.size()); final CountDown countDown = new CountDown(exporterMap.size()); int i = 0; // get every exporter's ExportBulk and, when they've all responded, respond with a wrapped version for (final Exporter exporter : exporterMap.values()) { exporter.openBulk( new AccumulatingExportBulkActionListener(exporter.name(), i++, accumulatedBulks, countDown, threadContext, listener)); } } /** * Exports a collection of monitoring documents using the configured exporters */ public void export(final Collection<MonitoringDoc> docs, final ActionListener<Void> listener) throws ExportException { if (this.lifecycleState() != Lifecycle.State.STARTED) { listener.onFailure(new ExportException("Export service is not started")); } else if (docs != null && docs.size() > 0) { wrapExportBulk(ActionListener.wrap(bulk -> { if (bulk != null) { doExport(bulk, docs, listener); } else { listener.onResponse(null); } }, listener::onFailure)); } else { listener.onResponse(null); } } /** * Add {@code docs} and send the {@code bulk}, then respond to the {@code listener}. * * @param bulk The bulk object to send {@code docs} through. * @param docs The monitoring documents to send. * @param listener Returns {@code null} when complete, or failure where relevant. */ private void doExport(final ExportBulk bulk, final Collection<MonitoringDoc> docs, final ActionListener<Void> listener) { final AtomicReference<ExportException> exceptionRef = new AtomicReference<>(); try { bulk.add(docs); } catch (ExportException e) { exceptionRef.set(e); } finally { bulk.flush(ActionListener.wrap(r -> { if (exceptionRef.get() == null) { listener.onResponse(null); } else { listener.onFailure(exceptionRef.get()); } }, (exception) -> { if (exceptionRef.get() != null) { exception.addSuppressed(exceptionRef.get()); } listener.onFailure(exception); })); } } /** * Return all the settings of all the exporters, no matter if HTTP or Local */ public static List<Setting.AffixSetting<?>> getSettings() { List<Setting.AffixSetting<?>> settings = new ArrayList<>(); settings.addAll(Exporter.getSettings()); settings.addAll(HttpExporter.getSettings()); settings.addAll(LocalExporter.getSettings()); return settings; } /** * {@code AccumulatingExportBulkActionListener} allows us to asynchronously gather all of the {@linkplain ExportBulk}s that are * ready, as associated with the enabled {@linkplain Exporter}s. 
*/ static class AccumulatingExportBulkActionListener implements ActionListener<ExportBulk> { private final String name; private final int indexPosition; private final AtomicArray<ExportBulk> accumulatedBulks; private final CountDown countDown; private final ActionListener<ExportBulk> delegate; private final ThreadContext threadContext; AccumulatingExportBulkActionListener(final String name, final int indexPosition, final AtomicArray<ExportBulk> accumulatedBulks, final CountDown countDown, final ThreadContext threadContext, final ActionListener<ExportBulk> delegate) { this.name = name; this.indexPosition = indexPosition; this.accumulatedBulks = accumulatedBulks; this.countDown = countDown; this.threadContext = threadContext; this.delegate = delegate; } @Override public void onResponse(final ExportBulk exportBulk) { if (exportBulk == null) { logger.debug("skipping exporter [{}] as it is not ready yet", name); } else { accumulatedBulks.set(indexPosition, exportBulk); } delegateIfComplete(); } @Override public void onFailure(Exception e) { logger.error((Supplier<?>) () -> new ParameterizedMessage("exporter [{}] failed to open exporting bulk", name), e); delegateIfComplete(); } /** * Once all {@linkplain Exporter}'s have responded, whether it was success or failure, then this responds with all successful * {@linkplain ExportBulk}s wrapped using an {@linkplain ExportBulk.Compound} wrapper. */ void delegateIfComplete() { if (countDown.countDown()) { final List<ExportBulk> bulkList = accumulatedBulks.asList(); if (bulkList.isEmpty()) { delegate.onResponse(null); } else { delegate.onResponse(new ExportBulk.Compound(bulkList, threadContext)); } } } } }
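// Illustrative only (not part of the Exporters class above): the flat settings shape that
// initExporters() walks via settings.getByPrefix("xpack.monitoring.exporters."). The exporter
// names ("cloud", "archive") and the "host"/"enabled" keys are assumptions about concrete
// exporter settings; only the ".type" key is read directly in the code above.
package org.elasticsearch.xpack.monitoring.exporter;

import org.elasticsearch.common.settings.Settings;

final class ExporterSettingsSample {
    private ExporterSettingsSample() { }

    static Settings exampleExporterSettings() {
        // With these settings initExporters() would build one HTTP exporter ("cloud") and skip
        // the disabled local one ("archive"); the default local exporter is only added when no
        // exporter is configured at all and none was explicitly disabled.
        return Settings.builder()
                .put("xpack.monitoring.exporters.cloud.type", "http")
                .put("xpack.monitoring.exporters.cloud.host", "https://monitoring.example.org:9200")
                .put("xpack.monitoring.exporters.archive.type", "local")
                .put("xpack.monitoring.exporters.archive.enabled", false)
                .build();
    }
}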
/* * Copyright (C) 2008 Torgny Bjers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.xorcode.andtweet.data; import java.net.SocketTimeoutException; import java.util.Date; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.ContentResolver; import android.content.ContentUris; import android.content.ContentValues; import android.database.sqlite.SQLiteConstraintException; import android.net.Uri; import android.text.Html; import android.util.Log; import com.xorcode.andtweet.data.AndTweetDatabase.Tweets; import com.xorcode.andtweet.net.Connection; import com.xorcode.andtweet.net.ConnectionAuthenticationException; import com.xorcode.andtweet.net.ConnectionException; import com.xorcode.andtweet.net.ConnectionUnavailableException; /** * Handles loading data from JSON into database. * * @author torgny.bjers */ public class FriendTimeline { private static final String TAG = "FriendTimeline"; private ContentResolver mContentResolver; private String mUsername, mPassword; private long mLastStatusId = 0; private int mNewTweets; private int mReplies; public FriendTimeline(ContentResolver contentResolver, String username, String password, long lastStatusId) { mContentResolver = contentResolver; mUsername = username; mPassword = password; mLastStatusId = lastStatusId; } /** * Load the user and friends timeline. * * @throws ConnectionException * @throws JSONException * @throws SQLiteConstraintException * @throws ConnectionAuthenticationException * @throws ConnectionUnavailableException * @throws SocketTimeoutException */ public void loadTimeline() throws ConnectionException, JSONException, SQLiteConstraintException, ConnectionAuthenticationException, ConnectionUnavailableException, SocketTimeoutException { loadTimeline(AndTweetDatabase.Tweets.TWEET_TYPE_TWEET, false); } /** * Load the user and friends timeline. * * @param tweetType * @throws ConnectionException * @throws JSONException * @throws SQLiteConstraintException * @throws ConnectionAuthenticationException * @throws ConnectionUnavailableException * @throws SocketTimeoutException */ public void loadTimeline(int tweetType) throws ConnectionException, JSONException, SQLiteConstraintException, ConnectionAuthenticationException, ConnectionUnavailableException, SocketTimeoutException { loadTimeline(tweetType, false); } /** * Load the user and friends timeline. 
* * @param tweetType * @param firstRun * @throws ConnectionException * @throws JSONException * @throws SQLiteConstraintException * @throws ConnectionAuthenticationException * @throws ConnectionUnavailableException * @throws SocketTimeoutException */ public void loadTimeline(int tweetType, boolean firstRun) throws ConnectionException, JSONException, SQLiteConstraintException, ConnectionAuthenticationException, ConnectionUnavailableException, SocketTimeoutException { mNewTweets = 0; mReplies = 0; int limit = 200; if (firstRun) { limit = 20; } if (mUsername != null && mUsername.length() > 0) { Connection aConn = new Connection(mUsername, mPassword, mLastStatusId, limit); JSONArray jArr = null; switch (tweetType) { case AndTweetDatabase.Tweets.TWEET_TYPE_TWEET: jArr = aConn.getFriendsTimeline(); break; case AndTweetDatabase.Tweets.TWEET_TYPE_REPLY: jArr = aConn.getMentionsTimeline(); break; default: Log.e(TAG, "Got unhandled tweet type: " + tweetType); break; } for (int index = 0; index < jArr.length(); index++) { JSONObject jo = jArr.getJSONObject(index); long lId = jo.getLong("id"); if (lId > mLastStatusId) { mLastStatusId = lId; } insertFromJSONObject(jo, tweetType); } if (mNewTweets > 0) { mContentResolver.notifyChange(AndTweetDatabase.Tweets.CONTENT_URI, null); } } } /** * Insert a row from a JSONObject. * * @param jo * @param tweetType * @return * @throws JSONException * @throws SQLiteConstraintException */ public Uri insertFromJSONObject(JSONObject jo, int tweetType) throws JSONException, SQLiteConstraintException { JSONObject user; user = jo.getJSONObject("user"); ContentValues values = new ContentValues(); // Construct the Uri to existing record Long lTweetId = Long.parseLong(jo.getString("id")); Uri aTweetUri = ContentUris.withAppendedId(AndTweetDatabase.Tweets.CONTENT_URI, lTweetId); values.put(AndTweetDatabase.Tweets._ID, lTweetId.toString()); values.put(AndTweetDatabase.Tweets.AUTHOR_ID, user.getString("screen_name")); String message = Html.fromHtml(jo.getString("text")).toString(); values.put(AndTweetDatabase.Tweets.MESSAGE, message); values.put(AndTweetDatabase.Tweets.SOURCE, jo.getString("source")); values.put(AndTweetDatabase.Tweets.TWEET_TYPE, tweetType); values.put(AndTweetDatabase.Tweets.IN_REPLY_TO_STATUS_ID, jo.getString("in_reply_to_status_id")); values.put(AndTweetDatabase.Tweets.IN_REPLY_TO_AUTHOR_ID, jo.getString("in_reply_to_screen_name")); values.put(AndTweetDatabase.Tweets.FAVORITED, jo.getBoolean("favorited") ? 1 : 0); try { Long created = Date.parse(jo.getString("created_at")); values.put(Tweets.SENT_DATE, created); } catch (Exception e) { Log.e(TAG, "insertFromJSONObject: " + e.toString()); } if ((mContentResolver.update(aTweetUri, values, null, null)) == 0) { mContentResolver.insert(AndTweetDatabase.Tweets.CONTENT_URI, values); mNewTweets++; if (mUsername.equals(jo.getString("in_reply_to_screen_name")) || message.contains("@" + mUsername)) { mReplies++; } } return aTweetUri; } /** * Insert a row from a JSONObject. * Takes an optional parameter to notify listeners of the change. * * @param jo * @param tweetType * @param notify * @return Uri * @throws JSONException * @throws SQLiteConstraintException */ public Uri insertFromJSONObject(JSONObject jo, int tweetType, boolean notify) throws JSONException, SQLiteConstraintException { Uri aTweetUri = insertFromJSONObject(jo, tweetType); if (notify) mContentResolver.notifyChange(aTweetUri, null); return aTweetUri; } /** * Remove old records to ensure that the database does not grow too large. 
* * @param sinceTimestamp * @return Number of deleted records */ public int pruneOldRecords(long sinceTimestamp) { if (sinceTimestamp == 0) { sinceTimestamp = System.currentTimeMillis(); } return mContentResolver.delete(AndTweetDatabase.Tweets.CONTENT_URI, AndTweetDatabase.Tweets.CREATED_DATE + " < " + sinceTimestamp, null); } /** * Return the number of new statuses. * * @return integer */ public int newCount() { return mNewTweets; } /** * Return the number of new replies. * * @return integer */ public int replyCount() { return mReplies; } /** * Get the last status ID. * * @return long */ public long lastId() { return mLastStatusId; } /** * Destroy the status specified by ID. * * @param statusId * @return Number of deleted records */ public int destroyStatus(long statusId) { return mContentResolver.delete(AndTweetDatabase.Tweets.CONTENT_URI, AndTweetDatabase.Tweets._ID + " = " + statusId, null); } }
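// Hypothetical caller sketch (for example, the app's background sync component); it relies
// only on the FriendTimeline API above. The log tag, the "lastId == 0 means first run"
// heuristic, and the two-week retention window are illustrative assumptions, not part of the
// class itself.
package com.xorcode.andtweet.data;

import android.content.ContentResolver;
import android.util.Log;

public final class FriendTimelineSample {
    private FriendTimelineSample() { }

    public static void syncTweets(ContentResolver resolver, String user, String password, long lastId) {
        final String tag = "FriendTimelineSample";
        try {
            FriendTimeline timeline = new FriendTimeline(resolver, user, password, lastId);
            timeline.loadTimeline(AndTweetDatabase.Tweets.TWEET_TYPE_TWEET, lastId == 0);
            Log.d(tag, "new tweets: " + timeline.newCount()
                    + ", replies: " + timeline.replyCount()
                    + ", last status id: " + timeline.lastId());
            // Keep roughly two weeks of history; pruneOldRecords() deletes rows whose
            // CREATED_DATE is older than the given timestamp.
            timeline.pruneOldRecords(System.currentTimeMillis() - 14L * 24 * 60 * 60 * 1000);
        } catch (Exception e) {
            Log.e(tag, "timeline sync failed", e);
        }
    }
}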
package me.prettyprint.cassandra.service.template; import static org.junit.Assert.*; import java.nio.ByteBuffer; import java.util.Arrays; import me.prettyprint.hector.api.beans.HColumn; import me.prettyprint.hector.api.beans.HSuperColumn; import org.junit.Test; public class SuperCfTemplateTest extends BaseColumnFamilyTemplateTest { @Test public void testSuperCfInsertReadTemplate() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey1","super1"); sUpdater.setString("sub_col_1", "sub_val_1"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumn("skey1", "super1"); assertEquals("sub_val_1",result.getString("super1","sub_col_1")); sUpdater.deleteSuperColumn(); sTemplate.update(sUpdater); assertEquals("super1",sUpdater.getCurrentSuperColumn()); result = sTemplate.querySuperColumn("skey1", "super1"); assertFalse(result.hasResults()); } @Test public void testSuperCfMultiSc() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey2","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sUpdater.addSuperColumn("super2"); sUpdater.setString("sub2_col_1", "sub2_val_1"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumns("skey2", Arrays.asList("super1","super2")); assertEquals(2,result.getSuperColumns().size()); /*for (String sName : result.getSuperColumns() ) { result.getString(sName,"sub1_col_1"); }*/ //assertEquals("sub1_val_1",result.getString("sub1_col_1")); //assertEquals("sub2_val_1",result.next().getString("sub2_col_1")); } @Test public void testQuerySingleSubColumn() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey3","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sTemplate.update(sUpdater); HColumn<String,String> myCol = sTemplate.querySingleSubColumn("skey3", "super1", "sub1_col_1", se); assertEquals("sub1_val_1", myCol.getValue()); } @Test public void testQuerySingleSubColumnExtractSuper() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey3","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sUpdater.setString("sub1_col_2", "sub1_val_2"); sTemplate.update(sUpdater); SuperCfResult<String, String, String> result = sTemplate.querySuperColumns("skey3"); HSuperColumn<String, String, ByteBuffer> superColumn = result.getSuperColumn("super1"); assertNotNull(superColumn); assertEquals("super1",superColumn.getName()); assertEquals(2,superColumn.getColumns().size()); } @Test public void testQuerySingleSubColumnEmpty() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey3","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sTemplate.update(sUpdater); HColumn<String,String> myCol = sTemplate.querySingleSubColumn("skey3", "super2", "sub1_col_1", se); assertNull(myCol); } @Test 
public void testSuperCfInsertReadMultiKey() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("s_multi_key1","super1"); sUpdater.setString("sub_col_1", "sub_val_1"); sUpdater.addKey("s_multi_key2"); sUpdater.addSuperColumn("super1"); sUpdater.setString("sub_col_1", "sub_val_2"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumns(Arrays.asList("s_multi_key1","s_multi_key2"), Arrays.asList("super1")); assertTrue(result.hasResults()); assertEquals("sub_val_2",result.getString("super1","sub_col_1")); assertEquals("sub_val_1",result.next().getString("super1","sub_col_1")); } @Test public void testSuperCfInsertReadMultiKeyNoSc() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("s_multi_key1","super1"); sUpdater.setString("sub_col_1", "sub_val_1"); sUpdater.addKey("s_multi_key2"); sUpdater.addSuperColumn("super1"); sUpdater.setString("sub_col_1", "sub_val_2"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumns(Arrays.asList("s_multi_key1","s_multi_key2")); assertTrue(result.hasResults()); assertEquals("sub_val_2",result.getString("super1","sub_col_1")); assertEquals("sub_val_1",result.next().getString("super1","sub_col_1")); } @Test public void testSuperCfKeyOnly() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey1","super1"); sUpdater.setString("sub_col_1", "sub_val_1"); sUpdater.addSuperColumn("super2"); sUpdater.setString("sub_col_1", "sub_val_2"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumns("skey1"); assertEquals(2, result.getSuperColumns().size()); assertTrue(result.hasResults()); result = sTemplate.querySuperColumns("skey1-non-existing-key"); assertNull(result.getActiveSuperColumn()); } @Test public void testSuperCfNoResults() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); assertFalse(sTemplate.querySuperColumns("no_results").hasResults()); } @Test public void testDeleteSubColumns() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey3","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sUpdater.setString("sub1_col_2", "sub1_val_2"); sUpdater.setString("sub1_col_3", "sub1_val_3"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumn("skey3","super1"); assertEquals(3, result.getColumnNames().size()); sUpdater.deleteSubColumn("sub1_col_1"); sTemplate.update(sUpdater); result = sTemplate.querySuperColumn("skey3","super1"); assertEquals(2, result.getColumnNames().size()); } @Test public void testTemplateLevelDeleteSuper() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey_del_super","super1"); 
sUpdater.setString("sub1_col_1", "sub1_val_1"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumn("skey_del_super","super1"); assertEquals(1, result.getColumnNames().size()); sTemplate.deleteColumn("skey_del_super", "super1"); result = sTemplate.querySuperColumn("skey_del_super","super1"); assertFalse(result.hasResults()); assertEquals(0, result.getColumnNames().size()); } @Test public void testTemplateLevelDeleteRow() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey_row_del","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumn("skey_row_del","super1"); assertEquals(1, result.getColumnNames().size()); sTemplate.deleteRow("skey_row_del"); result = sTemplate.querySuperColumns("skey_row_del"); assertFalse(result.hasResults()); assertEquals(0, result.getSuperColumns().size()); } @Test public void testTemplateLevelDeleteMiss() { SuperCfTemplate<String, String, String> sTemplate = new ThriftSuperCfTemplate<String, String, String>(keyspace, "Super1", se, se, se); SuperCfUpdater<String,String,String> sUpdater = sTemplate.createUpdater("skey_row_del_miss","super1"); sUpdater.setString("sub1_col_1", "sub1_val_1"); sTemplate.update(sUpdater); SuperCfResult<String,String,String> result = sTemplate.querySuperColumn("skey_row_del_miss","super1"); assertEquals(1, result.getColumnNames().size()); sTemplate.deleteRow("skey_row_miss_foo"); sTemplate.deleteColumn("skey_row_del", "foo"); result = sTemplate.querySuperColumns("skey_row_del_miss"); assertTrue(result.hasResults()); assertEquals(1, result.getSuperColumns().size()); } }
package ch.liquidmind.inflection.compiler; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; import ch.liquidmind.inflection.model.compiled.TaxonomyCompiled; public class CompilationUnit { public static class CompilationUnitRaw { // TODO: should be using streams instead of files (allows for // other sources of source code). private File sourceFile; public CompilationUnitRaw( File sourceFile ) { super(); this.sourceFile = sourceFile; } public File getSourceFile() { return sourceFile; } public void setSourceFile( File sourceFile ) { this.sourceFile = sourceFile; } } public static class CompilationUnitParsed { private ParseTree parseTree; private CommonTokenStream tokens; private String[] sourceFileContent; public CompilationUnitParsed() { super(); } public ParseTree getParseTree() { return parseTree; } public void setParseTree( ParseTree parseTree ) { this.parseTree = parseTree; } public CommonTokenStream getTokens() { return tokens; } public void setTokens( CommonTokenStream tokens ) { this.tokens = tokens; } public String[] getSourceFileContent() { if ( sourceFileContent == null ) { String sourceFileAsString = tokens.getTokenSource().getInputStream().toString(); sourceFileContent = sourceFileAsString.split( System.lineSeparator() ); } return sourceFileContent; } } public static class CompilationUnitCompiled { // Imports public static abstract class Import { private String name; private ParserRuleContext parserRuleContext; private boolean wasReferenced = false; public Import( String name ) { super(); this.name = name; } public Import( String name, ParserRuleContext parserRuleContext ) { super(); this.name = name; this.parserRuleContext = parserRuleContext; } public String getName() { return name; } public ParserRuleContext getParserRuleContext() { return parserRuleContext; } public boolean getWasReferenced() { return wasReferenced; } public void setWasReferenced( boolean wasReferenced ) { this.wasReferenced = wasReferenced; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ( ( name == null ) ? 
0 : name.hashCode() ); return result; } @Override public boolean equals( Object obj ) { if ( this == obj ) return true; if ( obj == null ) return false; if ( getClass() != obj.getClass() ) return false; Import other = (Import)obj; if ( name == null ) { if ( other.name != null ) return false; } else if ( !name.equals( other.name ) ) return false; return true; } } public static class TypeImport extends Import { public TypeImport( String name ) { super( name ); } public TypeImport( String name, ParserRuleContext parserRuleContext ) { super( name, parserRuleContext ); } } public static class PackageImport extends Import { public enum PackageImportType { OWN_PACKAGE, OTHER_PACKAGE } private PackageImportType type; public PackageImport( String name ) { super( name ); } public PackageImport( String name, ParserRuleContext parserRuleContext ) { super( name, parserRuleContext ); } public PackageImport( String name, ParserRuleContext parserRuleContext, PackageImportType type ) { super( name, parserRuleContext ); this.type = type; } public PackageImportType getType() { return type; } } public static class StaticMemberImport extends Import { public StaticMemberImport( String name ) { super( name ); } public StaticMemberImport( String name, ParserRuleContext parserRuleContext ) { super( name, parserRuleContext ); } } public static class StaticClassImport extends Import { public StaticClassImport( String name ) { super( name ); } public StaticClassImport( String name, ParserRuleContext parserRuleContext ) { super( name, parserRuleContext ); } } private String packageName; private Map< String, TypeImport > typeImports = new HashMap< String, TypeImport >(); private Set< PackageImport > packageImports = new HashSet< PackageImport >(); private Map< String, StaticMemberImport > staticMemberImports = new HashMap< String, StaticMemberImport >(); private Set< StaticClassImport > staticClassImports = new HashSet< StaticClassImport >(); private List< TaxonomyCompiled > taxonomiesCompiled = new ArrayList< TaxonomyCompiled >(); public CompilationUnitCompiled() { super(); } public String getPackageName() { return packageName; } public void setPackageName( String packageName ) { this.packageName = packageName; } public Map< String, TypeImport > getTypeImports() { return typeImports; } public Set< PackageImport > getPackageImports() { return packageImports; } public Map< String, StaticMemberImport > getStaticMemberImports() { return staticMemberImports; } public Set< StaticClassImport > getStaticClassImports() { return staticClassImports; } public List< TaxonomyCompiled > getTaxonomiesCompiled() { return taxonomiesCompiled; } } private CompilationUnitRaw compilationUnitRaw; private CompilationUnitParsed compilationUnitParsed; private CompilationUnitCompiled compilationUnitCompiled; // Note that compilation errors and warnings may be generated during // parsing (syntax error) or during compilation (grammar error), which // is why compilationFaults belong here rather than in CompilationUnitParsed // or CompilationUnitCompiled. 
private List< CompilationFault > compilationFaults = new ArrayList< CompilationFault >(); private InflectionErrorListener errorListener = new InflectionErrorListener( this ); private CompilationJob parentCompilationJob; public CompilationUnit( File sourceFile, CompilationJob parentCompilationJob ) { super(); compilationUnitRaw = new CompilationUnitRaw( sourceFile ); compilationUnitParsed = new CompilationUnitParsed(); compilationUnitCompiled = new CompilationUnitCompiled(); this.parentCompilationJob = parentCompilationJob; } public CompilationUnitRaw getCompilationUnitRaw() { return compilationUnitRaw; } public CompilationUnitParsed getCompilationUnitParsed() { return compilationUnitParsed; } public CompilationUnitCompiled getCompilationUnitCompiled() { return compilationUnitCompiled; } public List< CompilationFault > getCompilationFaults() { return compilationFaults; } public InflectionErrorListener getErrorListener() { return errorListener; } public CompilationJob getParentCompilationJob() { return parentCompilationJob; } public boolean hasCompilationErrors() { return containsCompilationErrors( compilationFaults ); } public static boolean containsCompilationErrors( List< CompilationFault > compilationFaults ) { boolean hasCompilationErrors = false; for ( CompilationFault compilationFault : compilationFaults ) { if ( compilationFault instanceof CompilationError ) { hasCompilationErrors = true; break; } } return hasCompilationErrors; } }
package com.mortrag.ut.wasabi.leveleditor; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.util.HashMap; import java.util.Iterator; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Screen; import com.badlogic.gdx.graphics.Camera; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureAtlas; import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.badlogic.gdx.maps.Map; import com.badlogic.gdx.maps.MapLayer; import com.badlogic.gdx.maps.MapObjects; import com.badlogic.gdx.maps.MapProperties; import com.badlogic.gdx.maps.objects.TextureMapObject; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.badlogic.gdx.utils.Array; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.esotericsoftware.kryo.serializers.FieldSerializer; import com.mortrag.ut.wasabi.WasabiGame; import com.mortrag.ut.wasabi.graphics.Common; import com.mortrag.ut.wasabi.input.Command; import com.mortrag.ut.wasabi.input.Controls; import com.mortrag.ut.wasabi.input.WasabiInput; import com.mortrag.ut.wasabi.input.WasabiInput.MouseState; import com.mortrag.ut.wasabi.map.WasabiMap; import com.mortrag.ut.wasabi.map.WasabiMapRenderer; import com.mortrag.ut.wasabi.map.WasabiTextureMapObject; import com.mortrag.ut.wasabi.testchamber.TestChamber; import com.mortrag.ut.wasabi.util.Constants; import com.mortrag.ut.wasabi.util.Debug; public class LevelEditor implements Screen { // -------------------------------------------------------------------------------------------- // CONSTANTS // -------------------------------------------------------------------------------------------- // public public static final String NAME = "Level Editor"; // private private static final int GRID_SPACING = 50; private static final float ZOOM_DELTA = 0.05f; private static final float ZOOM_LIMIT = 0.1f; private static final float CAM_MOVE_SPEED = 10.0f; private static final float OBJECT_MOVE_SPEED = 1.0f; // for pixel-perfect nudging private static final float MAIN_VIEWPORT_WIDTH_FRAC = 0.75f; // -------------------------------------------------------------------------------------------- // MEMBERS // -------------------------------------------------------------------------------------------- // Textures, sprites, shapes, fonts private TextureAtlas atlas; private Array<AtlasRegion> objectRegions, spriteRegions; // each img prefix (e.g. o_*, s_*, ...) 
java.util.Map<String, Array<AtlasRegion>> regionMap; // maps Constants.FD.*_PREFIX -> *Regions private SpriteBatch batch; private ShapeRenderer shapeRenderer; // TODO(max): Have removed this by the end of the levelRewrite private Array<Vector3> placedSpriteInfo; // Map private WasabiMap map; private int curLayerIdx; private Vector2 curSpritePos; private WasabiMapRenderer mapRenderer; // Viewports, Cameras, Window sizes private Rectangle overall_viewport, main_viewport, minimap_viewport, detail_viewport; private OrthographicCamera overall_cam, main_cam, minimap_cam, detail_cam; // Window w/h, level w/h, main camera (viewport) w, minimap (viewport) h. All units in pixels. float w, h, level_width, level_height, main_width, minimap_height; // Game, input, controls, commands private WasabiGame game; private WasabiInput input; private Controls controls; private Array<Command> commandList; // State (should make settings obj / map?) private boolean paused = false, drawGridlines = true, snapToGrid = true, dirty = false; private File savedFilename = null; // this is set when the user saves the file, unset w/ load private MouseState mouseState; private Vector3 mouseStateUnprojected; // saving & whatnot private JFileChooser jFileChooser; private Kryo kryo; // -------------------------------------------------------------------------------------------- // CONSTRUCTORS // -------------------------------------------------------------------------------------------- public LevelEditor(WasabiGame game, WasabiInput input) { this.game = game; this.input = input; w = Gdx.graphics.getWidth(); h = Gdx.graphics.getHeight(); level_width = 5000; level_height = 5000; // Bit shapes shapeRenderer = new ShapeRenderer(); // Viewports are areas of the java window that have stuff rendered in them. // Cameras project things to viewports. They can be zoomed and moved. // Overall (whole window) overall_viewport = new Rectangle(0, 0, w, h); overall_cam = new OrthographicCamera(w, h); overall_cam.translate(w / 2.0f, h / 2.0f, 0.0f); // Main (editor area). main_width = w * MAIN_VIEWPORT_WIDTH_FRAC; main_viewport = new Rectangle(0, 0, main_width, h); main_cam = new OrthographicCamera(main_width, h); main_cam.translate(main_width / 2.0f, h / 2.0f, 0.0f); // Minimap minimap_height = (w - main_width) * (level_height / level_width); minimap_viewport = new Rectangle(main_width, h - minimap_height, w - main_width, minimap_height); minimap_cam = new OrthographicCamera(level_width, level_height); minimap_cam.translate(level_width / 2.0f, level_height / 2.0f, 0.0f); // Detail detail_viewport = new Rectangle(main_width, 0, w - main_width, h - minimap_height); detail_cam = new OrthographicCamera(w, h); // not sure what to set this to... // Drawing (sprite batches, textures, ...) 
batch = new SpriteBatch(); atlas = new TextureAtlas(Gdx.files.internal("../wasabi-android/assets/wasabi-atlas.atlas")); // regions Array<AtlasRegion> regions = atlas.getRegions(); regionMap = new HashMap<String, Array<AtlasRegion>>(); objectRegions = getRegionsPrefix(regions, Constants.FD.OBJ_PREFIX); regionMap.put(Constants.FD.OBJ_PREFIX, objectRegions); spriteRegions = getRegionsPrefix(regions, Constants.FD.SPRITE_PREFIX); regionMap.put(Constants.FD.SPRITE_PREFIX, spriteRegions); placedSpriteInfo = new Array<Vector3>(); curLayerIdx = 0; curSpritePos = new Vector2(); // Map setupEmptyMap(); // must be called after map made mapRenderer = new LevelEditor_MapRenderer(map, batch, shapeRenderer); // Input commandList = new Array<Command>(); controls = new LevelEditor_Controls(); mouseStateUnprojected = new Vector3(); // Saving / Loading kryo = new Kryo(); // Remove textureRegion from WasabiTextureMapObject FieldSerializer<WasabiTextureMapObject> objSer = new FieldSerializer<WasabiTextureMapObject>(kryo, WasabiTextureMapObject.class); objSer.removeField("textureRegion"); kryo.register(WasabiTextureMapObject.class, objSer); // Make Array serialization work by removing stupid transient fields. FieldSerializer<Array<MapLayer>> arraySer = new FieldSerializer<Array<MapLayer>>(kryo, Array.class); arraySer.removeField("iterable"); arraySer.removeField("predicateIterable"); kryo.register(Array.class, arraySer); jFileChooser = new JFileChooser(); } /** * Used in constructor. */ private Array<AtlasRegion> getRegionsPrefix(Array<AtlasRegion> regions, String prefix) { Array<AtlasRegion> result = new Array<AtlasRegion>(); for(int i = 0; i < regions.size; i++) { AtlasRegion cur = regions.get(i); if (cur.name.startsWith(prefix)) { result.add(cur); } } return result; } /** * This is private, but really part of the constructor (so far). Just handles all map setting up * code. */ private void setupEmptyMap() { map = new WasabiMap(); MapLayer layer; // layer 0: not-collidable: background layer = new LevelEditor_MapLayer(Constants.FD.OBJ_PREFIX, objectRegions); layer.getProperties().put(Constants.MP.LAYER_TYPE, Constants.MP.LayerType.BG); map.getLayers().add(layer); // layer 1: collidable: foreground layer = new LevelEditor_MapLayer(Constants.FD.OBJ_PREFIX, objectRegions); layer.getProperties().put(Constants.MP.LAYER_TYPE, Constants.MP.LayerType.COLLISION_FG); map.getLayers().add(layer); // layer 2: not-collidable: foreground layer = new LevelEditor_MapLayer(Constants.FD.OBJ_PREFIX, objectRegions); layer.getProperties().put(Constants.MP.LAYER_TYPE, Constants.MP.LayerType.FG); map.getLayers().add(layer); // layer 3: place hero and enemy points // TODO(max): this. // Properties MapProperties mp = map.getProperties(); mp.put(Constants.MP.LEVEL_WIDTH, level_width); mp.put(Constants.MP.LEVEL_HEIGHT, level_height); mp.put(Constants.MP.SPAWN_POINT, new Vector2(300, 300)); } // -------------------------------------------------------------------------------------------- // PRIVATE METHODS // -------------------------------------------------------------------------------------------- /** * TODO(max): this */ private void saveMap() { // If we're clean, there's nothing to save! if (!dirty) { return; } // Otherwise, see if we've got a filename stored & get one if not File fileToSave = null; if (savedFilename == null) { int rVal = jFileChooser.showSaveDialog(null); if (rVal == JFileChooser.APPROVE_OPTION) { fileToSave = jFileChooser.getSelectedFile(); } else { // User canceled save--return! 
return; } } else { fileToSave = savedFilename; } // At this point we're for sure doing the save try { Output output = new Output(new FileOutputStream(fileToSave)); kryo.writeClassAndObject(output, map); output.close(); // clean (on freshly saved map) and track last file saved dirty = false; savedFilename = fileToSave; Debug.print("Successfully wrote map to: " + fileToSave); } catch (FileNotFoundException e) { // TODO(max): Switch to Toast when this is implemented. Debug.print(e); } }
/** * TODO(max): test */ private void loadMap() { // Confirm whether to save first: if (dirty) { int confirmVal = JOptionPane.showOptionDialog(null, "The level has been modified since last save. Save it before loading another?", "Save before load?", JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null, null, null); switch (confirmVal) { case JOptionPane.OK_OPTION: saveMap(); break; case JOptionPane.NO_OPTION: // Do nothing--they don't want to save! We just continue with the load. break; case JOptionPane.CANCEL_OPTION: default: // We return--they didn't mean to load. return; } }
// If we've gotten here, we're going to try to load int rVal = jFileChooser.showOpenDialog(null); if (rVal == JFileChooser.APPROVE_OPTION) { // They picked a file. OK! File fileToSave = jFileChooser.getSelectedFile(); try { Input input = new Input(new FileInputStream(fileToSave)); WasabiMap newMap = (WasabiMap) kryo.readClassAndObject(input); input.close(); // Setup the new map newMap.initialize(atlas, regionMap); // Once we're here, we've presumably gotten a good new map! map.dispose(); // TODO(max): Sure we want to do this? Probably expensive! map = newMap; mapRenderer.setMap(newMap); // clean (on freshly loaded map)! dirty = false; savedFilename = null; Debug.print("Successfully loaded map from: " + jFileChooser.getSelectedFile()); } catch (FileNotFoundException e) { // TODO(max): Switch to Toast when this is implemented. Debug.print(e); } } }
/** * Switches screen to test chamber. */ private void testMap() { // Load the test chamber if it hasn't been loaded, or update it. TestChamber testChamber = null; if (!game.screenLoaded(TestChamber.NAME)) { // Screen hasn't been loaded--make it! testChamber = new TestChamber(game, input, map, batch, atlas); game.addScreen(testChamber, TestChamber.NAME); } else { // screen has been loaded--just update the map! testChamber = (TestChamber) game.getScreen(TestChamber.NAME); testChamber.setMap(map); } // switch game.getAndSetScreen(TestChamber.NAME); }
private void placeSprite() { dirty = true; // NOTE(max): In the future, will switch based on layer what this does (e.g. if // placing spawn point for hero, can only be one.) LevelEditor_MapLayer layer = (LevelEditor_MapLayer) map.getLayers().get(curLayerIdx); AtlasRegion curRegion = layer.regions.get(layer.curRegionIdx); TextureMapObject newObj = new WasabiTextureMapObject(curRegion, curRegion.name, curSpritePos.x, curSpritePos.y, curRegion.getRotatedPackedWidth(), curRegion.getRotatedPackedHeight()); layer.getObjects().add(newObj); }
/** * Handle cursor press. (Place sprite.) * NOTE(max): This behavior will change when the mouse is moved in other viewports! :-) */ private void handleCursorPressed() { // main window functionality placeSprite(); } /** * Unprojects the mouse state from window space into world space of the main cam. * NOTE(max): This behavior will change when the mouse is moved in other viewports!
:-) */ private void handleCursorMoved() { mouseStateUnprojected.x = mouseState.x; mouseStateUnprojected.y = mouseState.y; main_cam.unproject(mouseStateUnprojected, main_viewport.x, main_viewport.y, main_viewport.width, main_viewport.height); curSpriteSetPosition(mouseStateUnprojected.x, mouseStateUnprojected.y); } /** * Moves based on GRID_SPACING (snapToGrid on) or SPRITE_MOVE_SPEED (snapToGrid off). * @param xMove -1 for left, 0 for none, 1 for right * @param yMove -1 for down, 0 for none, 1 for up */ private void curSpriteNudge(int xMove, int yMove) { // Calculate move speed and do tentative translation. float moveSpeed = snapToGrid ? GRID_SPACING : OBJECT_MOVE_SPEED; curSpriteMove(((float) xMove) * moveSpeed, ((float) yMove) * moveSpeed); } private void curSpriteMove(float xAmt, float yAmt) { curSpriteSetPosition(curSpritePos.x + xAmt, curSpritePos.y + yAmt); } private void curSpriteSetPosition(float newXReq, float newYReq) { LevelEditor_MapLayer layer = (LevelEditor_MapLayer) map.getLayers().get(curLayerIdx); AtlasRegion curRegion = layer.regions.get(layer.curRegionIdx); float curW = curRegion.getRotatedPackedWidth(); float curH = curRegion.getRotatedPackedHeight(); float newX = newXReq; float newY = newYReq; // Fix up out-of-bounds movements before moving. if (newX < 0) { newX = 0.0f; } else if (newX + curW > level_width) { newX = level_width - curW; } if (newY < 0) { newY = 0.0f; } else if (newY + curH > level_height) { newY = level_height - curH; } // Adjust if snapping to grid. if (snapToGrid) { newX = newX - newX % GRID_SPACING; newY = newY - newY % GRID_SPACING; } // Finally do the actual setting curSpritePos.set(newX, newY); } private void renderSprites(Camera c) { // everything we placed mapRenderer.setView((OrthographicCamera) c); mapRenderer.renderBackgroundAndCount(); // the one we're moving around if (map.getLayers().get(curLayerIdx).getProperties().get(Constants.MP.LAYER_TYPE) == Constants.MP.LayerType.BG) { // cur layer is background -- render it, then foreground renderCurSprite(c); mapRenderer.renderForegroundAndCount(); } else { // cur layer is foreground -- render foreground, then it mapRenderer.renderForegroundAndCount(); renderCurSprite(c); } } /** * Render the one you're moving around. */ private void renderCurSprite(Camera c) { batch.setProjectionMatrix(c.combined); batch.begin(); LevelEditor_MapLayer layer = (LevelEditor_MapLayer) map.getLayers().get(curLayerIdx); AtlasRegion curRegion = layer.regions.get(layer.curRegionIdx); batch.draw(curRegion, curSpritePos.x, curSpritePos.y, curRegion.getRotatedPackedWidth(), curRegion.getRotatedPackedHeight()); batch.end(); } private void drawEditorLines(Camera c) { // setup shapeRenderer.setProjectionMatrix(c.combined); shapeRenderer.begin(ShapeType.Line); shapeRenderer.setColor(Color.BLACK); // alpha doesn't do anything... // Line to separate main window shapeRenderer.line(main_width, 0, main_width, h); // Line to separate minimap from detail view. shapeRenderer.line(main_width, h - minimap_height, w, h - minimap_height); // end shapeRenderer.end(); } private void drawGrid(Camera c) { // set up renderer shapeRenderer.setProjectionMatrix(c.combined); shapeRenderer.begin(ShapeType.Line); shapeRenderer.setColor(0.5f, 0.5f, 0.5f, 0.0f); // alpha doesn't do anything... 
// vertical lines for (int i = 0; i <= level_width; i += GRID_SPACING) { shapeRenderer.line(i, level_height, i, 0); } // horizontal lines for (int i = 0; i <= level_height; i += GRID_SPACING) { shapeRenderer.line(0, i, level_width, i); } shapeRenderer.end(); } /** * Convenience method for printing. * @param o thing to print */ @SuppressWarnings("unused") private void print(Object o) { System.out.println(o); }
private void handleCommands() { Iterator<Command> cit = commandList.iterator(); while (cit.hasNext()) { LevelEditor_Commands c = (LevelEditor_Commands) cit.next(); if (paused) { // Game is paused command interpretation switch(c) { case PAUSE: resume(); break; default: // do nothing break; } } else { LevelEditor_MapLayer layer = (LevelEditor_MapLayer) map.getLayers().get( curLayerIdx); int numLayers = map.getLayers().getCount(); // Normal Level Editor command interpretation switch(c) { case CAMERA_RIGHT: main_cam.translate(CAM_MOVE_SPEED, 0, 0); break; case CAMERA_LEFT: main_cam.translate(-CAM_MOVE_SPEED, 0, 0); break; case CAMERA_UP: main_cam.translate(0, CAM_MOVE_SPEED, 0); break; case CAMERA_DOWN: main_cam.translate(0, -CAM_MOVE_SPEED, 0); break; case CAMERA_ZOOM_IN_PRESS: case CAMERA_ZOOM_IN_HOLD: if (main_cam.zoom >= ZOOM_LIMIT) { main_cam.zoom -= ZOOM_DELTA; } break; case CAMERA_ZOOM_OUT_PRESS: case CAMERA_ZOOM_OUT_HOLD: main_cam.zoom += ZOOM_DELTA; break; case MOVE_RIGHT: curSpriteNudge(1, 0); break; case MOVE_LEFT: curSpriteNudge(-1, 0); break; case MOVE_UP: curSpriteNudge(0, 1); break; case MOVE_DOWN: curSpriteNudge(0, -1); break; case PAUSE: pause(); break; case NEXT_SPRITE: layer.curRegionIdx = (layer.curRegionIdx + 1) % layer.regions.size; break; case PREVIOUS_SPRITE: layer.curRegionIdx = layer.curRegionIdx == 0 ? layer.regions.size - 1 : layer.curRegionIdx - 1; break; case PLACE_SPRITE: placeSprite(); break; case TOGGLE_GRID: drawGridlines = !drawGridlines; break; case TOGGLE_SNAP_TO_GRID: snapToGrid = !snapToGrid; break; case BOUNDING_BOXES: mapRenderer.renderBoundingBoxes = !mapRenderer.renderBoundingBoxes; break; case CURSOR_MOVED: handleCursorMoved(); break; case PRESS_DOWN: handleCursorPressed(); break; case NEXT_LAYER: curLayerIdx = curLayerIdx == numLayers - 1 ? 0 : curLayerIdx + 1; break; case PREV_LAYER: curLayerIdx = curLayerIdx == 0 ? numLayers - 1 : curLayerIdx - 1; break; case TEST_MAP: testMap(); break; case SAVE_MAP: saveMap(); break; case LOAD_MAP: loadMap(); break; default: // Do nothing.
break; } } } // else (if paused) // removes the inputs that were just PRESS actions input.clearPress(); } // -------------------------------------------------------------------------------------------- // PUBLIC METHODS // -------------------------------------------------------------------------------------------- @Override public void render(float delta) { // input handleCommands(); // handle GL stuff GL20 gl = Gdx.graphics.getGL20(); gl.glClearColor(1, 1, 1, 1); gl.glClear(GL20.GL_COLOR_BUFFER_BIT); // update cameras overall_cam.update(); main_cam.update(); minimap_cam.update(); detail_cam.update(); // Draw main sprites and grid gl.glViewport((int) main_viewport.x, (int) main_viewport.y, (int) main_viewport.width, (int) main_viewport.height); renderSprites(main_cam); if (drawGridlines) { drawGrid(main_cam); } // Draw minimap sprites gl.glViewport((int) minimap_viewport.x, (int) minimap_viewport.y, (int) minimap_viewport.width, (int) minimap_viewport.height); renderSprites(minimap_cam); // Draw overall lines gl.glViewport((int) overall_viewport.x, (int) overall_viewport.y, (int) overall_viewport.width, (int) overall_viewport.height); drawEditorLines(overall_cam); // paused overlay if (paused) { Common.drawPauseOverlay(overall_cam, batch, controls.getControlsList()); } if (Debug.DEBUG) { LevelEditor_MapLayer layer = (LevelEditor_MapLayer) map.getLayers().get(curLayerIdx); Debug.debugLine("Layer: " + curLayerIdx); Debug.debugLine("Current img: " + layer.regions.get(layer.curRegionIdx).name); Common.displayFps(overall_cam, batch); } } @Override public void resize(int width, int height) { // only resize if anything has actually changed if (width == (int) w && height == (int) h) { return; } w = width; h = height; // reconfig overall viewport/cam overall_viewport.width = w; overall_viewport.height = h; overall_cam.viewportWidth = w; overall_cam.viewportHeight = h; overall_cam.position.scl(0.0f); overall_cam.translate(w / 2.0f, h / 2.0f, 0.0f); // reconfig main viewport/cam main_width = w * MAIN_VIEWPORT_WIDTH_FRAC; main_viewport.width = main_width; main_viewport.height = h; main_cam.viewportWidth = main_width; main_cam.viewportHeight = h; main_cam.position.scl(0.0f); main_cam.translate(main_width / 2.0f, h / 2.0f, 0.0f); // reconfig minimap viewport/ cam minimap_height = (w - main_width) * (level_height / level_width); minimap_viewport.x = main_width; minimap_viewport.y = h - minimap_height; minimap_viewport.width = w - main_width; minimap_viewport.height = minimap_height; //minimap_cam = new OrthographicCamera(level_width, level_height); minimap_cam.position.scl(0.0f); minimap_cam.translate(level_width / 2.0f, level_height / 2.0f, 0.0f); // reconfig detail viewport / cam detail_viewport.x = main_width; detail_viewport.width = w - main_width; detail_viewport.height = h - minimap_height; // NOTE(max): Update this once it's been actually set. // detail_cam = new OrthographicCamera(...); // not sure what to set this to... } @Override public void show() { mouseState = input.setControls(controls, commandList); } @Override public void hide() { // NOTE(max): Do we need to revoke the controls here? // Do we need to do anything else here? } @Override public void pause() { paused = true; } @Override public void resume() { paused = false; } @Override public void dispose() { // so this never happens... batch.dispose(); } }
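/*
 * Illustrative sketch, not part of the original sources: the snap-to-grid arithmetic in
 * curSpriteSetPosition() above is "clamp into the level bounds, then subtract the remainder
 * modulo GRID_SPACING". With GRID_SPACING = 50, a requested x of 137 snaps to 100 and 162
 * snaps to 150. The class below only restates that arithmetic in isolation; all names are
 * placeholders.
 */
final class GridSnapSketch {
    private static final int GRID_SPACING = 50; // same value as LevelEditor.GRID_SPACING

    static float snap(float requested, float spriteSize, float levelSize) {
        // Clamp into [0, levelSize - spriteSize] first ...
        float clamped = Math.max(0f, Math.min(requested, levelSize - spriteSize));
        // ... then drop the remainder so the position lands on a grid line.
        return clamped - clamped % GRID_SPACING;
    }

    public static void main(String[] args) {
        System.out.println(snap(137f, 50f, 5000f));  // 100.0
        System.out.println(snap(162f, 50f, 5000f));  // 150.0
        System.out.println(snap(4999f, 64f, 5000f)); // 4900.0 (clamped to 4936, then snapped)
    }
}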
/* * Copyright 2003-2007 Dave Griffith, Bas Leijdekkers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.siyeh.ig.initialization; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.openapi.project.Project; import com.intellij.psi.*; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.Processor; import com.siyeh.InspectionGadgetsBundle; import com.siyeh.ig.BaseInspection; import com.siyeh.ig.BaseInspectionVisitor; import com.siyeh.ig.InspectionGadgetsFix; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import java.util.Collection; public class NonThreadSafeLazyInitializationInspection extends BaseInspection { @Override @NotNull public String getDisplayName() { return InspectionGadgetsBundle.message( "non.thread.safe.lazy.initialization.display.name"); } @Override @NotNull public String buildErrorString(Object... infos) { return InspectionGadgetsBundle.message( "non.thread.safe.lazy.initialization.problem.descriptor"); } @Override public BaseInspectionVisitor buildVisitor() { return new UnsafeSafeLazyInitializationVisitor(); } private static class UnsafeSafeLazyInitializationVisitor extends BaseInspectionVisitor { @Override public void visitAssignmentExpression( @NotNull PsiAssignmentExpression expression) { super.visitAssignmentExpression(expression); final PsiExpression lhs = expression.getLExpression(); if (!(lhs instanceof PsiReferenceExpression)) { return; } final PsiReference reference = (PsiReference)lhs; final PsiElement referent = reference.resolve(); if (!(referent instanceof PsiField)) { return; } final PsiField field = (PsiField)referent; if (!field.hasModifierProperty(PsiModifier.STATIC)) { return; } if (isInStaticInitializer(expression)) { return; } if (isInSynchronizedContext(expression)) { return; } if (!isLazy(expression, (PsiReferenceExpression)lhs)) { return; } boolean assignedOnce = isAssignedOnce(referent); boolean safeToDelete = isSafeToDeleteIfStatement(expression); registerError(lhs, assignedOnce && safeToDelete); } private static boolean isAssignedOnce(PsiElement referent) { final int[] writeCount = new int[1]; return ReferencesSearch.search(referent).forEach(new Processor<PsiReference>() { @Override public boolean process(PsiReference reference) { PsiElement element = reference.getElement(); if (!(element instanceof PsiExpression)) { return true; } if (!PsiUtil.isAccessedForWriting((PsiExpression)element)) { return true; } return ++writeCount[0] != 2; } }); } private static boolean isSafeToDeleteIfStatement(PsiElement expression) { PsiIfStatement ifStatement = PsiTreeUtil.getParentOfType(expression, PsiIfStatement.class); if (ifStatement.getElseBranch() != null) { return false; } PsiStatement thenBranch = ifStatement.getThenBranch(); if (thenBranch == null) return false; if (!(thenBranch instanceof 
PsiBlockStatement)) { return true; } return ((PsiBlockStatement)thenBranch).getCodeBlock().getStatements().length == 1; } private static boolean isLazy(PsiAssignmentExpression expression, PsiReferenceExpression lhs) { final PsiIfStatement ifStatement = PsiTreeUtil.getParentOfType(expression, PsiIfStatement.class); if (ifStatement == null) { return false; } final PsiExpression condition = ifStatement.getCondition(); if (condition == null) { return false; } return isNullComparison(condition, lhs); } private static boolean isNullComparison( PsiExpression condition, PsiReferenceExpression reference) { if (!(condition instanceof PsiBinaryExpression)) { return false; } final PsiBinaryExpression comparison = (PsiBinaryExpression)condition; final IElementType tokenType = comparison.getOperationTokenType(); if (!tokenType.equals(JavaTokenType.EQEQ)) { return false; } final PsiExpression lhs = comparison.getLOperand(); final PsiExpression rhs = comparison.getROperand(); if (rhs == null) { return false; } final String lhsText = lhs.getText(); final String rhsText = rhs.getText(); if (!PsiKeyword.NULL.equals(lhsText) && !PsiKeyword.NULL.equals(rhsText)) { return false; } final String referenceText = reference.getText(); return referenceText.equals(lhsText) || referenceText.equals(rhsText); } private static boolean isInSynchronizedContext(PsiElement element) { final PsiSynchronizedStatement syncBlock = PsiTreeUtil.getParentOfType(element, PsiSynchronizedStatement.class); if (syncBlock != null) { return true; } final PsiMethod method = PsiTreeUtil.getParentOfType(element, PsiMethod.class); return method != null && method.hasModifierProperty(PsiModifier.SYNCHRONIZED) && method.hasModifierProperty(PsiModifier.STATIC); } private static boolean isInStaticInitializer(PsiElement element) { final PsiClassInitializer initializer = PsiTreeUtil.getParentOfType(element, PsiClassInitializer.class); return initializer != null && initializer.hasModifierProperty(PsiModifier.STATIC); } } @Override protected InspectionGadgetsFix buildFix(Object... infos) { boolean isApplicable = ((Boolean)infos[0]).booleanValue(); return isApplicable ? new IntroduceHolderFix() : null; } private static class IntroduceHolderFix extends InspectionGadgetsFix { @Override protected void doFix(Project project, ProblemDescriptor descriptor) throws IncorrectOperationException { PsiReferenceExpression expression = (PsiReferenceExpression)descriptor.getPsiElement(); PsiElement resolved = expression.resolve(); if (!(resolved instanceof PsiField)) return; PsiField field = (PsiField)resolved; String holderName = suggestHolderName(field); @NonNls String text = "private static class " + holderName + " {" + "private static final " + field.getType().getCanonicalText() + " " + field.getName() + " = " + ((PsiAssignmentExpression)expression.getParent()).getRExpression().getText() + ";" + "}"; PsiElementFactory elementFactory = JavaPsiFacade.getInstance(field.getProject()).getElementFactory(); PsiClass holder = elementFactory.createClassFromText(text, field).getInnerClasses()[0]; PsiMethod method = PsiTreeUtil.getParentOfType(expression, PsiMethod.class); method.getParent().addBefore(holder, method); PsiIfStatement ifStatement = PsiTreeUtil.getParentOfType(expression, PsiIfStatement.class); ifStatement.delete(); final PsiExpression holderReference = elementFactory.createExpressionFromText(holderName + "." 
+ field.getName(), field); Collection<PsiReference> references = ReferencesSearch.search(field).findAll(); for (PsiReference reference : references) { PsiElement element = reference.getElement(); element.replace(holderReference); } field.delete(); } @NonNls private static String suggestHolderName(PsiField field) { String string = field.getType().getDeepComponentType().getPresentableText(); final int index = string.indexOf('<'); if (index != -1) { string = string.substring(0, index); } return string + "Holder"; } @Override @NotNull public String getName() { return "Introduce holder class"; } } }
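/*
 * Illustrative sketch, not part of the original sources: the shape of code this inspection
 * flags and the holder idiom that IntroduceHolderFix rewrites it into. The doFix() above
 * generates exactly such a nested "...Holder" class and replaces field references with
 * Holder.field; class loading guarantees the static final field is initialized once, so no
 * explicit synchronization is needed. The field and class names here are placeholders.
 */
final class LazyInitHolderSketch {
    // Flagged pattern: lazily assigned static field with no synchronization.
    private static java.util.List<String> cache;

    static java.util.List<String> getCacheUnsafe() {
        if (cache == null) {                          // two threads can race past this check
            cache = new java.util.ArrayList<String>();
        }
        return cache;
    }

    // After the fix: initialization-on-demand holder.
    private static class CacheHolder {
        private static final java.util.List<String> cache = new java.util.ArrayList<String>();
    }

    static java.util.List<String> getCacheSafe() {
        return CacheHolder.cache;
    }
}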
package cz.metacentrum.perun.core.entry; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import cz.metacentrum.perun.core.api.AttributeDefinition; import cz.metacentrum.perun.core.api.AuthzResolver; import cz.metacentrum.perun.core.api.Destination; import cz.metacentrum.perun.core.api.Facility; import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.Resource; import cz.metacentrum.perun.core.api.RichDestination; import cz.metacentrum.perun.core.api.Role; import cz.metacentrum.perun.core.api.Service; import cz.metacentrum.perun.core.api.ServiceAttributes; import cz.metacentrum.perun.core.api.ServicesManager; import cz.metacentrum.perun.core.api.ServicesPackage; import cz.metacentrum.perun.core.api.Vo; import cz.metacentrum.perun.core.api.exceptions.AttributeAlreadyAssignedException; import cz.metacentrum.perun.core.api.exceptions.AttributeNotAssignedException; import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.DestinationAlreadyAssignedException; import cz.metacentrum.perun.core.api.exceptions.DestinationAlreadyRemovedException; import cz.metacentrum.perun.core.api.exceptions.DestinationNotExistsException; import cz.metacentrum.perun.core.api.exceptions.FacilityNotExistsException; import cz.metacentrum.perun.core.api.exceptions.InternalErrorException; import cz.metacentrum.perun.core.api.exceptions.PrivilegeException; import cz.metacentrum.perun.core.api.exceptions.RelationExistsException; import cz.metacentrum.perun.core.api.exceptions.ServiceAlreadyAssignedException; import cz.metacentrum.perun.core.api.exceptions.ServiceAlreadyRemovedException; import cz.metacentrum.perun.core.api.exceptions.ServiceAlreadyRemovedFromServicePackageException; import cz.metacentrum.perun.core.api.exceptions.ServiceExistsException; import cz.metacentrum.perun.core.api.exceptions.ServiceNotExistsException; import cz.metacentrum.perun.core.api.exceptions.ServicesPackageExistsException; import cz.metacentrum.perun.core.api.exceptions.ServicesPackageNotExistsException; import cz.metacentrum.perun.core.api.exceptions.VoNotExistsException; import cz.metacentrum.perun.core.api.exceptions.WrongPatternException; import cz.metacentrum.perun.core.bl.PerunBl; import cz.metacentrum.perun.core.bl.ServicesManagerBl; import cz.metacentrum.perun.core.impl.Utils; import java.util.ArrayList; /** * @author Slavek Licehammer <glory@ics.muni.cz> */ public class ServicesManagerEntry implements ServicesManager { final static Logger log = LoggerFactory.getLogger(ServicesManagerEntry.class); private PerunBl perunBl; private ServicesManagerBl servicesManagerBl; public ServicesManagerEntry(PerunBl perunBl) { this.perunBl = perunBl; this.servicesManagerBl = perunBl.getServicesManagerBl(); } public ServicesManagerEntry() { } public Service createService(PerunSession sess, Service service) throws InternalErrorException, PrivilegeException, ServiceExistsException { Utils.checkPerunSession(sess); Utils.notNull(service, "service"); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "createService"); } return getServicesManagerBl().createService(sess, service); } public void deleteService(PerunSession sess, Service service) throws InternalErrorException, ServiceNotExistsException, PrivilegeException, RelationExistsException, ServiceAlreadyRemovedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, 
Role.PERUNADMIN)) { throw new PrivilegeException(sess, "deleteService"); } getServicesManagerBl().checkServiceExists(sess, service); getServicesManagerBl().deleteService(sess, service); } public void updateService(PerunSession sess, Service service) throws InternalErrorException, ServiceNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "updateService"); } getServicesManagerBl().checkServiceExists(sess, service); getServicesManagerBl().updateService(sess, service); } public Service getServiceById(PerunSession sess, int id) throws InternalErrorException, PrivilegeException, ServiceNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.RPC)) { throw new PrivilegeException(sess, "getServiceById"); } return getServicesManagerBl().getServiceById(sess, id); } public Service getServiceByName(PerunSession sess, String name) throws InternalErrorException, PrivilegeException, ServiceNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(sess, Role.ENGINE)) { throw new PrivilegeException(sess, "getServiceByName"); } Utils.notNull(name, "name"); return getServicesManagerBl().getServiceByName(sess, name); } public List<Service> getServices(PerunSession sess) throws InternalErrorException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN)) { throw new PrivilegeException(sess, "getServices"); } return getServicesManagerBl().getServices(sess); } @Override public List<Service> getServicesByAttributeDefinition(PerunSession sess, AttributeDefinition attributeDefinition) throws InternalErrorException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "getServicesByAttributeDefinition"); } return getServicesManagerBl().getServicesByAttributeDefinition(sess, attributeDefinition); } @Override public List<Resource> getAssignedResources(PerunSession sess, Service service) throws InternalErrorException, PrivilegeException, ServiceNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER)) { throw new PrivilegeException(sess, "getAssignedResources"); } getServicesManagerBl().checkServiceExists(sess, service); return getServicesManagerBl().getAssignedResources(sess, service); } public ServiceAttributes getHierarchicalData(PerunSession sess, Service service, Facility facility) throws InternalErrorException, FacilityNotExistsException, ServiceNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "getHierarchicalData"); } getServicesManagerBl().checkServiceExists(sess, service); 
getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getHierarchicalData(sess, service, facility); } public ServiceAttributes getFlatData(PerunSession sess, Service service, Facility facility) throws InternalErrorException, FacilityNotExistsException, ServiceNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "getFlatData"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getFlatData(sess, service, facility); } public ServiceAttributes getDataWithGroups(PerunSession sess, Service service, Facility facility) throws InternalErrorException, FacilityNotExistsException, ServiceNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "getDataWithGroups"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getDataWithGroups(sess, service, facility); } public ServiceAttributes getDataWithVos(PerunSession sess, Service service, Facility facility) throws InternalErrorException, FacilityNotExistsException, VoNotExistsException, ServiceNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.ENGINE) && !AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "getDataWithVos"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getDataWithVos(sess, service, facility); } public List<ServicesPackage> getServicesPackages(PerunSession sess) throws InternalErrorException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER)) { throw new PrivilegeException(sess, "getServicesPackages"); } return getServicesManagerBl().getServicesPackages(sess); } public ServicesPackage getServicesPackageById(PerunSession sess, int servicesPackageId) throws InternalErrorException, ServicesPackageNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(sess, Role.RPC)) { throw new PrivilegeException(sess, "getServicesPackageById"); } return getServicesManagerBl().getServicesPackageById(sess, servicesPackageId); } public ServicesPackage getServicesPackageByName(PerunSession sess, String servicesPackageName) throws InternalErrorException, ServicesPackageNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); Utils.notNull(servicesPackageName, "servicesPackageName"); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER)) { throw new PrivilegeException(sess, "getServicesPackageByName"); } return getServicesManagerBl().getServicesPackageByName(sess, servicesPackageName); } public 
ServicesPackage createServicesPackage(PerunSession sess, ServicesPackage servicesPackage) throws InternalErrorException, PrivilegeException, ServicesPackageExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "createServicesPackage"); } Utils.notNull(servicesPackage, "servicesPackage"); return getServicesManagerBl().createServicesPackage(sess, servicesPackage); } public void updateServicesPackage(PerunSession sess, ServicesPackage servicesPackage) throws InternalErrorException, ServicesPackageNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "updateServicesPackage"); } getServicesManagerBl().checkServicesPackageExists(sess, servicesPackage); getServicesManagerBl().updateServicesPackage(sess, servicesPackage); } public void deleteServicesPackage(PerunSession sess, ServicesPackage servicesPackage) throws InternalErrorException, ServicesPackageNotExistsException, PrivilegeException, RelationExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "deleteServicesPackage"); } getServicesManagerBl().checkServicesPackageExists(sess, servicesPackage); getServicesManagerBl().deleteServicesPackage(sess, servicesPackage); } public void addServiceToServicesPackage(PerunSession sess, ServicesPackage servicesPackage, Service service) throws InternalErrorException, ServicesPackageNotExistsException, ServiceNotExistsException, PrivilegeException, ServiceAlreadyAssignedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "addServiceToServicesPackage"); } getServicesManagerBl().checkServicesPackageExists(sess, servicesPackage); getServicesManagerBl().checkServiceExists(sess, service); getServicesManagerBl().addServiceToServicesPackage(sess, servicesPackage, service); } public void removeServiceFromServicesPackage(PerunSession sess, ServicesPackage servicesPackage, Service service) throws InternalErrorException, ServicesPackageNotExistsException, ServiceNotExistsException, PrivilegeException, ServiceAlreadyRemovedFromServicePackageException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "removeServiceFromServicesPackage"); } getServicesManagerBl().checkServicesPackageExists(sess, servicesPackage); getServicesManagerBl().checkServiceExists(sess, service); getServicesManagerBl().removeServiceFromServicesPackage(sess, servicesPackage, service); } public List<Service> getServicesFromServicesPackage(PerunSession sess, ServicesPackage servicesPackage) throws InternalErrorException, ServicesPackageNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER)) { throw new PrivilegeException(sess, "getServicesFromServicesPackage"); } getServicesManagerBl().checkServicesPackageExists(sess, servicesPackage); return getServicesManagerBl().getServicesFromServicesPackage(sess, servicesPackage); } public void addRequiredAttribute(PerunSession sess, Service service, AttributeDefinition attribute) throws PrivilegeException, InternalErrorException, 
AttributeNotExistsException, ServiceNotExistsException, AttributeAlreadyAssignedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "addRequiredAttribute"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getAttributesManagerBl().checkAttributeExists(sess, attribute); getServicesManagerBl().addRequiredAttribute(sess, service, attribute); } public void addRequiredAttributes(PerunSession sess, Service service, List<? extends AttributeDefinition> attributes) throws PrivilegeException, InternalErrorException, AttributeNotExistsException, ServiceNotExistsException, AttributeAlreadyAssignedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "addRequiredAttributes"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getAttributesManagerBl().checkAttributesExists(sess, attributes); getServicesManagerBl().addRequiredAttributes(sess, service, attributes); } public void removeRequiredAttribute(PerunSession sess, Service service, AttributeDefinition attribute) throws PrivilegeException, InternalErrorException, AttributeNotExistsException, ServiceNotExistsException, AttributeNotAssignedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "removeRequiredAttribute"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getAttributesManagerBl().checkAttributeExists(sess, attribute); getServicesManagerBl().removeRequiredAttribute(sess, service, attribute); } public void removeRequiredAttributes(PerunSession sess, Service service, List<? 
extends AttributeDefinition> attributes) throws PrivilegeException, InternalErrorException, AttributeNotExistsException, ServiceNotExistsException, AttributeNotAssignedException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "removeRequiredAttributes"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getAttributesManagerBl().checkAttributesExists(sess, attributes); getServicesManagerBl().removeRequiredAttributes(sess, service, attributes); } public void removeAllRequiredAttributes(PerunSession sess, Service service) throws PrivilegeException, InternalErrorException, ServiceNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "removeRequiredAttribute"); } getServicesManagerBl().checkServiceExists(sess, service); getServicesManagerBl().removeAllRequiredAttributes(sess, service); } public Destination addDestination(PerunSession perunSession, List<Service> services, Facility facility, Destination destination) throws PrivilegeException, InternalErrorException, ServiceNotExistsException, FacilityNotExistsException, DestinationAlreadyAssignedException, WrongPatternException { Utils.checkPerunSession(perunSession); Utils.notNull(services, "services"); Utils.checkDestinationType(destination); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); // Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(perunSession, "addDestination"); } //prepare lists of facilities List<Facility> facilitiesByHostname = new ArrayList<Facility>(); List<Facility> facilitiesByDestination = new ArrayList<Facility>(); if(destination.getType().equals(Destination.DESTINATIONHOSTTYPE) || destination.getType().equals(Destination.DESTINATIONUSERHOSTTYPE) || destination.getType().equals(Destination.DESTINATIONUSERHOSTPORTTYPE)) { facilitiesByHostname = getPerunBl().getFacilitiesManagerBl().getFacilitiesByHostName(perunSession, destination.getHostNameFromDestination()); if(facilitiesByHostname.isEmpty()) facilitiesByDestination = getPerunBl().getFacilitiesManagerBl().getFacilitiesByDestination(perunSession, destination.getHostNameFromDestination()); if(!facilitiesByHostname.isEmpty()) { boolean hasRight = false; for(Facility f: facilitiesByHostname) { if(AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, f)) { hasRight = true; break; } } if(!hasRight) throw new PrivilegeException("You have no right to add this destination."); } if(!facilitiesByDestination.isEmpty()) { boolean hasRight = false; for(Facility f: facilitiesByDestination) { if(AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, f)) { hasRight = true; break; } } if(!hasRight) throw new PrivilegeException("You have no right to add this destination."); } } for(Service s: services) { getServicesManagerBl().checkServiceExists(perunSession, s); } Utils.notNull(destination, "destination"); Utils.notNull(destination.getDestination(), "destination.destination"); Utils.notNull(destination.getType(), "destination.type"); return getServicesManagerBl().addDestination(perunSession, services, facility, destination); } public Destination addDestination(PerunSession sess, Service service, Facility facility, Destination destination) throws InternalErrorException, PrivilegeException, ServiceNotExistsException, FacilityNotExistsException, 
DestinationAlreadyAssignedException, WrongPatternException { Utils.checkPerunSession(sess); Utils.checkDestinationType(destination); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "addDestination"); } //prepare lists of facilities List<Facility> facilitiesByHostname = new ArrayList<Facility>(); List<Facility> facilitiesByDestination = new ArrayList<Facility>(); if(destination.getType().equals(Destination.DESTINATIONHOSTTYPE) || destination.getType().equals(Destination.DESTINATIONUSERHOSTTYPE) || destination.getType().equals(Destination.DESTINATIONUSERHOSTPORTTYPE)) { facilitiesByHostname = getPerunBl().getFacilitiesManagerBl().getFacilitiesByHostName(sess, destination.getHostNameFromDestination()); if(facilitiesByHostname.isEmpty()) facilitiesByDestination = getPerunBl().getFacilitiesManagerBl().getFacilitiesByDestination(sess, destination.getHostNameFromDestination()); if(!facilitiesByHostname.isEmpty()) { boolean hasRight = false; for(Facility f: facilitiesByHostname) { if(AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, f)) { hasRight = true; break; } } if(!hasRight) throw new PrivilegeException("You have no right to add this destination."); } if(!facilitiesByDestination.isEmpty()) { boolean hasRight = false; for(Facility f: facilitiesByDestination) { if(AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, f)) { hasRight = true; break; } } if(!hasRight) throw new PrivilegeException("You have no right to add this destination."); } } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); Utils.notNull(destination, "destination"); Utils.notNull(destination.getDestination(), "destination.destination"); Utils.notNull(destination.getType(), "destination.type"); return getServicesManagerBl().addDestination(sess, service, facility, destination); } public void removeDestination(PerunSession sess, Service service, Facility facility, Destination destination) throws InternalErrorException, PrivilegeException, ServiceNotExistsException, FacilityNotExistsException, DestinationAlreadyRemovedException { Utils.checkPerunSession(sess); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "removeDestination"); } getServicesManagerBl().checkServiceExists(sess, service); Utils.notNull(destination, "destination"); Utils.notNull(destination.getDestination(), "destination.destination"); Utils.notNull(destination.getType(), "destination.type"); getServicesManagerBl().removeDestination(sess, service, facility, destination); } public Destination getDestinationById(PerunSession sess, int id) throws PrivilegeException, InternalErrorException, DestinationNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN) && !AuthzResolver.isAuthorized(sess, Role.ENGINE)) { throw new PrivilegeException(sess, "getDestinationById"); } return getServicesManagerBl().getDestinationById(sess, id); } public List<Destination> getDestinations(PerunSession sess, Service service, Facility facility) throws InternalErrorException, PrivilegeException, ServiceNotExistsException, FacilityNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, 
Role.FACILITYADMIN, facility) && !AuthzResolver.isAuthorized(sess, Role.VOADMIN) && !AuthzResolver.isAuthorized(sess, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(sess, Role.ENGINE)) { throw new PrivilegeException(sess, "getDestinations"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getDestinations(sess, service, facility); } @Override public List<Destination> getDestinations(PerunSession perunSession) throws InternalErrorException, PrivilegeException { Utils.checkPerunSession(perunSession); // Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.PERUNADMIN)) { throw new PrivilegeException(perunSession, "getDestinations"); } return getServicesManagerBl().getDestinations(perunSession); } public List<RichDestination> getAllRichDestinations(PerunSession perunSession, Facility facility) throws PrivilegeException, InternalErrorException, FacilityNotExistsException{ Utils.checkPerunSession(perunSession); //Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, facility) && !AuthzResolver.isAuthorized(perunSession, Role.VOADMIN) && !AuthzResolver.isAuthorized(perunSession, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(perunSession, Role.ENGINE)) { throw new PrivilegeException(perunSession, "getAllRichDestinations"); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); return getPerunBl().getServicesManagerBl().getAllRichDestinations(perunSession, facility); } public List<RichDestination> getAllRichDestinations(PerunSession perunSession, Service service) throws PrivilegeException, InternalErrorException, ServiceNotExistsException{ Utils.checkPerunSession(perunSession); //Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.PERUNADMIN)) throw new PrivilegeException(perunSession, "getAllRichDestinations"); getServicesManagerBl().checkServiceExists(perunSession, service); return getPerunBl().getServicesManagerBl().getAllRichDestinations(perunSession, service); } public List<RichDestination> getRichDestinations(PerunSession perunSession, Facility facility, Service service) throws PrivilegeException, InternalErrorException, FacilityNotExistsException, ServiceNotExistsException{ Utils.checkPerunSession(perunSession); //Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, facility) && !AuthzResolver.isAuthorized(perunSession, Role.VOADMIN) && !AuthzResolver.isAuthorized(perunSession, Role.VOOBSERVER) && !AuthzResolver.isAuthorized(perunSession, Role.ENGINE)) { throw new PrivilegeException(perunSession, "getRichDestinations"); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); getServicesManagerBl().checkServiceExists(perunSession, service); return getPerunBl().getServicesManagerBl().getRichDestinations(perunSession, facility, service); } public void removeAllDestinations(PerunSession sess, Service service, Facility facility) throws InternalErrorException, PrivilegeException, ServiceNotExistsException, FacilityNotExistsException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "removeAllDestinations"); } getServicesManagerBl().checkServiceExists(sess, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); getServicesManagerBl().removeAllDestinations(sess, service, facility); } public List<Destination> 
getFacilitiesDestinations(PerunSession sess, Vo vo) throws InternalErrorException, PrivilegeException, VoNotExistsException { Utils.checkPerunSession(sess); getPerunBl().getVosManagerBl().checkVoExists(sess, vo); return getPerunBl().getServicesManagerBl().getFacilitiesDestinations(sess, vo); } public int getDestinationIdByName(PerunSession sess, String name) throws InternalErrorException, DestinationNotExistsException { return servicesManagerBl.getDestinationIdByName(sess, name); } public List<Service> getAssignedServices(PerunSession sess, Facility facility) throws InternalErrorException, FacilityNotExistsException, PrivilegeException { Utils.checkPerunSession(sess); //Authorization if (!AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility) && !AuthzResolver.isAuthorized(sess, Role.ENGINE)) { throw new PrivilegeException(sess, "getAssignedServices"); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); return getServicesManagerBl().getAssignedServices(sess, facility); } public PerunBl getPerunBl() { return this.perunBl; } /** * Sets the perunBl for this instance. * * @param perunBl The perunBl. */ public void setPerunBl(PerunBl perunBl) { this.perunBl = perunBl; } /** * Sets the servicesManagerBl for this instance. * * @param servicesManagerBl The servicesManagerBl. */ public void setServicesManagerBl(ServicesManagerBl servicesManagerBl) { this.servicesManagerBl = servicesManagerBl; } public ServicesManagerBl getServicesManagerBl() { return this.servicesManagerBl; } @Override public List<Destination> addDestinationsForAllServicesOnFacility(PerunSession sess, Facility facility, Destination destination) throws PrivilegeException, InternalErrorException, FacilityNotExistsException, DestinationAlreadyAssignedException, WrongPatternException { Utils.checkPerunSession(sess); Utils.checkDestinationType(destination); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(sess, "addDestinationsForAllServices"); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(sess, facility); Utils.notNull(destination, "destination"); Utils.notNull(destination.getDestination(), "destination.destination"); Utils.notNull(destination.getType(), "destination.type"); return getServicesManagerBl().addDestinationsForAllServicesOnFacility(sess, facility, destination); } @Override public List<Destination> addDestinationsDefinedByHostsOnFacility(PerunSession perunSession, Service service, Facility facility) throws PrivilegeException, InternalErrorException, ServiceNotExistsException, FacilityNotExistsException, DestinationAlreadyAssignedException { Utils.checkPerunSession(perunSession); // Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(perunSession, "addDestinationsDefinedByHostsOnFacility"); } getServicesManagerBl().checkServiceExists(perunSession, service); getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); return getServicesManagerBl().addDestinationsDefinedByHostsOnFacility(perunSession, service, facility); } @Override public List<Destination> addDestinationsDefinedByHostsOnFacility(PerunSession perunSession, List<Service> services, Facility facility) throws PrivilegeException, InternalErrorException, ServiceNotExistsException, FacilityNotExistsException { Utils.checkPerunSession(perunSession); Utils.notNull(services, "services"); // Authorization if (!AuthzResolver.isAuthorized(perunSession, 
Role.FACILITYADMIN, facility)) { throw new PrivilegeException(perunSession, "addDestinationsDefinedByHostsOnFacility"); } for(Service s: services) { getServicesManagerBl().checkServiceExists(perunSession, s); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); return getServicesManagerBl().addDestinationsDefinedByHostsOnFacility(perunSession, services, facility); } @Override public List<Destination> addDestinationsDefinedByHostsOnFacility(PerunSession perunSession, Facility facility) throws PrivilegeException, InternalErrorException, FacilityNotExistsException { Utils.checkPerunSession(perunSession); // Authorization if (!AuthzResolver.isAuthorized(perunSession, Role.FACILITYADMIN, facility)) { throw new PrivilegeException(perunSession, "addDestinationsDefinedByHostsOnFacility"); } getPerunBl().getFacilitiesManagerBl().checkFacilityExists(perunSession, facility); return getServicesManagerBl().addDestinationsDefinedByHostsOnFacility(perunSession, facility); } @Override public int getDestinationsCount(PerunSession sess) throws InternalErrorException, PrivilegeException { Utils.checkPerunSession(sess); // Authorization if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) { throw new PrivilegeException(sess, "getDestinationsCount"); } return getServicesManagerBl().getDestinationsCount(sess); } }
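/*
 * Hedged sketch, not part of the Perun sources above: every entry-layer method in the class above
 * follows the same four steps -- validate the session, authorize the caller, verify that the
 * referenced entities exist, and only then delegate to the business-logic ("Bl") layer. The class
 * and method names below are hypothetical illustrations of that pattern; they reuse only types and
 * calls that already appear above (PerunSession, Service, Role, AuthzResolver, Utils,
 * ServicesManagerBl), and imports are omitted because they belong to the file above.
 */
class EntryLayerPatternSketch {

	private ServicesManagerBl servicesManagerBl; // assumed to be injected, as in the class above

	void exampleEntryMethod(PerunSession sess, Service service) throws InternalErrorException, PrivilegeException, ServiceNotExistsException {
		Utils.checkPerunSession(sess);                                 // 1) the session must be valid
		if (!AuthzResolver.isAuthorized(sess, Role.PERUNADMIN)) {      // 2) the caller must hold the required role
			throw new PrivilegeException(sess, "exampleEntryMethod");  //    the action name is passed along for auditing
		}
		servicesManagerBl.checkServiceExists(sess, service);           // 3) referenced entities must exist
		servicesManagerBl.removeAllRequiredAttributes(sess, service);  // 4) the real work happens in the Bl layer
	}
}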
/* * The MIT License (MIT) * * Copyright (c) 2015 Carlos Andres Jimenez <apps@carlosandresjimenez.co> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package co.carlosandresjimenez.android.gotit; import android.app.Dialog; import android.app.ProgressDialog; import android.content.DialogInterface; import android.os.Bundle; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.widget.Button; import android.widget.TextView; import java.util.Observable; import java.util.Observer; import co.carlosandresjimenez.android.gotit.beans.Following; import co.carlosandresjimenez.android.gotit.cloud.ApplicationState; import co.carlosandresjimenez.android.gotit.cloud.CloudFactory; import co.carlosandresjimenez.android.gotit.cloud.CloudManager; /** * Created by carlosjimenez on 10/4/15. 
*/ public class FollowFragment extends DialogFragment implements Observer { private static final String LOG_TAG = MainActivity.class.getSimpleName(); TextView mTvEmail; ProgressDialog progressDialog; Listener mListener; CloudManager mCloudManager; String mSessionEmail; public void setListener(Listener l) { mListener = l; } public interface Listener { void onFollowRequested(String userEmail); } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); // Get the layout inflater LayoutInflater inflater = getActivity().getLayoutInflater(); View dialogView = inflater.inflate(R.layout.dialog_follow, null); mTvEmail = (TextView) dialogView.findViewById(R.id.email); mSessionEmail = Utility.getUserEmail(getActivity()); // Inflate and set the layout for the dialog // Pass null as the parent view because its going in the dialog layout builder.setTitle(R.string.title_dialog_follow) .setView(dialogView) // Add action buttons .setPositiveButton(R.string.action_request, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { } }) .setNegativeButton(R.string.action_cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { FollowFragment.this.getDialog().cancel(); } }); AlertDialog alertDialog = builder.create(); alertDialog.setCanceledOnTouchOutside(false); alertDialog.show(); Button positiveButton = alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); positiveButton.setOnClickListener(new RequestPermissionListener(alertDialog)); return alertDialog; } private class RequestPermissionListener implements View.OnClickListener { private final Dialog dialog; public RequestPermissionListener(Dialog dialog) { this.dialog = dialog; } @Override public void onClick(View v) { // put your code here Log.i(LOG_TAG, "Button id: " + v.getId()); String mValue = mTvEmail.getText().toString(); if (mValue.isEmpty()) { openResultDialog("Invalid email entered"); return; } if (mValue.equals(mSessionEmail)) { openResultDialog("Invalid email entered, you cannot follow yourself."); return; } mListener.onFollowRequested(mTvEmail.getText().toString()); Utility.hideSoftKeyboard(getActivity(), mTvEmail); requestPermission(); openValidatingDialog(); } } public void openValidatingDialog() { progressDialog = ProgressDialog.show(getActivity(), null, "Requesting permission...", true, false); } public void openResultDialog(String message) { AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(getActivity()); alertDialogBuilder.setIcon(android.R.drawable.ic_dialog_alert); alertDialogBuilder.setTitle("Request error"); // set dialog message alertDialogBuilder .setMessage(message) .setCancelable(false) .setPositiveButton(R.string.action_edit, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { // if this button is clicked, close current activity dialog.cancel(); } }) .setNegativeButton(R.string.action_cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { // if this button is clicked, just close the dialog box and do nothing dialog.cancel(); FollowFragment.this.getDialog().dismiss(); } }); // create alert dialog AlertDialog alertDialog = alertDialogBuilder.create(); alertDialog.setCanceledOnTouchOutside(false); // show it alertDialog.show(); } public void requestPermission() { mCloudManager = CloudFactory.getManager(getActivity(), new Following(mTvEmail.getText().toString())); 
mCloudManager.addObserver(this); mCloudManager.save(); } @Override public void update(Observable observable, Object data) { switch ((ApplicationState) data) { case FOLLOWING_SAVED: progressDialog.dismiss(); mCloudManager.deleteObserver(this); this.dismiss(); break; case FOLLOWING_CANNOT_FOLLOW: progressDialog.dismiss(); mCloudManager.deleteObserver(this); openResultDialog("Follow request not saved. User has a Follower profile and cannot be followed."); break; case FOLLOWING_NOT_FOUND: progressDialog.dismiss(); mCloudManager.deleteObserver(this); openResultDialog("Follow request not saved. User not found"); break; case FOLLOWING_NOT_SAVED: progressDialog.dismiss(); mCloudManager.deleteObserver(this); openResultDialog("Follow request not saved"); break; case ACCESS_UNAUTHORIZED: break; case NO_INTERNET: break; default: break; } } }
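/*
 * Hedged usage sketch, not part of the application's sources: one way a hosting activity could
 * display the FollowFragment above and receive its callback. "HostActivity" and its method are
 * hypothetical; the sketch relies only on the setListener(...)/Listener contract defined above and
 * on the standard support-library DialogFragment.show(FragmentManager, String) call.
 */
package co.carlosandresjimenez.android.gotit;

import android.support.v7.app.AppCompatActivity;
import android.util.Log;

public class HostActivity extends AppCompatActivity implements FollowFragment.Listener {

    private static final String LOG_TAG = HostActivity.class.getSimpleName();

    private void showFollowDialog() {
        FollowFragment fragment = new FollowFragment();
        fragment.setListener(this);                                  // register for onFollowRequested(...)
        fragment.show(getSupportFragmentManager(), "follow_dialog"); // show the dialog fragment
    }

    @Override
    public void onFollowRequested(String userEmail) {
        // React to the email the user asked to follow, e.g. refresh the hosting screen.
        Log.i(LOG_TAG, "Follow requested for " + userEmail);
    }
}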
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.java.decompiler.modules.decompiler.stats; import org.jetbrains.java.decompiler.code.CodeConstants; import org.jetbrains.java.decompiler.code.cfg.BasicBlock; import org.jetbrains.java.decompiler.main.DecompilerContext; import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer; import org.jetbrains.java.decompiler.main.collectors.CounterContainer; import org.jetbrains.java.decompiler.modules.decompiler.DecHelper; import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor; import org.jetbrains.java.decompiler.modules.decompiler.StatEdge; import org.jetbrains.java.decompiler.modules.decompiler.exps.VarExprent; import org.jetbrains.java.decompiler.struct.gen.VarType; import org.jetbrains.java.decompiler.util.TextBuffer; import java.util.ArrayList; import java.util.List; import java.util.Set; public final class CatchStatement extends Statement { private final List<List<String>> exctstrings = new ArrayList<>(); private final List<VarExprent> vars = new ArrayList<>(); // ***************************************************************************** // constructors // ***************************************************************************** private CatchStatement() { type = TYPE_TRYCATCH; } private CatchStatement(Statement head, Statement next, Set<Statement> setHandlers) { this(); first = head; stats.addWithKey(first, first.id); for (StatEdge edge : head.getSuccessorEdges(StatEdge.TYPE_EXCEPTION)) { Statement stat = edge.getDestination(); if (setHandlers.contains(stat)) { stats.addWithKey(stat, stat.id); exctstrings.add(new ArrayList<>(edge.getExceptions())); vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER), new VarType(CodeConstants.TYPE_OBJECT, 0, edge.getExceptions().get(0)), // FIXME: for now simply the first type. Should get the first common superclass when possible. 
DecompilerContext.getVarProcessor())); } } if (next != null) { post = next; } } // ***************************************************************************** // public methods // ***************************************************************************** public static Statement isHead(Statement head) { if (head.getLastBasicType() != LASTBASICTYPE_GENERAL) { return null; } Set<Statement> setHandlers = DecHelper.getUniquePredExceptions(head); if (!setHandlers.isEmpty()) { int hnextcount = 0; // either no statements with connection to next, or more than 1 Statement next = null; List<StatEdge> lstHeadSuccs = head.getSuccessorEdges(STATEDGE_DIRECT_ALL); if (!lstHeadSuccs.isEmpty() && lstHeadSuccs.get(0).getType() == StatEdge.TYPE_REGULAR) { next = lstHeadSuccs.get(0).getDestination(); hnextcount = 2; } for (StatEdge edge : head.getSuccessorEdges(StatEdge.TYPE_EXCEPTION)) { Statement stat = edge.getDestination(); boolean handlerok = true; if (edge.getExceptions() != null && setHandlers.contains(stat)) { if (stat.getLastBasicType() != LASTBASICTYPE_GENERAL) { handlerok = false; } else { List<StatEdge> lstStatSuccs = stat.getSuccessorEdges(STATEDGE_DIRECT_ALL); if (!lstStatSuccs.isEmpty() && lstStatSuccs.get(0).getType() == StatEdge.TYPE_REGULAR) { Statement statn = lstStatSuccs.get(0).getDestination(); if (next == null) { next = statn; } else if (next != statn) { handlerok = false; } if (handlerok) { hnextcount++; } } } } else { handlerok = false; } if (!handlerok) { setHandlers.remove(stat); } } if (hnextcount != 1 && !setHandlers.isEmpty()) { List<Statement> lst = new ArrayList<>(); lst.add(head); lst.addAll(setHandlers); for (Statement st : lst) { if (st.isMonitorEnter()) { return null; } } if (DecHelper.checkStatementExceptions(lst)) { return new CatchStatement(head, next, setHandlers); } } } return null; } @Override public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) { TextBuffer buf = new TextBuffer(); buf.append(ExprProcessor.listToJava(varDefinitions, indent, tracer)); if (isLabeled()) { buf.appendIndent(indent).append("label").append(this.id.toString()).append(":").appendLineSeparator(); tracer.incrementCurrentSourceLine(); } buf.appendIndent(indent).append("try {").appendLineSeparator(); tracer.incrementCurrentSourceLine(); buf.append(ExprProcessor.jmpWrapper(first, indent + 1, true, tracer)); buf.appendIndent(indent).append("}"); for (int i = 1; i < stats.size(); i++) { Statement stat = stats.get(i); // map first instruction storing the exception to the catch statement BasicBlock block = stat.getBasichead().getBlock(); if (!block.getSeq().isEmpty() && block.getInstruction(0).opcode == CodeConstants.opc_astore) { Integer offset = block.getOldOffset(0); if (offset > -1) tracer.addMapping(offset); } buf.append(" catch ("); List<String> exception_types = exctstrings.get(i - 1); if (exception_types.size() > 1) { // multi-catch, Java 7 style for (int exc_index = 1; exc_index < exception_types.size(); ++exc_index) { VarType exc_type = new VarType(CodeConstants.TYPE_OBJECT, 0, exception_types.get(exc_index)); String exc_type_name = ExprProcessor.getCastTypeName(exc_type); buf.append(exc_type_name).append(" | "); } } buf.append(vars.get(i - 1).toJava(indent, tracer)); buf.append(") {").appendLineSeparator(); tracer.incrementCurrentSourceLine(); buf.append(ExprProcessor.jmpWrapper(stat, indent + 1, false, tracer)).appendIndent(indent) .append("}"); } buf.appendLineSeparator(); tracer.incrementCurrentSourceLine(); return buf; } @Override public Statement getSimpleCopy() { 
CatchStatement cs = new CatchStatement(); for (List<String> exc : this.exctstrings) { cs.exctstrings.add(new ArrayList<>(exc)); cs.vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER), new VarType(CodeConstants.TYPE_OBJECT, 0, exc.get(0)), DecompilerContext.getVarProcessor())); } return cs; } // ***************************************************************************** // getter and setter methods // ***************************************************************************** public List<VarExprent> getVars() { return vars; } }
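/*
 * Illustrative only, not produced by running the decompiler: toJava(...) above prints a "try"
 * block followed by one "catch" clause per registered handler, and uses Java 7 multi-catch syntax
 * ("TypeA | TypeB e") whenever exctstrings records more than one exception type for a handler.
 * The identifiers below are made up purely to show the shape of that output.
 */
class CatchStatementOutputShape {
	void example() {
		try {
			// body of the wrapped head statement ("first")
		} catch (IllegalStateException | IllegalArgumentException e) { // one handler covering two exception types
			// body of the first handler statement
		} catch (RuntimeException e) {                                 // a second, single-type handler
			// body of the second handler statement
		}
	}
}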
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect.testing.google; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Sets.newTreeSet; import static com.google.common.collect.testing.SampleElements.Strings.AFTER_LAST; import static com.google.common.collect.testing.SampleElements.Strings.AFTER_LAST_2; import static com.google.common.collect.testing.SampleElements.Strings.BEFORE_FIRST; import static com.google.common.collect.testing.SampleElements.Strings.BEFORE_FIRST_2; import static junit.framework.Assert.assertEquals; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.collect.ContiguousSet; import com.google.common.collect.DiscreteDomain; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Range; import com.google.common.collect.Sets; import com.google.common.collect.testing.TestCollectionGenerator; import com.google.common.collect.testing.TestCollidingSetGenerator; import com.google.common.collect.testing.TestIntegerSortedSetGenerator; import com.google.common.collect.testing.TestSetGenerator; import com.google.common.collect.testing.TestStringListGenerator; import com.google.common.collect.testing.TestStringSetGenerator; import com.google.common.collect.testing.TestStringSortedSetGenerator; import com.google.common.collect.testing.TestUnhashableCollectionGenerator; import com.google.common.collect.testing.UnhashableObject; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Set; import java.util.SortedSet; /** * Generators of different types of sets and derived collections from sets. 
* * @author Kevin Bourrillion * @author Jared Levy * @author Hayward Chan */ @GwtCompatible(emulated = true) public class SetGenerators { public static class ImmutableSetCopyOfGenerator extends TestStringSetGenerator { @Override protected Set<String> create(String[] elements) { return ImmutableSet.copyOf(elements); } } public static class ImmutableSetUnsizedBuilderGenerator extends TestStringSetGenerator { @Override protected Set<String> create(String[] elements) { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); for (String e : elements) { builder.add(e); } return builder.build(); } } public static class ImmutableSetSizedBuilderGenerator extends TestStringSetGenerator { @Override protected Set<String> create(String[] elements) { ImmutableSet.Builder<String> builder = ImmutableSet.builderWithExpectedSize(Sets.newHashSet(elements).size()); for (String e : elements) { builder.add(e); } return builder.build(); } } public static class ImmutableSetTooBigBuilderGenerator extends TestStringSetGenerator { @Override protected Set<String> create(String[] elements) { ImmutableSet.Builder<String> builder = ImmutableSet.builderWithExpectedSize(Sets.newHashSet(elements).size() + 1); for (String e : elements) { builder.add(e); } return builder.build(); } } public static class ImmutableSetTooSmallBuilderGenerator extends TestStringSetGenerator { @Override protected Set<String> create(String[] elements) { ImmutableSet.Builder<String> builder = ImmutableSet.builderWithExpectedSize(Math.max(0, Sets.newHashSet(elements).size() - 1)); for (String e : elements) { builder.add(e); } return builder.build(); } } public static class ImmutableSetWithBadHashesGenerator extends TestCollidingSetGenerator // Work around a GWT compiler bug. Not explicitly listing this will // cause the createArray() method missing in the generated javascript. // TODO: Remove this once the GWT bug is fixed. implements TestCollectionGenerator<Object> { @Override public Set<Object> create(Object... 
elements) { return ImmutableSet.copyOf(elements); } } public static class DegeneratedImmutableSetGenerator extends TestStringSetGenerator { // Make sure we get what we think we're getting, or else this test // is pointless @SuppressWarnings("cast") @Override protected Set<String> create(String[] elements) { return (ImmutableSet<String>) ImmutableSet.of(elements[0], elements[0]); } } public static class ImmutableSortedSetCopyOfGenerator extends TestStringSortedSetGenerator { @Override protected SortedSet<String> create(String[] elements) { return ImmutableSortedSet.copyOf(elements); } } public static class ImmutableSortedSetHeadsetGenerator extends TestStringSortedSetGenerator { @Override protected SortedSet<String> create(String[] elements) { List<String> list = Lists.newArrayList(elements); list.add("zzz"); return ImmutableSortedSet.copyOf(list).headSet("zzy"); } } public static class ImmutableSortedSetTailsetGenerator extends TestStringSortedSetGenerator { @Override protected SortedSet<String> create(String[] elements) { List<String> list = Lists.newArrayList(elements); list.add("\0"); return ImmutableSortedSet.copyOf(list).tailSet("\0\0"); } } public static class ImmutableSortedSetSubsetGenerator extends TestStringSortedSetGenerator { @Override protected SortedSet<String> create(String[] elements) { List<String> list = Lists.newArrayList(elements); list.add("\0"); list.add("zzz"); return ImmutableSortedSet.copyOf(list).subSet("\0\0", "zzy"); } } @GwtIncompatible // NavigableSet public static class ImmutableSortedSetDescendingGenerator extends TestStringSortedSetGenerator { @Override protected SortedSet<String> create(String[] elements) { return ImmutableSortedSet.<String>reverseOrder().add(elements).build().descendingSet(); } } public static class ImmutableSortedSetExplicitComparator extends TestStringSetGenerator { private static final Comparator<String> STRING_REVERSED = Collections.reverseOrder(); @Override protected SortedSet<String> create(String[] elements) { return ImmutableSortedSet.orderedBy(STRING_REVERSED).add(elements).build(); } @Override public List<String> order(List<String> insertionOrder) { Collections.sort(insertionOrder, Collections.reverseOrder()); return insertionOrder; } } public static class ImmutableSortedSetExplicitSuperclassComparatorGenerator extends TestStringSetGenerator { private static final Comparator<Comparable<?>> COMPARABLE_REVERSED = Collections.reverseOrder(); @Override protected SortedSet<String> create(String[] elements) { return new ImmutableSortedSet.Builder<String>(COMPARABLE_REVERSED).add(elements).build(); } @Override public List<String> order(List<String> insertionOrder) { Collections.sort(insertionOrder, Collections.reverseOrder()); return insertionOrder; } } public static class ImmutableSortedSetReversedOrderGenerator extends TestStringSetGenerator { @Override protected SortedSet<String> create(String[] elements) { return ImmutableSortedSet.<String>reverseOrder() .addAll(Arrays.asList(elements).iterator()) .build(); } @Override public List<String> order(List<String> insertionOrder) { Collections.sort(insertionOrder, Collections.reverseOrder()); return insertionOrder; } } public static class ImmutableSortedSetUnhashableGenerator extends TestUnhashableSetGenerator { @Override public Set<UnhashableObject> create(UnhashableObject[] elements) { return ImmutableSortedSet.copyOf(elements); } } public static class ImmutableSetAsListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { return 
ImmutableSet.copyOf(elements).asList(); } } public static class ImmutableSortedSetAsListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { Comparator<String> comparator = createExplicitComparator(elements); ImmutableSet<String> set = ImmutableSortedSet.copyOf(comparator, Arrays.asList(elements)); return set.asList(); } } public static class ImmutableSortedSetSubsetAsListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { Comparator<String> comparator = createExplicitComparator(elements); ImmutableSortedSet.Builder<String> builder = ImmutableSortedSet.orderedBy(comparator); builder.add(BEFORE_FIRST); builder.add(elements); builder.add(AFTER_LAST); return builder.build().subSet(BEFORE_FIRST_2, AFTER_LAST).asList(); } } @GwtIncompatible // NavigableSet public static class ImmutableSortedSetDescendingAsListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { Comparator<String> comparator = createExplicitComparator(elements).reverse(); return ImmutableSortedSet.orderedBy(comparator) .add(elements) .build() .descendingSet() .asList(); } } public static class ImmutableSortedSetAsListSubListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { Comparator<String> comparator = createExplicitComparator(elements); ImmutableSortedSet.Builder<String> builder = ImmutableSortedSet.orderedBy(comparator); builder.add(BEFORE_FIRST); builder.add(elements); builder.add(AFTER_LAST); return builder.build().asList().subList(1, elements.length + 1); } } public static class ImmutableSortedSetSubsetAsListSubListGenerator extends TestStringListGenerator { @Override protected List<String> create(String[] elements) { Comparator<String> comparator = createExplicitComparator(elements); ImmutableSortedSet.Builder<String> builder = ImmutableSortedSet.orderedBy(comparator); builder.add(BEFORE_FIRST); builder.add(BEFORE_FIRST_2); builder.add(elements); builder.add(AFTER_LAST); builder.add(AFTER_LAST_2); return builder .build() .subSet(BEFORE_FIRST_2, AFTER_LAST_2) .asList() .subList(1, elements.length + 1); } } public abstract static class TestUnhashableSetGenerator extends TestUnhashableCollectionGenerator<Set<UnhashableObject>> implements TestSetGenerator<UnhashableObject> {} private static Ordering<String> createExplicitComparator(String[] elements) { // Collapse equal elements, which Ordering.explicit() doesn't support, while // maintaining the ordering by first occurrence. Set<String> elementsPlus = Sets.newLinkedHashSet(); elementsPlus.add(BEFORE_FIRST); elementsPlus.add(BEFORE_FIRST_2); elementsPlus.addAll(Arrays.asList(elements)); elementsPlus.add(AFTER_LAST); elementsPlus.add(AFTER_LAST_2); return Ordering.explicit(Lists.newArrayList(elementsPlus)); } /* * All the ContiguousSet generators below manually reject nulls here. In principle, we'd like to * defer that to Range, since it's ContiguousSet.create() that's used to create the sets. However, * that gets messy here, and we already have null tests for Range. */ /* * These generators also rely on consecutive integer inputs (not necessarily in order, but no * holes). */ // SetCreationTester has some tests that pass in duplicates. Dedup them. private static <E extends Comparable<? 
super E>> SortedSet<E> nullCheckedTreeSet(E[] elements) { SortedSet<E> set = newTreeSet(); for (E element : elements) { // Explicit null check because TreeSet wrongly accepts add(null) when empty. set.add(checkNotNull(element)); } return set; } public static class ContiguousSetGenerator extends AbstractContiguousSetGenerator { @Override protected SortedSet<Integer> create(Integer[] elements) { return checkedCreate(nullCheckedTreeSet(elements)); } } public static class ContiguousSetHeadsetGenerator extends AbstractContiguousSetGenerator { @Override protected SortedSet<Integer> create(Integer[] elements) { SortedSet<Integer> set = nullCheckedTreeSet(elements); int tooHigh = set.isEmpty() ? 0 : set.last() + 1; set.add(tooHigh); return checkedCreate(set).headSet(tooHigh); } } public static class ContiguousSetTailsetGenerator extends AbstractContiguousSetGenerator { @Override protected SortedSet<Integer> create(Integer[] elements) { SortedSet<Integer> set = nullCheckedTreeSet(elements); int tooLow = set.isEmpty() ? 0 : set.first() - 1; set.add(tooLow); return checkedCreate(set).tailSet(tooLow + 1); } } public static class ContiguousSetSubsetGenerator extends AbstractContiguousSetGenerator { @Override protected SortedSet<Integer> create(Integer[] elements) { SortedSet<Integer> set = nullCheckedTreeSet(elements); if (set.isEmpty()) { /* * The (tooLow + 1, tooHigh) arguments below would be invalid because tooLow would be * greater than tooHigh. */ return ContiguousSet.create(Range.openClosed(0, 1), DiscreteDomain.integers()).subSet(0, 1); } int tooHigh = set.last() + 1; int tooLow = set.first() - 1; set.add(tooHigh); set.add(tooLow); return checkedCreate(set).subSet(tooLow + 1, tooHigh); } } @GwtIncompatible // NavigableSet public static class ContiguousSetDescendingGenerator extends AbstractContiguousSetGenerator { @Override protected SortedSet<Integer> create(Integer[] elements) { return checkedCreate(nullCheckedTreeSet(elements)).descendingSet(); } /** Sorts the elements in reverse natural order. */ @Override public List<Integer> order(List<Integer> insertionOrder) { Collections.sort(insertionOrder, Ordering.natural().reverse()); return insertionOrder; } } private abstract static class AbstractContiguousSetGenerator extends TestIntegerSortedSetGenerator { protected final ContiguousSet<Integer> checkedCreate(SortedSet<Integer> elementsSet) { List<Integer> elements = newArrayList(elementsSet); /* * A ContiguousSet can't have holes. If a test demands a hole, it should be changed so that it * doesn't need one, or it should be suppressed for ContiguousSet. */ for (int i = 0; i < elements.size() - 1; i++) { assertEquals(elements.get(i) + 1, (int) elements.get(i + 1)); } Range<Integer> range = elements.isEmpty() ? Range.closedOpen(0, 0) : Range.encloseAll(elements); return ContiguousSet.create(range, DiscreteDomain.integers()); } } }
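/*
 * Hedged usage sketch, not one of Guava's own test classes: generators such as
 * ImmutableSetCopyOfGenerator above are normally handed to the guava-testlib suite builders. The
 * suite name and the particular feature flags below are illustrative choices, not a claim about
 * how Guava's real collection test suites are configured.
 */
import com.google.common.collect.testing.SetTestSuiteBuilder;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.google.SetGenerators;
import junit.framework.Test;
import junit.framework.TestCase;

public class ImmutableSetCopyOfSuiteSketch extends TestCase {
  // Builds a JUnit 3 style suite that exercises ImmutableSet.copyOf(...) through the generator above.
  public static Test suite() {
    return SetTestSuiteBuilder.using(new SetGenerators.ImmutableSetCopyOfGenerator())
        .named("ImmutableSet.copyOf (sketch)")
        .withFeatures(CollectionSize.ANY, CollectionFeature.KNOWN_ORDER)
        .createTestSuite();
  }
}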
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.webapp; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.Callable; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.ws.rs.core.MediaType; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.KerberosTestUtils; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.DelegationToken; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.JerseyTestBase; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.servlet.GuiceServletContextListener; import com.google.inject.servlet.ServletModule; import 
com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.ClientResponse.Status; import com.sun.jersey.api.client.filter.LoggingFilter; import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; import com.sun.jersey.test.framework.WebAppDescriptor; @RunWith(Parameterized.class) public class TestRMWebServicesDelegationTokens extends JerseyTestBase { private static File testRootDir; private static File httpSpnegoKeytabFile = new File( KerberosTestUtils.getKeytabFile()); private static String httpSpnegoPrincipal = KerberosTestUtils .getServerPrincipal(); private static MiniKdc testMiniKDC; private static MockRM rm; private Injector injector; private boolean isKerberosAuth = false; // Make sure the test uses the published header string final String yarnTokenHeader = "Hadoop-YARN-RM-Delegation-Token"; @Singleton public static class TestKerberosAuthFilter extends AuthenticationFilter { @Override protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException { Properties properties = super.getConfiguration(configPrefix, filterConfig); properties.put(KerberosAuthenticationHandler.PRINCIPAL, httpSpnegoPrincipal); properties.put(KerberosAuthenticationHandler.KEYTAB, httpSpnegoKeytabFile.getAbsolutePath()); properties.put(AuthenticationFilter.AUTH_TYPE, "kerberos"); return properties; } } @Singleton public static class TestSimpleAuthFilter extends AuthenticationFilter { @Override protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException { Properties properties = super.getConfiguration(configPrefix, filterConfig); properties.put(KerberosAuthenticationHandler.PRINCIPAL, httpSpnegoPrincipal); properties.put(KerberosAuthenticationHandler.KEYTAB, httpSpnegoKeytabFile.getAbsolutePath()); properties.put(AuthenticationFilter.AUTH_TYPE, "simple"); properties.put(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false"); return properties; } } private class TestServletModule extends ServletModule { public Configuration rmconf = new Configuration(); @Override protected void configureServlets() { bind(JAXBContextResolver.class); bind(RMWebServices.class); bind(GenericExceptionHandler.class); Configuration rmconf = new Configuration(); rmconf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS); rmconf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class); rmconf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); rm = new MockRM(rmconf); bind(ResourceManager.class).toInstance(rm); if (isKerberosAuth == true) { filter("/*").through(TestKerberosAuthFilter.class); } else { filter("/*").through(TestSimpleAuthFilter.class); } serve("/*").with(GuiceContainer.class); } } private Injector getSimpleAuthInjector() { return Guice.createInjector(new TestServletModule() { @Override protected void configureServlets() { isKerberosAuth = false; rmconf.set( CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple"); super.configureServlets(); } }); } private Injector getKerberosAuthInjector() { return Guice.createInjector(new TestServletModule() { @Override protected void configureServlets() { isKerberosAuth = true; rmconf.set( CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal); rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, httpSpnegoKeytabFile.getAbsolutePath()); 
rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal); rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, httpSpnegoKeytabFile.getAbsolutePath()); super.configureServlets(); } }); } public class GuiceServletConfig extends GuiceServletContextListener { @Override protected Injector getInjector() { return injector; } } @Parameters public static Collection<Object[]> guiceConfigs() { return Arrays.asList(new Object[][] { { 0 }, { 1 } }); } public TestRMWebServicesDelegationTokens(int run) throws Exception { super(new WebAppDescriptor.Builder( "org.apache.hadoop.yarn.server.resourcemanager.webapp") .contextListenerClass(GuiceServletConfig.class) .filterClass(com.google.inject.servlet.GuiceFilter.class) .contextPath("jersey-guice-filter").servletPath("/").build()); switch (run) { case 0: default: injector = getKerberosAuthInjector(); break; case 1: injector = getSimpleAuthInjector(); break; } } @BeforeClass public static void setupKDC() throws Exception { testRootDir = new File("target", TestRMWebServicesDelegationTokens.class.getName() + "-root"); testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir); testMiniKDC.start(); testMiniKDC.createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost", "client", "client2", "client3"); } @Before @Override public void setUp() throws Exception { super.setUp(); httpSpnegoKeytabFile.deleteOnExit(); testRootDir.deleteOnExit(); Configuration conf = new Configuration(); conf.set("hadoop.security.authentication", "kerberos"); UserGroupInformation.setConfiguration(conf); } @AfterClass public static void shutdownKdc() { if (testMiniKDC != null) { testMiniKDC.stop(); } } @After @Override public void tearDown() throws Exception { rm.stop(); super.tearDown(); UserGroupInformation.setConfiguration(new Configuration()); } // Simple test - try to create a delegation token via web services and check // to make sure we get back a valid token. Validate token using RM function // calls. 
It should only succeed with the kerberos filter @Test public void testCreateDelegationToken() throws Exception { rm.start(); this.client().addFilter(new LoggingFilter(System.out)); final String renewer = "test-renewer"; String jsonBody = "{ \"renewer\" : \"" + renewer + "\" }"; String xmlBody = "<delegation-token><renewer>" + renewer + "</renewer></delegation-token>"; String[] mediaTypes = { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }; Map<String, String> bodyMap = new HashMap<String, String>(); bodyMap.put(MediaType.APPLICATION_JSON, jsonBody); bodyMap.put(MediaType.APPLICATION_XML, xmlBody); for (final String mediaType : mediaTypes) { final String body = bodyMap.get(mediaType); for (final String contentType : mediaTypes) { if (isKerberosAuth == true) { verifyKerberosAuthCreate(mediaType, contentType, body, renewer); } else { verifySimpleAuthCreate(mediaType, contentType, body); } } } rm.stop(); return; } private void verifySimpleAuthCreate(String mediaType, String contentType, String body) { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").queryParam("user.name", "testuser") .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); } private void verifyKerberosAuthCreate(String mType, String cType, String reqBody, String renUser) throws Exception { final String mediaType = mType; final String contentType = cType; final String body = reqBody; final String renewer = renUser; KerberosTestUtils.doAsClient(new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(body, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); assertFalse(tok.getToken().isEmpty()); Token<RMDelegationTokenIdentifier> token = new Token<RMDelegationTokenIdentifier>(); token.decodeFromUrlString(tok.getToken()); assertEquals(renewer, token.decodeIdentifier().getRenewer().toString()); assertValidRMToken(tok.getToken()); DelegationToken dtoken = new DelegationToken(); response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dtoken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); tok = getDelegationTokenFromResponse(response); assertFalse(tok.getToken().isEmpty()); token = new Token<RMDelegationTokenIdentifier>(); token.decodeFromUrlString(tok.getToken()); assertEquals("", token.decodeIdentifier().getRenewer().toString()); assertValidRMToken(tok.getToken()); return null; } }); } // Test to verify renew functionality - create a token and then try to renew // it. 
The renewer should succeed; owner and third user should fail @Test public void testRenewDelegationToken() throws Exception { client().addFilter(new LoggingFilter(System.out)); rm.start(); final String renewer = "client2"; this.client().addFilter(new LoggingFilter(System.out)); final DelegationToken dummyToken = new DelegationToken(); dummyToken.setRenewer(renewer); String[] mediaTypes = { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }; for (final String mediaType : mediaTypes) { for (final String contentType : mediaTypes) { if (isKerberosAuth == false) { verifySimpleAuthRenew(mediaType, contentType); continue; } // test "client" and client2" trying to renew "client" token final DelegationToken responseToken = KerberosTestUtils.doAsClient(new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dummyToken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); assertFalse(tok.getToken().isEmpty()); String body = generateRenewTokenBody(mediaType, tok.getToken()); response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").path("expiration") .header(yarnTokenHeader, tok.getToken()) .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); return tok; } }); KerberosTestUtils.doAs(renewer, new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { // renew twice so that we can confirm that the // expiration time actually changes long oldExpirationTime = Time.now(); assertValidRMToken(responseToken.getToken()); String body = generateRenewTokenBody(mediaType, responseToken.getToken()); ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").path("expiration") .header(yarnTokenHeader, responseToken.getToken()) .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); String message = "Expiration time not as expected: old = " + oldExpirationTime + "; new = " + tok.getNextExpirationTime(); assertTrue(message, tok.getNextExpirationTime() > oldExpirationTime); oldExpirationTime = tok.getNextExpirationTime(); // artificial sleep to ensure we get a different expiration time Thread.sleep(1000); response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").path("expiration") .header(yarnTokenHeader, responseToken.getToken()) .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); tok = getDelegationTokenFromResponse(response); message = "Expiration time not as expected: old = " + oldExpirationTime + "; new = " + tok.getNextExpirationTime(); assertTrue(message, tok.getNextExpirationTime() > oldExpirationTime); return tok; } }); // test unauthorized user renew attempt KerberosTestUtils.doAs("client3", new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { String body = generateRenewTokenBody(mediaType, responseToken.getToken()); ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").path("expiration") 
.header(yarnTokenHeader, responseToken.getToken()) .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); return null; } }); // test bad request - incorrect format, empty token string and random // token string KerberosTestUtils.doAsClient(new Callable<Void>() { @Override public Void call() throws Exception { String token = "TEST_TOKEN_STRING"; String body = ""; if (mediaType.equals(MediaType.APPLICATION_JSON)) { body = "{\"token\": \"" + token + "\" }"; } else { body = "<delegation-token><token>" + token + "</token></delegation-token>"; } // missing token header ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").path("expiration") .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); return null; } }); } } rm.stop(); return; } private void verifySimpleAuthRenew(String mediaType, String contentType) { String token = "TEST_TOKEN_STRING"; String body = ""; // contents of body don't matter because the request processing shouldn't // get that far if (mediaType.equals(MediaType.APPLICATION_JSON)) { body = "{\"token\": \"" + token + "\" }"; body = "{\"abcd\": \"test-123\" }"; } else { body = "<delegation-token><token>" + token + "</token></delegation-token>"; body = "<delegation-token><xml>abcd</xml></delegation-token>"; } ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").queryParam("user.name", "testuser") .accept(contentType).entity(body, mediaType) .post(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); } // Test to verify cancel functionality - create a token and then try to cancel // it. 
The owner and renewer should succeed; third user should fail @Test public void testCancelDelegationToken() throws Exception { rm.start(); this.client().addFilter(new LoggingFilter(System.out)); if (isKerberosAuth == false) { verifySimpleAuthCancel(); return; } final DelegationToken dtoken = new DelegationToken(); String renewer = "client2"; dtoken.setRenewer(renewer); String[] mediaTypes = { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }; for (final String mediaType : mediaTypes) { for (final String contentType : mediaTypes) { // owner should be able to cancel delegation token KerberosTestUtils.doAsClient(new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dtoken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, tok.getToken()).accept(contentType) .delete(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); assertTokenCancelled(tok.getToken()); return null; } }); // renewer should be able to cancel token final DelegationToken tmpToken = KerberosTestUtils.doAsClient(new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dtoken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); return tok; } }); KerberosTestUtils.doAs(renewer, new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, tmpToken.getToken()) .accept(contentType).delete(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); assertTokenCancelled(tmpToken.getToken()); return null; } }); // third user should not be able to cancel token final DelegationToken tmpToken2 = KerberosTestUtils.doAsClient(new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dtoken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); return tok; } }); KerberosTestUtils.doAs("client3", new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, tmpToken2.getToken()) .accept(contentType).delete(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); assertValidRMToken(tmpToken2.getToken()); return null; } }); testCancelTokenBadRequests(mediaType, contentType); } } rm.stop(); return; } private void testCancelTokenBadRequests(String mType, String cType) throws Exception { final String mediaType = mType; final String contentType = cType; final DelegationToken dtoken = new DelegationToken(); String renewer = "client2"; dtoken.setRenewer(renewer); // bad request(invalid 
header value) KerberosTestUtils.doAsClient(new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, "random-string").accept(contentType) .delete(ClientResponse.class); assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); return null; } }); // bad request(missing header) KerberosTestUtils.doAsClient(new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .delete(ClientResponse.class); assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); return null; } }); // bad request(cancelled token) final DelegationToken tmpToken = KerberosTestUtils.doAsClient(new Callable<DelegationToken>() { @Override public DelegationToken call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").accept(contentType) .entity(dtoken, mediaType).post(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); DelegationToken tok = getDelegationTokenFromResponse(response); return tok; } }); KerberosTestUtils.doAs(renewer, new Callable<Void>() { @Override public Void call() throws Exception { ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, tmpToken.getToken()).accept(contentType) .delete(ClientResponse.class); assertEquals(Status.OK, response.getClientResponseStatus()); response = resource().path("ws").path("v1").path("cluster") .path("delegation-token") .header(yarnTokenHeader, tmpToken.getToken()).accept(contentType) .delete(ClientResponse.class); assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); return null; } }); } private void verifySimpleAuthCancel() { // contents of header don't matter; request should never get that far ClientResponse response = resource().path("ws").path("v1").path("cluster") .path("delegation-token").queryParam("user.name", "testuser") .header(RMWebServices.DELEGATION_TOKEN_HEADER, "random") .delete(ClientResponse.class); assertEquals(Status.FORBIDDEN, response.getClientResponseStatus()); } private DelegationToken getDelegationTokenFromResponse(ClientResponse response) throws IOException, ParserConfigurationException, SAXException, JSONException { if (response.getType().toString().equals(MediaType.APPLICATION_JSON)) { return getDelegationTokenFromJson(response.getEntity(JSONObject.class)); } return getDelegationTokenFromXML(response.getEntity(String.class)); } public static DelegationToken getDelegationTokenFromXML(String tokenXML) throws IOException, ParserConfigurationException, SAXException { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(tokenXML)); Document dom = db.parse(is); NodeList nodes = dom.getElementsByTagName("delegation-token"); assertEquals("incorrect number of elements", 1, nodes.getLength()); Element element = (Element) nodes.item(0); DelegationToken ret = new DelegationToken(); String token = WebServicesTestUtils.getXmlString(element, "token"); if (token != null) { ret.setToken(token); } else { long expiration = WebServicesTestUtils.getXmlLong(element, "expiration-time"); ret.setNextExpirationTime(expiration); } return ret; } public static 
DelegationToken getDelegationTokenFromJson(JSONObject json) throws JSONException { DelegationToken ret = new DelegationToken(); if (json.has("token")) { ret.setToken(json.getString("token")); } else if (json.has("expiration-time")) { ret.setNextExpirationTime(json.getLong("expiration-time")); } return ret; } private void assertValidRMToken(String encodedToken) throws IOException { Token<RMDelegationTokenIdentifier> realToken = new Token<RMDelegationTokenIdentifier>(); realToken.decodeFromUrlString(encodedToken); RMDelegationTokenIdentifier ident = rm.getRMContext() .getRMDelegationTokenSecretManager().decodeTokenIdentifier(realToken); rm.getRMContext().getRMDelegationTokenSecretManager() .verifyToken(ident, realToken.getPassword()); assertTrue(rm.getRMContext().getRMDelegationTokenSecretManager() .getAllTokens().containsKey(ident)); } private void assertTokenCancelled(String encodedToken) throws Exception { Token<RMDelegationTokenIdentifier> realToken = new Token<RMDelegationTokenIdentifier>(); realToken.decodeFromUrlString(encodedToken); RMDelegationTokenIdentifier ident = rm.getRMContext() .getRMDelegationTokenSecretManager().decodeTokenIdentifier(realToken); boolean exceptionCaught = false; try { rm.getRMContext().getRMDelegationTokenSecretManager() .verifyToken(ident, realToken.getPassword()); } catch (InvalidToken it) { exceptionCaught = true; } assertTrue("InvalidToken exception not thrown", exceptionCaught); assertFalse(rm.getRMContext().getRMDelegationTokenSecretManager() .getAllTokens().containsKey(ident)); } private static String generateRenewTokenBody(String mediaType, String token) { String body = ""; if (mediaType.equals(MediaType.APPLICATION_JSON)) { body = "{\"token\": \"" + token + "\" }"; } else { body = "<delegation-token><token>" + token + "</token></delegation-token>"; } return body; } }
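/*
 * Illustrative sketch (not part of the test above; the class name and command-line
 * usage are hypothetical). It shows how a caller could decode the URL-safe token
 * string returned in the "token" field of the RM REST response, using the same
 * Token / RMDelegationTokenIdentifier APIs the test exercises.
 */
import java.io.IOException;

import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;

public class DelegationTokenDecodeSketch {
  public static void main(String[] args) throws IOException {
    // args[0] is expected to hold the encoded token string copied from the response
    Token<RMDelegationTokenIdentifier> token = new Token<RMDelegationTokenIdentifier>();
    token.decodeFromUrlString(args[0]);
    RMDelegationTokenIdentifier ident = token.decodeIdentifier();
    System.out.println("kind    = " + token.getKind());
    System.out.println("owner   = " + ident.getOwner());
    System.out.println("renewer = " + ident.getRenewer());
  }
}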
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hwpf.usermodel; import java.io.IOException; import junit.framework.TestCase; import org.apache.poi.hwpf.HWPFDocument; import org.apache.poi.hwpf.HWPFTestDataSamples; import org.apache.poi.hwpf.model.ListLevel; /** * Tests for our handling of lists */ public final class TestLists extends TestCase { public void testBasics() { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); assertEquals(40, r.numParagraphs()); assertEquals("Heading Level 1\r", r.getParagraph(0).text()); assertEquals("This document has different lists in it for testing\r", r.getParagraph(1).text()); assertEquals("The end!\r", r.getParagraph(38).text()); assertEquals("\r", r.getParagraph(39).text()); assertEquals(0, r.getParagraph(0).getLvl()); assertEquals(9, r.getParagraph(1).getLvl()); assertEquals(9, r.getParagraph(38).getLvl()); assertEquals(9, r.getParagraph(39).getLvl()); } public void testUnorderedLists() { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); assertEquals(40, r.numParagraphs()); // Normal bullet points assertEquals("This document has different lists in it for testing\r", r.getParagraph(1).text()); assertEquals("Unordered list 1\r", r.getParagraph(2).text()); assertEquals("UL 2\r", r.getParagraph(3).text()); assertEquals("UL 3\r", r.getParagraph(4).text()); assertEquals("Next up is an ordered list:\r", r.getParagraph(5).text()); assertEquals(9, r.getParagraph(1).getLvl()); assertEquals(9, r.getParagraph(2).getLvl()); assertEquals(9, r.getParagraph(3).getLvl()); assertEquals(9, r.getParagraph(4).getLvl()); assertEquals(9, r.getParagraph(5).getLvl()); assertEquals(0, r.getParagraph(1).getIlvl()); assertEquals(0, r.getParagraph(2).getIlvl()); assertEquals(0, r.getParagraph(3).getIlvl()); assertEquals(0, r.getParagraph(4).getIlvl()); assertEquals(0, r.getParagraph(5).getIlvl()); // Tick bullets assertEquals("Now for an un-ordered list with a different bullet style:\r", r.getParagraph(9).text()); assertEquals("Tick 1\r", r.getParagraph(10).text()); assertEquals("Tick 2\r", r.getParagraph(11).text()); assertEquals("Multi-level un-ordered list:\r", r.getParagraph(12).text()); assertEquals(9, r.getParagraph(9).getLvl()); assertEquals(9, r.getParagraph(10).getLvl()); assertEquals(9, r.getParagraph(11).getLvl()); assertEquals(9, r.getParagraph(12).getLvl()); assertEquals(0, r.getParagraph(9).getIlvl()); assertEquals(0, r.getParagraph(10).getIlvl()); assertEquals(0, r.getParagraph(11).getIlvl()); assertEquals(0, r.getParagraph(12).getIlvl()); // TODO Test for tick not bullet } public void testOrderedLists() { HWPFDocument doc = 
HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); assertEquals(40, r.numParagraphs()); assertEquals("Next up is an ordered list:\r", r.getParagraph(5).text()); assertEquals("Ordered list 1\r", r.getParagraph(6).text()); assertEquals("OL 2\r", r.getParagraph(7).text()); assertEquals("OL 3\r", r.getParagraph(8).text()); assertEquals("Now for an un-ordered list with a different bullet style:\r", r.getParagraph(9).text()); assertEquals(9, r.getParagraph(5).getLvl()); assertEquals(9, r.getParagraph(6).getLvl()); assertEquals(9, r.getParagraph(7).getLvl()); assertEquals(9, r.getParagraph(8).getLvl()); assertEquals(9, r.getParagraph(9).getLvl()); assertEquals(0, r.getParagraph(5).getIlvl()); assertEquals(0, r.getParagraph(6).getIlvl()); assertEquals(0, r.getParagraph(7).getIlvl()); assertEquals(0, r.getParagraph(8).getIlvl()); assertEquals(0, r.getParagraph(9).getIlvl()); } public void testMultiLevelLists() { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); assertEquals(40, r.numParagraphs()); assertEquals("Multi-level un-ordered list:\r", r.getParagraph(12).text()); assertEquals("ML 1:1\r", r.getParagraph(13).text()); assertEquals("ML 1:2\r", r.getParagraph(14).text()); assertEquals("ML 2:1\r", r.getParagraph(15).text()); assertEquals("ML 2:2\r", r.getParagraph(16).text()); assertEquals("ML 2:3\r", r.getParagraph(17).text()); assertEquals("ML 3:1\r", r.getParagraph(18).text()); assertEquals("ML 4:1\r", r.getParagraph(19).text()); assertEquals("ML 5:1\r", r.getParagraph(20).text()); assertEquals("ML 5:2\r", r.getParagraph(21).text()); assertEquals("ML 2:4\r", r.getParagraph(22).text()); assertEquals("ML 1:3\r", r.getParagraph(23).text()); assertEquals("Multi-level ordered list:\r", r.getParagraph(24).text()); assertEquals("OL 1\r", r.getParagraph(25).text()); assertEquals("OL 2\r", r.getParagraph(26).text()); assertEquals("OL 2.1\r", r.getParagraph(27).text()); assertEquals("OL 2.2\r", r.getParagraph(28).text()); assertEquals("OL 2.2.1\r", r.getParagraph(29).text()); assertEquals("OL 2.2.2\r", r.getParagraph(30).text()); assertEquals("OL 2.2.2.1\r", r.getParagraph(31).text()); assertEquals("OL 2.2.3\r", r.getParagraph(32).text()); assertEquals("OL 3\r", r.getParagraph(33).text()); assertEquals("Finally we want some indents, to tell the difference\r", r.getParagraph(34).text()); for(int i=12; i<=34; i++) { assertEquals(9, r.getParagraph(i).getLvl()); } assertEquals(0, r.getParagraph(12).getIlvl()); assertEquals(0, r.getParagraph(13).getIlvl()); assertEquals(0, r.getParagraph(14).getIlvl()); assertEquals(1, r.getParagraph(15).getIlvl()); assertEquals(1, r.getParagraph(16).getIlvl()); assertEquals(1, r.getParagraph(17).getIlvl()); assertEquals(2, r.getParagraph(18).getIlvl()); assertEquals(3, r.getParagraph(19).getIlvl()); assertEquals(4, r.getParagraph(20).getIlvl()); assertEquals(4, r.getParagraph(21).getIlvl()); assertEquals(1, r.getParagraph(22).getIlvl()); assertEquals(0, r.getParagraph(23).getIlvl()); assertEquals(0, r.getParagraph(24).getIlvl()); assertEquals(0, r.getParagraph(25).getIlvl()); assertEquals(0, r.getParagraph(26).getIlvl()); assertEquals(1, r.getParagraph(27).getIlvl()); assertEquals(1, r.getParagraph(28).getIlvl()); assertEquals(2, r.getParagraph(29).getIlvl()); assertEquals(2, r.getParagraph(30).getIlvl()); assertEquals(3, r.getParagraph(31).getIlvl()); assertEquals(2, r.getParagraph(32).getIlvl()); assertEquals(0, r.getParagraph(33).getIlvl()); assertEquals(0, r.getParagraph(34).getIlvl()); } public void 
testIndentedText() { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); assertEquals(40, r.numParagraphs()); assertEquals("Finally we want some indents, to tell the difference\r", r.getParagraph(34).text()); assertEquals("Indented once\r", r.getParagraph(35).text()); assertEquals("Indented twice\r", r.getParagraph(36).text()); assertEquals("Indented three times\r", r.getParagraph(37).text()); assertEquals("The end!\r", r.getParagraph(38).text()); assertEquals(9, r.getParagraph(34).getLvl()); assertEquals(9, r.getParagraph(35).getLvl()); assertEquals(9, r.getParagraph(36).getLvl()); assertEquals(9, r.getParagraph(37).getLvl()); assertEquals(9, r.getParagraph(38).getLvl()); assertEquals(9, r.getParagraph(39).getLvl()); assertEquals(0, r.getParagraph(34).getIlvl()); assertEquals(0, r.getParagraph(35).getIlvl()); assertEquals(0, r.getParagraph(36).getIlvl()); assertEquals(0, r.getParagraph(37).getIlvl()); assertEquals(0, r.getParagraph(38).getIlvl()); assertEquals(0, r.getParagraph(39).getIlvl()); // TODO Test the indent } public void testWriteRead() throws IOException { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); doc = HWPFTestDataSamples.writeOutAndReadBack(doc); Range r = doc.getRange(); // Check a couple at random assertEquals(4, r.getParagraph(21).getIlvl()); assertEquals(1, r.getParagraph(22).getIlvl()); assertEquals(0, r.getParagraph(23).getIlvl()); } public void testSpecificNumberedOrderedListFeatures() throws IOException { HWPFDocument doc = HWPFTestDataSamples.openSampleFile("Lists.doc"); Range r = doc.getRange(); //these are in the numbered ordered list //26 = OL 2 //27 = OL 2.1 //28 = OL 2.2 //29 = OL 2.2.1 for (int i = 26; i < 30; i++) { Paragraph p = r.getParagraph(i); assertTrue(p.isInList()); HWPFList list = p.getList(); ListLevel level = list.getLVL((char) p.getIlvl()); assertFalse(level.isLegalNumbering()); assertEquals(-1, level.getRestart()); } Paragraph p = r.getParagraph(26); HWPFList list = p.getList(); ListLevel level = list.getLVL((char) p.getIlvl()); byte[] lvl = level.getLevelNumberingPlaceholderOffsets(); assertEquals((byte)1, lvl[0]); assertEquals((byte)0, lvl[1]); p = r.getParagraph(27); list = p.getList(); level = list.getLVL((char) p.getIlvl()); lvl = level.getLevelNumberingPlaceholderOffsets(); assertEquals((byte)1, lvl[0]); assertEquals((byte)3, lvl[1]); p = r.getParagraph(29); list = p.getList(); level = list.getLVL((char) p.getIlvl()); lvl = level.getLevelNumberingPlaceholderOffsets(); assertEquals((byte)1, lvl[0]); assertEquals((byte)3, lvl[1]); assertEquals((byte)5, lvl[2]); } }
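/*
 * Illustrative sketch (hypothetical class name, not part of the POI tests above):
 * dumping the outline level (lvl) and list level (ilvl) of every paragraph in a
 * .doc file with the same HWPF calls the tests rely on. Pass the document path
 * as the first argument.
 */
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.usermodel.Paragraph;
import org.apache.poi.hwpf.usermodel.Range;

public class ListLevelDumpSketch {
  public static void main(String[] args) throws IOException {
    try (FileInputStream fis = new FileInputStream(args[0])) {
      HWPFDocument doc = new HWPFDocument(fis);
      Range r = doc.getRange();
      for (int i = 0; i < r.numParagraphs(); i++) {
        Paragraph p = r.getParagraph(i);
        System.out.println(i + " lvl=" + p.getLvl() + " ilvl=" + p.getIlvl()
            + " inList=" + p.isInList() + " text=" + p.text().trim());
      }
    }
  }
}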
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.services; //JDK imports import java.awt.geom.Rectangle2D; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.w3c.dom.Text; import org.xml.sax.SAXException; //SIS imports import org.apache.sis.geometry.DirectPosition2D; import org.apache.sis.geometry.Envelope2D; import org.apache.sis.distance.DistanceUtils; import org.apache.sis.index.tree.GeoRSSData; import org.apache.sis.index.tree.QuadTree; import org.apache.sis.index.tree.QuadTreeData; import org.apache.sis.index.tree.QuadTreeReader; import org.apache.sis.index.tree.QuadTreeWriter; //ROME imports import com.sun.syndication.feed.WireFeed; import com.sun.syndication.feed.module.georss.GeoRSSModule; import com.sun.syndication.feed.rss.Channel; import com.sun.syndication.feed.rss.Item; import com.sun.syndication.io.WireFeedInput; import com.sun.syndication.io.XmlReader; /** * * A location web service that loads data from GeoRSS format (configured via a * provided config.xml file), and then loads up a {@link QuadTree} with this * information, making it queryable for callers. * */ public class LocationServlet extends HttpServlet { private static final long serialVersionUID = 731743219362175102L; private QuadTree tree; private ServletContext context; private String timeToLoad; private String qtreeIdxPath; private String georssStoragePath; /** * Read GeoRSS data (location information provide sis-location-config.xml ) * and build quad-tree. 
* * @param config Servlet configuration file * @exception ServletException General exception for servlet */ @SuppressWarnings("unchecked") public void init(ServletConfig config) throws ServletException { this.context = config.getServletContext(); long startTime = 0; long endTime = 0; int capacity = -1, depth = -1; this.qtreeIdxPath = this.context .getInitParameter("org.apache.sis.services.config.qIndexPath"); this.georssStoragePath = this.context .getInitParameter("org.apache.sis.services.config.geodataPath"); if (!this.qtreeIdxPath.endsWith("/")) { this.qtreeIdxPath += "/"; } if (!this.georssStoragePath.endsWith("/")) { this.georssStoragePath += "/"; } InputStream indexStream = null; try { indexStream = new FileInputStream(qtreeIdxPath + "node_0.txt"); } catch (FileNotFoundException e) { System.out.println("[INFO] Existing qtree index at: [" + qtreeIdxPath + "] not found. Creating new index."); } if (indexStream != null) { startTime = System.currentTimeMillis(); this.tree = QuadTreeReader.readFromFile(qtreeIdxPath, "tree_config.txt", "node_0.txt"); try { indexStream.close(); } catch (IOException e) { e.printStackTrace(); } endTime = System.currentTimeMillis(); this.timeToLoad = "Quad Tree fully loaded from index files in " + Double.toString((endTime - startTime) / 1000L) + " seconds"; System.out.println("[INFO] Finished loading tree from stored index"); } else { startTime = System.currentTimeMillis(); WireFeedInput wf = new WireFeedInput(true); // read quad tree properties set in config xml file InputStream configStream = null; try { configStream = new FileInputStream(this.context .getInitParameter("org.apache.sis.services.config.filePath")); } catch (Exception e) { e.printStackTrace(); } if (configStream != null) { DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); try { DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); Document configDoc = docBuilder.parse(configStream); NodeList capacityNode = configDoc.getElementsByTagName("capacity"); if (capacityNode.item(0) != null) { capacity = Integer.parseInt(capacityNode.item(0).getFirstChild() .getNodeValue()); } NodeList depthNode = configDoc.getElementsByTagName("depth"); if (depthNode.item(0) != null) { depth = Integer.parseInt(depthNode.item(0).getFirstChild() .getNodeValue()); } this.tree = new QuadTree(capacity, depth); // TODO make this // configurable NodeList urlNodes = configDoc.getElementsByTagName("url"); for (int i = 0; i < urlNodes.getLength(); i++) { // read in georss and build tree String georssUrlStr = urlNodes.item(i).getFirstChild() .getNodeValue(); WireFeed feed = null; try { feed = wf.build(new XmlReader(new URL(georssUrlStr))); } catch (Exception e) { System.out.println("[ERROR] Error obtaining geodata url: [" + georssUrlStr + "]: Message: " + e.getMessage() + ": skipping and continuing"); continue; } Channel c = (Channel) feed; List<Item> items = (List<Item>) c.getItems(); for (Item item : items) { GeoRSSModule geoRSSModule = (GeoRSSModule) item .getModule(GeoRSSModule.GEORSS_GEORSS_URI); if (geoRSSModule == null) { geoRSSModule = (GeoRSSModule) item .getModule(GeoRSSModule.GEORSS_GML_URI); } if (geoRSSModule == null) { geoRSSModule = (GeoRSSModule) item .getModule(GeoRSSModule.GEORSS_W3CGEO_URI); } // if location from the item cannot be retrieved // then discard it if (geoRSSModule != null && geoRSSModule.getPosition() != null) { String filename = ""; if (item.getGuid() != null) { filename = cleanStr(item.getGuid().getValue()) + ".txt"; } else { filename = cleanStr(item.getLink()) + ".txt"; } 
GeoRSSData data = new GeoRSSData(filename, new DirectPosition2D( geoRSSModule.getPosition().getLongitude(), geoRSSModule.getPosition().getLatitude())); if (this.tree.insert(data)) { data.saveToFile(item, geoRSSModule, georssStoragePath); } else { System.out.println("[INFO] Unable to store data at location " + data.getLatLon().y + ", " + data.getLatLon().x + " under filename " + data.getFileName()); } } } } configStream.close(); endTime = System.currentTimeMillis(); this.timeToLoad = "Quad Tree fully loaded from retrieving GeoRSS files over the network in " + Double.toString((endTime - startTime) / 1000L) + " seconds"; QuadTreeWriter.writeTreeToFile(tree, qtreeIdxPath); } catch (ParserConfigurationException e) { e.printStackTrace(); } catch (SAXException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } } else { throw new ServletException( "Unable to read location service XML config: null!"); } } } /** * Provide GET requests for Bounding-box and Point-radius search queries. * Return search results to client in xml format. * * @param request Http Servlet Request * @param response Http Servlet Response * @exception ServletException General exception for servlet * @exception IOException General exception for I/O */ public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { long beforeTime = 0; long afterTime = 0; response.setContentType("text/xml"); PrintWriter out = response.getWriter(); String type = request.getParameter("type"); List<QuadTreeData> results = new ArrayList<QuadTreeData>(); List<String> regions = new ArrayList<String>(); if (type != null && type.equals("bbox")) { String llLat = request.getParameter("llLat"); String llLon = request.getParameter("llLon"); String urLat = request.getParameter("urLat"); String urLon = request.getParameter("urLon"); if (llLat != null && llLon != null && urLat != null && urLon != null) { try { Envelope2D bbox = new Envelope2D(new DirectPosition2D( Double.parseDouble(llLon), Double.parseDouble(llLat)), new DirectPosition2D(Double.parseDouble(urLon), Double.parseDouble(urLat))); beforeTime = System.currentTimeMillis(); results = tree.queryByBoundingBox(bbox); afterTime = System.currentTimeMillis(); // get the polygon that approximates the region Rectangle2D[] rects = bbox.toRectangles(); for (int i = 0; i < rects.length; i++) { final Rectangle2D r = rects[i]; String regionStr = (r.getMinY()) + "," + (r.getMinX()) + ","; regionStr += (r.getMaxY()) + "," + (r.getMinX()) + ","; regionStr += (r.getMaxY()) + "," + (r.getMaxX()) + ","; regionStr += (r.getMinY()) + "," + (r.getMaxX()) + ","; regionStr += (r.getMinY()) + "," + (r.getMinX()); regions.add(regionStr); } } catch (NumberFormatException ex) { System.out .println("[ERROR] Input parameters were not valid latitudes and longitudes"); } } } else if (type != null && type.equals("pointradius")) { String radius = request.getParameter("radius"); String lat = request.getParameter("lat"); String lon = request.getParameter("lon"); if (radius != null && lat != null && lon != null) { DirectPosition2D point = null; try { point = new DirectPosition2D(Double.parseDouble(lon), Double.parseDouble(lat)); } catch (NumberFormatException ex) { System.out .println("{ERROR] Input parameters were not valid latitudes and longitudes"); } double radiusKM = Double.parseDouble(radius); String regionStr = ""; for (int i = 0; i < 360; i += 10) { DirectPosition2D pt = 
DistanceUtils.getPointOnGreatCircle(point.y, point.x, radiusKM, i); regionStr += pt.y + "," + pt.x + ","; } DirectPosition2D pt = DistanceUtils.getPointOnGreatCircle(point.y, point.x, radiusKM, 0); regionStr += pt.y + "," + pt.x + ","; regions.add(regionStr.substring(0, regionStr.length() - 1)); beforeTime = System.currentTimeMillis(); results = tree.queryByPointRadius(point, radiusKM); afterTime = System.currentTimeMillis(); } } long timeSeconds = afterTime - beforeTime; // return matches from tree in xml format to client out.write(buildXML(results, regions, timeSeconds)); out.close(); } /** * Provide Post requests for build GeoRSS data html file. * * @param request Http Servlet Request * @param response Http Servlet Response * @exception ServletException General exception for servlet * @exception IOException General exception for I/O */ public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html"); PrintWriter out = response.getWriter(); String filename = request.getParameter("filename"); if (filename != null) { HashMap<String, String> map = GeoRSSData .loadFromFile(this.georssStoragePath + filename); String html = ""; if (map.get("title") != null && !map.get("title").equals("null")) { html += "<b>Title:&nbsp;</b>" + map.get("title") + "<br />"; } if (map.get("link") != null && !map.get("link").equals("null")) { html += "<b>Link:&nbsp;</b><a target='_blank' href='" + map.get("link") + "'" + ">" + map.get("link") + "</a><br />"; } if (map.get("author") != null && !map.get("author").equals("null")) { html += "<b>Author:&nbsp;</b>" + map.get("author") + "<br />"; } if (map.get("pubDate") != null && !map.get("pubDate").equals("null")) { html += "<b>Pub Date:&nbsp;</b>" + map.get("pubDate") + "<br />"; } if (map.get("description") != null && !map.get("description").equals("null")) { html += "<b>Description:&nbsp;</b>" + map.get("description") + "<br />"; } html += "<b>Lat:&nbsp;</b>" + map.get("lat") + "<br />"; html += "<b>Lon:&nbsp;</b>" + map.get("lon") + "<br />"; out.write(html); } out.close(); } /** * Builds the XML file to return to client. 
* * @param filterList list of QuadTreeData that are within the search region * @param regions the String coordinate representation of the search region * @param time the time it took to execute the query * @return XML string */ private String buildXML(final List<QuadTreeData> filterList, final List<String> regions, final long time) { DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); try { DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); Document doc = docBuilder.newDocument(); Element root = doc.createElement("root"); doc.appendChild(root); for (QuadTreeData geo : filterList) { Element item = doc.createElement("item"); Element id = doc.createElement("id"); Text idText = doc.createTextNode(geo.getFileName()); id.appendChild(idText); item.appendChild(id); Element lat = doc.createElement("lat"); Text latText = doc.createTextNode(Double.toString(geo.getLatLon().y)); lat.appendChild(latText); item.appendChild(lat); Element lon = doc.createElement("lon"); Text lonText = doc.createTextNode(Double.toString(geo.getLatLon().x)); lon.appendChild(lonText); item.appendChild(lon); root.appendChild(item); } Element timeElem = doc.createElement("time"); Text timeText = doc.createTextNode(Long.toString(time)); timeElem.appendChild(timeText); root.appendChild(timeElem); if (timeToLoad != null) { Element indexLoadTimeElem = doc.createElement("indexLoadTime"); Text indexLoadTimeText = doc.createTextNode(timeToLoad); indexLoadTimeElem.appendChild(indexLoadTimeText); root.appendChild(indexLoadTimeElem); timeToLoad = null; // Only need to send this over to the client // on initial load } Element query = doc.createElement("query"); root.appendChild(query); for (String rStr : regions) { Element region = doc.createElement("region"); Text regionText = doc.createTextNode(rStr); region.appendChild(regionText); query.appendChild(region); } try { TransformerFactory transfac = TransformerFactory.newInstance(); Transformer trans = transfac.newTransformer(); trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); trans.setOutputProperty(OutputKeys.INDENT, "yes"); StringWriter sw = new StringWriter(); StreamResult result = new StreamResult(sw); DOMSource source = new DOMSource(doc); trans.transform(source, result); return sw.toString(); } catch (TransformerConfigurationException e) { e.printStackTrace(); } catch (TransformerException e) { e.printStackTrace(); } } catch (ParserConfigurationException e) { e.printStackTrace(); } return null; } /** * Replace all non-alphanumeric characters with empty strings. * * @return String without any non-alphanumeric characters */ private static String cleanStr(String id) { String cleanedID = id; return cleanedID.replaceAll("[^a-zA-Z0-9]", ""); } }
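/*
 * Illustrative sketch (the host and servlet mount point are assumptions): issuing
 * the two query types accepted by LocationServlet.doGet() above, a bounding-box
 * search and a point-radius search, as plain HTTP GETs. The response is the XML
 * built by buildXML(): <item> elements with <id>, <lat> and <lon>, plus <time>
 * and the <query>/<region> polygon.
 */
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class LocationQuerySketch {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8080/location"; // hypothetical deployment URL
    String[] queries = {
        base + "?type=bbox&llLat=30.0&llLon=-120.0&urLat=45.0&urLon=-100.0",
        base + "?type=pointradius&lat=37.0&lon=-110.0&radius=500"
    };
    for (String query : queries) {
      try (BufferedReader in = new BufferedReader(
          new InputStreamReader(new URL(query).openStream(), StandardCharsets.UTF_8))) {
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line);
        }
      }
    }
  }
}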
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.system.gui; import java.awt.BorderLayout; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JCheckBox; import javax.swing.JPanel; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.config.Argument; import org.apache.jmeter.config.Arguments; import org.apache.jmeter.config.gui.ArgumentsPanel; import org.apache.jmeter.gui.util.FilePanelEntry; import org.apache.jmeter.gui.util.VerticalPanel; import org.apache.jmeter.protocol.system.SystemSampler; import org.apache.jmeter.samplers.gui.AbstractSamplerGui; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.gui.JLabeledTextField; import org.apache.jorphan.gui.ObjectTableModel; import org.apache.jorphan.reflect.Functor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * GUI for {@link SystemSampler} */ public class SystemSamplerGui extends AbstractSamplerGui implements ItemListener { private static final Logger log = LoggerFactory.getLogger(SystemSamplerGui.class); /** * */ private static final long serialVersionUID = -2413845772703695934L; private JCheckBox checkReturnCode; private JLabeledTextField desiredReturnCode; private final FilePanelEntry stdin = new FilePanelEntry(JMeterUtils.getResString("system_sampler_stdin")); // $NON-NLS-1$ private final FilePanelEntry stdout = new FilePanelEntry(JMeterUtils.getResString("system_sampler_stdout")); // $NON-NLS-1$ private final FilePanelEntry stderr = new FilePanelEntry(JMeterUtils.getResString("system_sampler_stderr")); // $NON-NLS-1$ private final FilePanelEntry directory = new FilePanelEntry(JMeterUtils.getResString("directory_field_title"), true); // $NON-NLS-1$ private final FilePanelEntry command = new FilePanelEntry(JMeterUtils.getResString("command_field_title")); // $NON-NLS-1$ private JLabeledTextField timeout; private ArgumentsPanel argsPanel; private ArgumentsPanel envPanel; /** * Constructor for JavaTestSamplerGui */ public SystemSamplerGui() { super(); init(); } @Override public String getLabelResource() { return "system_sampler_title"; // $NON-NLS-1$ } @Override public String getStaticLabel() { return JMeterUtils.getResString(getLabelResource()); } /** * Initialize the GUI components and layout. */ private void init() { // WARNING: called from ctor so must not be overridden (i.e. 
must be private or final) setLayout(new BorderLayout()); setBorder(makeBorder()); add(makeTitlePanel(), BorderLayout.NORTH); add(makeCommandPanel(), BorderLayout.CENTER); JPanel streamsCodePane = new JPanel(new BorderLayout()); streamsCodePane.add(makeStreamsPanel(), BorderLayout.NORTH); streamsCodePane.add(makeReturnCodePanel(), BorderLayout.CENTER); streamsCodePane.add(makeTimeoutPanel(), BorderLayout.SOUTH); add(streamsCodePane, BorderLayout.SOUTH); } /* Implements JMeterGuiComponent.createTestElement() */ @Override public TestElement createTestElement() { SystemSampler sampler = new SystemSampler(); modifyTestElement(sampler); return sampler; } @Override public void modifyTestElement(TestElement sampler) { super.configureTestElement(sampler); SystemSampler systemSampler = (SystemSampler)sampler; systemSampler.setCheckReturnCode(checkReturnCode.isSelected()); if(checkReturnCode.isSelected()) { if(!StringUtils.isEmpty(desiredReturnCode.getText())) { systemSampler.setExpectedReturnCode(Integer.parseInt(desiredReturnCode.getText())); } else { systemSampler.setExpectedReturnCode(SystemSampler.DEFAULT_RETURN_CODE); } } else { systemSampler.setExpectedReturnCode(SystemSampler.DEFAULT_RETURN_CODE); } systemSampler.setCommand(command.getFilename()); systemSampler.setArguments((Arguments)argsPanel.createTestElement()); systemSampler.setEnvironmentVariables((Arguments)envPanel.createTestElement()); systemSampler.setDirectory(directory.getFilename()); systemSampler.setStdin(stdin.getFilename()); systemSampler.setStdout(stdout.getFilename()); systemSampler.setStderr(stderr.getFilename()); if(!StringUtils.isEmpty(timeout.getText())) { try { systemSampler.setTimout(Long.parseLong(timeout.getText())); } catch (NumberFormatException e) { log.error("Error parsing timeout field value:"+timeout.getText(), e); } } } /* Overrides AbstractJMeterGuiComponent.configure(TestElement) */ @Override public void configure(TestElement el) { super.configure(el); SystemSampler systemSampler = (SystemSampler) el; checkReturnCode.setSelected(systemSampler.getCheckReturnCode()); desiredReturnCode.setText(Integer.toString(systemSampler.getExpectedReturnCode())); desiredReturnCode.setEnabled(checkReturnCode.isSelected()); command.setFilename(systemSampler.getCommand()); argsPanel.configure(systemSampler.getArguments()); envPanel.configure(systemSampler.getEnvironmentVariables()); directory.setFilename(systemSampler.getDirectory()); stdin.setFilename(systemSampler.getStdin()); stdout.setFilename(systemSampler.getStdout()); stderr.setFilename(systemSampler.getStderr()); timeout.setText(systemSampler.getTimeout() == 0L ? "": // $NON-NLS-1$ Long.toString(systemSampler.getTimeout())); // not sure if replace 0L to empty string is the good way. 
} /** * @return JPanel return code config */ private JPanel makeReturnCodePanel() { JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.X_AXIS)); panel.setBorder(BorderFactory.createTitledBorder( BorderFactory.createEtchedBorder(), JMeterUtils.getResString("return_code_config_box_title"))); // $NON-NLS-1$ checkReturnCode = new JCheckBox(JMeterUtils.getResString("check_return_code_title")); // $NON-NLS-1$ checkReturnCode.addItemListener(this); desiredReturnCode = new JLabeledTextField(JMeterUtils.getResString("expected_return_code_title")); // $NON-NLS-1$ desiredReturnCode.setSize(desiredReturnCode.getSize().height, 30); panel.add(checkReturnCode); panel.add(Box.createHorizontalStrut(5)); panel.add(desiredReturnCode); checkReturnCode.setSelected(true); return panel; } /** * @return JPanel timeout config */ private JPanel makeTimeoutPanel() { JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.X_AXIS)); panel.setBorder(BorderFactory.createTitledBorder( BorderFactory.createEtchedBorder(), JMeterUtils.getResString("timeout_config_box_title"))); // $NON-NLS-1$ timeout = new JLabeledTextField(JMeterUtils.getResString("timeout_title")); // $NON-NLS-1$ timeout.setSize(timeout.getSize().height, 30); panel.add(timeout); return panel; } /** * @return JPanel Command + directory */ private JPanel makeCommandPanel() { JPanel cmdPanel = new JPanel(); cmdPanel.setLayout(new BoxLayout(cmdPanel, BoxLayout.X_AXIS)); JPanel cmdWkDirPane = new JPanel(new BorderLayout()); cmdWkDirPane.add(command, BorderLayout.NORTH); cmdWkDirPane.add(directory, BorderLayout.SOUTH); cmdPanel.add(cmdWkDirPane); JPanel panel = new VerticalPanel(); panel.setBorder(BorderFactory.createTitledBorder( BorderFactory.createEtchedBorder(), JMeterUtils.getResString("command_config_box_title"))); // $NON-NLS-1$ panel.add(cmdPanel, BorderLayout.NORTH); panel.add(makeArgumentsPanel(), BorderLayout.CENTER); panel.add(makeEnvironmentPanel(), BorderLayout.SOUTH); return panel; } /** * @return JPanel Arguments Panel */ private JPanel makeArgumentsPanel() { argsPanel = new ArgumentsPanel(JMeterUtils.getResString("arguments_panel_title"), null, true, false , // $NON-NLS-1$ new ObjectTableModel(new String[] { ArgumentsPanel.COLUMN_RESOURCE_NAMES_1 }, Argument.class, new Functor[] { new Functor("getValue") }, // $NON-NLS-1$ new Functor[] { new Functor("setValue") }, // $NON-NLS-1$ new Class[] {String.class })); return argsPanel; } /** * @return JPanel Environment Panel */ private JPanel makeEnvironmentPanel() { envPanel = new ArgumentsPanel(JMeterUtils.getResString("environment_panel_title")); // $NON-NLS-1$ return envPanel; } /** * @return JPanel Streams Panel */ private JPanel makeStreamsPanel() { JPanel stdPane = new JPanel(new BorderLayout()); stdPane.setBorder(BorderFactory.createTitledBorder( BorderFactory.createEtchedBorder(), JMeterUtils.getResString("command_config_std_streams_title"))); // $NON-NLS-1$ stdPane.add(stdin, BorderLayout.NORTH); stdPane.add(stdout, BorderLayout.CENTER); stdPane.add(stderr, BorderLayout.SOUTH); return stdPane; } /** * @see org.apache.jmeter.gui.AbstractJMeterGuiComponent#clearGui() */ @Override public void clearGui() { super.clearGui(); directory.clearGui(); command.clearGui(); argsPanel.clearGui(); envPanel.clearGui(); desiredReturnCode.setText(""); // $NON-NLS-1$ checkReturnCode.setSelected(false); desiredReturnCode.setEnabled(false); stdin.clearGui(); stdout.clearGui(); stderr.clearGui(); timeout.setText(""); // $NON-NLS-1$ } @Override public void 
itemStateChanged(ItemEvent e) { if(e.getSource()==checkReturnCode) { desiredReturnCode.setEnabled(e.getStateChange() == ItemEvent.SELECTED); } } }
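/*
 * Illustrative sketch (hypothetical values, not part of the GUI class above):
 * configuring a SystemSampler programmatically with the same setters that
 * modifyTestElement() calls when the GUI is saved.
 */
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.protocol.system.SystemSampler;

public class SystemSamplerConfigSketch {
  public static void main(String[] args) {
    SystemSampler sampler = new SystemSampler();
    sampler.setCommand("/bin/echo");            // command to run
    Arguments cmdArgs = new Arguments();
    cmdArgs.addArgument("", "hello");           // the GUI above only exposes the value column
    sampler.setArguments(cmdArgs);
    sampler.setEnvironmentVariables(new Arguments());
    sampler.setDirectory("/tmp");               // working directory
    sampler.setCheckReturnCode(true);
    sampler.setExpectedReturnCode(0);
    sampler.setTimout(5000L);                   // spelling matches the setter used in modifyTestElement()
  }
}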
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Autogenerated by Thrift Compiler (0.9.2) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.zeppelin.interpreter.thrift; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2018-6-21") public class AppOutputAppendEvent implements org.apache.thrift.TBase<AppOutputAppendEvent, AppOutputAppendEvent._Fields>, java.io.Serializable, Cloneable, Comparable<AppOutputAppendEvent> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AppOutputAppendEvent"); private static final org.apache.thrift.protocol.TField NOTE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("noteId", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField PARAGRAPH_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("paragraphId", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField APP_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("appId", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField INDEX_FIELD_DESC = new org.apache.thrift.protocol.TField("index", org.apache.thrift.protocol.TType.I32, (short)4); private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRING, (short)5); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new AppOutputAppendEventStandardSchemeFactory()); schemes.put(TupleScheme.class, new AppOutputAppendEventTupleSchemeFactory()); } public String noteId; // required public String paragraphId; // required public String appId; // required public int index; // required public String data; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NOTE_ID((short)1, "noteId"), PARAGRAPH_ID((short)2, "paragraphId"), APP_ID((short)3, "appId"), INDEX((short)4, "index"), DATA((short)5, "data"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // NOTE_ID return NOTE_ID; case 2: // PARAGRAPH_ID return PARAGRAPH_ID; case 3: // APP_ID return APP_ID; case 4: // INDEX return INDEX; case 5: // DATA return DATA; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __INDEX_ISSET_ID = 0; private byte __isset_bitfield = 0; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.NOTE_ID, new org.apache.thrift.meta_data.FieldMetaData("noteId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.PARAGRAPH_ID, new org.apache.thrift.meta_data.FieldMetaData("paragraphId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.APP_ID, new org.apache.thrift.meta_data.FieldMetaData("appId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.INDEX, new org.apache.thrift.meta_data.FieldMetaData("index", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); metaDataMap = Collections.unmodifiableMap(tmpMap); 
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AppOutputAppendEvent.class, metaDataMap); } public AppOutputAppendEvent() { } public AppOutputAppendEvent( String noteId, String paragraphId, String appId, int index, String data) { this(); this.noteId = noteId; this.paragraphId = paragraphId; this.appId = appId; this.index = index; setIndexIsSet(true); this.data = data; } /** * Performs a deep copy on <i>other</i>. */ public AppOutputAppendEvent(AppOutputAppendEvent other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetNoteId()) { this.noteId = other.noteId; } if (other.isSetParagraphId()) { this.paragraphId = other.paragraphId; } if (other.isSetAppId()) { this.appId = other.appId; } this.index = other.index; if (other.isSetData()) { this.data = other.data; } } public AppOutputAppendEvent deepCopy() { return new AppOutputAppendEvent(this); } @Override public void clear() { this.noteId = null; this.paragraphId = null; this.appId = null; setIndexIsSet(false); this.index = 0; this.data = null; } public String getNoteId() { return this.noteId; } public AppOutputAppendEvent setNoteId(String noteId) { this.noteId = noteId; return this; } public void unsetNoteId() { this.noteId = null; } /** Returns true if field noteId is set (has been assigned a value) and false otherwise */ public boolean isSetNoteId() { return this.noteId != null; } public void setNoteIdIsSet(boolean value) { if (!value) { this.noteId = null; } } public String getParagraphId() { return this.paragraphId; } public AppOutputAppendEvent setParagraphId(String paragraphId) { this.paragraphId = paragraphId; return this; } public void unsetParagraphId() { this.paragraphId = null; } /** Returns true if field paragraphId is set (has been assigned a value) and false otherwise */ public boolean isSetParagraphId() { return this.paragraphId != null; } public void setParagraphIdIsSet(boolean value) { if (!value) { this.paragraphId = null; } } public String getAppId() { return this.appId; } public AppOutputAppendEvent setAppId(String appId) { this.appId = appId; return this; } public void unsetAppId() { this.appId = null; } /** Returns true if field appId is set (has been assigned a value) and false otherwise */ public boolean isSetAppId() { return this.appId != null; } public void setAppIdIsSet(boolean value) { if (!value) { this.appId = null; } } public int getIndex() { return this.index; } public AppOutputAppendEvent setIndex(int index) { this.index = index; setIndexIsSet(true); return this; } public void unsetIndex() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __INDEX_ISSET_ID); } /** Returns true if field index is set (has been assigned a value) and false otherwise */ public boolean isSetIndex() { return EncodingUtils.testBit(__isset_bitfield, __INDEX_ISSET_ID); } public void setIndexIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __INDEX_ISSET_ID, value); } public String getData() { return this.data; } public AppOutputAppendEvent setData(String data) { this.data = data; return this; } public void unsetData() { this.data = null; } /** Returns true if field data is set (has been assigned a value) and false otherwise */ public boolean isSetData() { return this.data != null; } public void setDataIsSet(boolean value) { if (!value) { this.data = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case NOTE_ID: if (value == null) { unsetNoteId(); } else { setNoteId((String)value); } break; case PARAGRAPH_ID: if (value == null) { 
unsetParagraphId(); } else { setParagraphId((String)value); } break; case APP_ID: if (value == null) { unsetAppId(); } else { setAppId((String)value); } break; case INDEX: if (value == null) { unsetIndex(); } else { setIndex((Integer)value); } break; case DATA: if (value == null) { unsetData(); } else { setData((String)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case NOTE_ID: return getNoteId(); case PARAGRAPH_ID: return getParagraphId(); case APP_ID: return getAppId(); case INDEX: return Integer.valueOf(getIndex()); case DATA: return getData(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case NOTE_ID: return isSetNoteId(); case PARAGRAPH_ID: return isSetParagraphId(); case APP_ID: return isSetAppId(); case INDEX: return isSetIndex(); case DATA: return isSetData(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof AppOutputAppendEvent) return this.equals((AppOutputAppendEvent)that); return false; } public boolean equals(AppOutputAppendEvent that) { if (that == null) return false; boolean this_present_noteId = true && this.isSetNoteId(); boolean that_present_noteId = true && that.isSetNoteId(); if (this_present_noteId || that_present_noteId) { if (!(this_present_noteId && that_present_noteId)) return false; if (!this.noteId.equals(that.noteId)) return false; } boolean this_present_paragraphId = true && this.isSetParagraphId(); boolean that_present_paragraphId = true && that.isSetParagraphId(); if (this_present_paragraphId || that_present_paragraphId) { if (!(this_present_paragraphId && that_present_paragraphId)) return false; if (!this.paragraphId.equals(that.paragraphId)) return false; } boolean this_present_appId = true && this.isSetAppId(); boolean that_present_appId = true && that.isSetAppId(); if (this_present_appId || that_present_appId) { if (!(this_present_appId && that_present_appId)) return false; if (!this.appId.equals(that.appId)) return false; } boolean this_present_index = true; boolean that_present_index = true; if (this_present_index || that_present_index) { if (!(this_present_index && that_present_index)) return false; if (this.index != that.index) return false; } boolean this_present_data = true && this.isSetData(); boolean that_present_data = true && that.isSetData(); if (this_present_data || that_present_data) { if (!(this_present_data && that_present_data)) return false; if (!this.data.equals(that.data)) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_noteId = true && (isSetNoteId()); list.add(present_noteId); if (present_noteId) list.add(noteId); boolean present_paragraphId = true && (isSetParagraphId()); list.add(present_paragraphId); if (present_paragraphId) list.add(paragraphId); boolean present_appId = true && (isSetAppId()); list.add(present_appId); if (present_appId) list.add(appId); boolean present_index = true; list.add(present_index); if (present_index) list.add(index); boolean present_data = true && (isSetData()); list.add(present_data); if (present_data) list.add(data); return list.hashCode(); } @Override public int compareTo(AppOutputAppendEvent other) { if (!getClass().equals(other.getClass())) { return 
getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetNoteId()).compareTo(other.isSetNoteId()); if (lastComparison != 0) { return lastComparison; } if (isSetNoteId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.noteId, other.noteId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetParagraphId()).compareTo(other.isSetParagraphId()); if (lastComparison != 0) { return lastComparison; } if (isSetParagraphId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.paragraphId, other.paragraphId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAppId()).compareTo(other.isSetAppId()); if (lastComparison != 0) { return lastComparison; } if (isSetAppId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.appId, other.appId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetIndex()).compareTo(other.isSetIndex()); if (lastComparison != 0) { return lastComparison; } if (isSetIndex()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.index, other.index); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetData()).compareTo(other.isSetData()); if (lastComparison != 0) { return lastComparison; } if (isSetData()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("AppOutputAppendEvent("); boolean first = true; sb.append("noteId:"); if (this.noteId == null) { sb.append("null"); } else { sb.append(this.noteId); } first = false; if (!first) sb.append(", "); sb.append("paragraphId:"); if (this.paragraphId == null) { sb.append("null"); } else { sb.append(this.paragraphId); } first = false; if (!first) sb.append(", "); sb.append("appId:"); if (this.appId == null) { sb.append("null"); } else { sb.append(this.appId); } first = false; if (!first) sb.append(", "); sb.append("index:"); sb.append(this.index); first = false; if (!first) sb.append(", "); sb.append("data:"); if (this.data == null) { sb.append("null"); } else { sb.append(this.data); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class AppOutputAppendEventStandardSchemeFactory implements SchemeFactory { public AppOutputAppendEventStandardScheme getScheme() { return new AppOutputAppendEventStandardScheme(); } } private static class AppOutputAppendEventStandardScheme extends StandardScheme<AppOutputAppendEvent> { public void read(org.apache.thrift.protocol.TProtocol iprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // NOTE_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.noteId = iprot.readString(); struct.setNoteIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // PARAGRAPH_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.paragraphId = iprot.readString(); struct.setParagraphIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // APP_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.appId = iprot.readString(); struct.setAppIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // INDEX if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.index = iprot.readI32(); struct.setIndexIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // DATA if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.data = iprot.readString(); struct.setDataIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, AppOutputAppendEvent struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.noteId != null) { oprot.writeFieldBegin(NOTE_ID_FIELD_DESC); oprot.writeString(struct.noteId); oprot.writeFieldEnd(); } if (struct.paragraphId != null) { oprot.writeFieldBegin(PARAGRAPH_ID_FIELD_DESC); oprot.writeString(struct.paragraphId); oprot.writeFieldEnd(); } if (struct.appId != null) { oprot.writeFieldBegin(APP_ID_FIELD_DESC); oprot.writeString(struct.appId); oprot.writeFieldEnd(); } oprot.writeFieldBegin(INDEX_FIELD_DESC); oprot.writeI32(struct.index); oprot.writeFieldEnd(); if (struct.data != null) { oprot.writeFieldBegin(DATA_FIELD_DESC); oprot.writeString(struct.data); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class AppOutputAppendEventTupleSchemeFactory implements SchemeFactory { public AppOutputAppendEventTupleScheme getScheme() { return new AppOutputAppendEventTupleScheme(); } } private static class AppOutputAppendEventTupleScheme extends TupleScheme<AppOutputAppendEvent> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent 
struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetNoteId()) { optionals.set(0); } if (struct.isSetParagraphId()) { optionals.set(1); } if (struct.isSetAppId()) { optionals.set(2); } if (struct.isSetIndex()) { optionals.set(3); } if (struct.isSetData()) { optionals.set(4); } oprot.writeBitSet(optionals, 5); if (struct.isSetNoteId()) { oprot.writeString(struct.noteId); } if (struct.isSetParagraphId()) { oprot.writeString(struct.paragraphId); } if (struct.isSetAppId()) { oprot.writeString(struct.appId); } if (struct.isSetIndex()) { oprot.writeI32(struct.index); } if (struct.isSetData()) { oprot.writeString(struct.data); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, AppOutputAppendEvent struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(5); if (incoming.get(0)) { struct.noteId = iprot.readString(); struct.setNoteIdIsSet(true); } if (incoming.get(1)) { struct.paragraphId = iprot.readString(); struct.setParagraphIdIsSet(true); } if (incoming.get(2)) { struct.appId = iprot.readString(); struct.setAppIdIsSet(true); } if (incoming.get(3)) { struct.index = iprot.readI32(); struct.setIndexIsSet(true); } if (incoming.get(4)) { struct.data = iprot.readString(); struct.setDataIsSet(true); } } } }
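/*
 * Usage sketch (not part of the generated Thrift file above): a minimal illustration of
 * how the generated AppOutputAppendEvent struct is populated through its fluent setters
 * and round-tripped with libthrift's TSerializer / TDeserializer. The sketch class name
 * and the choice of TBinaryProtocol are assumptions made for illustration; the sketch
 * assumes it is compiled alongside the generated class.
 */
class AppOutputAppendEventUsageSketch {
    public static void main(String[] args) throws org.apache.thrift.TException {
        // Each setter returns `this`, so the struct can be built fluently.
        AppOutputAppendEvent event = new AppOutputAppendEvent()
                .setNoteId("note-1")
                .setParagraphId("paragraph-1")
                .setAppId("app-1")
                .setIndex(0)
                .setData("hello");

        // Serialize and deserialize through the binary protocol.
        org.apache.thrift.protocol.TBinaryProtocol.Factory factory =
                new org.apache.thrift.protocol.TBinaryProtocol.Factory();
        byte[] bytes = new org.apache.thrift.TSerializer(factory).serialize(event);
        AppOutputAppendEvent copy = new AppOutputAppendEvent();
        new org.apache.thrift.TDeserializer(factory).deserialize(copy, bytes);

        // equals(), hashCode(), compareTo() and toString() are generated field by field.
        System.out.println(event.equals(copy) + " " + copy);
    }
}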
/* * Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0, * and the EPL 1.0 (http://h2database.com/html/license.html). * Initial Developer: H2 Group */ package org.h2.mvstore.type; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.lang.reflect.Array; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.UUID; import org.h2.mvstore.DataUtils; import org.h2.mvstore.WriteBuffer; import org.h2.util.New; /** * A data type implementation for the most common data types, including * serializable objects. */ public class ObjectDataType implements DataType { /** * The type constants are also used as tag values. */ static final int TYPE_NULL = 0; static final int TYPE_BOOLEAN = 1; static final int TYPE_BYTE = 2; static final int TYPE_SHORT = 3; static final int TYPE_INT = 4; static final int TYPE_LONG = 5; static final int TYPE_BIG_INTEGER = 6; static final int TYPE_FLOAT = 7; static final int TYPE_DOUBLE = 8; static final int TYPE_BIG_DECIMAL = 9; static final int TYPE_CHAR = 10; static final int TYPE_STRING = 11; static final int TYPE_UUID = 12; static final int TYPE_DATE = 13; static final int TYPE_ARRAY = 14; static final int TYPE_SERIALIZED_OBJECT = 19; /** * For very common values (e.g. 0 and 1) we save space by encoding the value * in the tag. e.g. TAG_BOOLEAN_TRUE and TAG_FLOAT_0. */ static final int TAG_BOOLEAN_TRUE = 32; static final int TAG_INTEGER_NEGATIVE = 33; static final int TAG_INTEGER_FIXED = 34; static final int TAG_LONG_NEGATIVE = 35; static final int TAG_LONG_FIXED = 36; static final int TAG_BIG_INTEGER_0 = 37; static final int TAG_BIG_INTEGER_1 = 38; static final int TAG_BIG_INTEGER_SMALL = 39; static final int TAG_FLOAT_0 = 40; static final int TAG_FLOAT_1 = 41; static final int TAG_FLOAT_FIXED = 42; static final int TAG_DOUBLE_0 = 43; static final int TAG_DOUBLE_1 = 44; static final int TAG_DOUBLE_FIXED = 45; static final int TAG_BIG_DECIMAL_0 = 46; static final int TAG_BIG_DECIMAL_1 = 47; static final int TAG_BIG_DECIMAL_SMALL = 48; static final int TAG_BIG_DECIMAL_SMALL_SCALED = 49; /** * For small-values/small-arrays, we encode the value/array-length in the * tag. */ static final int TAG_INTEGER_0_15 = 64; static final int TAG_LONG_0_7 = 80; static final int TAG_STRING_0_15 = 88; static final int TAG_BYTE_ARRAY_0_15 = 104; /** * Constants for floating point synchronization. 
*/ static final int FLOAT_ZERO_BITS = Float.floatToIntBits(0.0f); static final int FLOAT_ONE_BITS = Float.floatToIntBits(1.0f); static final long DOUBLE_ZERO_BITS = Double.doubleToLongBits(0.0d); static final long DOUBLE_ONE_BITS = Double.doubleToLongBits(1.0d); static final Class<?>[] COMMON_CLASSES = { boolean.class, byte.class, short.class, char.class, int.class, long.class, float.class, double.class, Object.class, Boolean.class, Byte.class, Short.class, Character.class, Integer.class, Long.class, BigInteger.class, Float.class, Double.class, BigDecimal.class, String.class, UUID.class, Date.class }; private static final HashMap<Class<?>, Integer> COMMON_CLASSES_MAP = New .hashMap(); private AutoDetectDataType last = new StringType(this); @Override public int compare(Object a, Object b) { return last.compare(a, b); } @Override public int getMemory(Object obj) { return last.getMemory(obj); } @Override public void read(ByteBuffer buff, Object[] obj, int len, boolean key) { for (int i = 0; i < len; i++) { obj[i] = read(buff); } } @Override public void write(WriteBuffer buff, Object[] obj, int len, boolean key) { for (int i = 0; i < len; i++) { write(buff, obj[i]); } } @Override public void write(WriteBuffer buff, Object obj) { last.write(buff, obj); } private AutoDetectDataType newType(int typeId) { switch (typeId) { case TYPE_NULL: return new NullType(this); case TYPE_BOOLEAN: return new BooleanType(this); case TYPE_BYTE: return new ByteType(this); case TYPE_SHORT: return new ShortType(this); case TYPE_CHAR: return new CharacterType(this); case TYPE_INT: return new IntegerType(this); case TYPE_LONG: return new LongType(this); case TYPE_FLOAT: return new FloatType(this); case TYPE_DOUBLE: return new DoubleType(this); case TYPE_BIG_INTEGER: return new BigIntegerType(this); case TYPE_BIG_DECIMAL: return new BigDecimalType(this); case TYPE_STRING: return new StringType(this); case TYPE_UUID: return new UUIDType(this); case TYPE_DATE: return new DateType(this); case TYPE_ARRAY: return new ObjectArrayType(this); case TYPE_SERIALIZED_OBJECT: return new SerializedObjectType(this); } throw DataUtils.newIllegalStateException(DataUtils.ERROR_INTERNAL, "Unsupported type {0}", typeId); } @Override public Object read(ByteBuffer buff) { int tag = buff.get(); int typeId; if (tag <= TYPE_SERIALIZED_OBJECT) { typeId = tag; } else { switch (tag) { case TAG_BOOLEAN_TRUE: typeId = TYPE_BOOLEAN; break; case TAG_INTEGER_NEGATIVE: case TAG_INTEGER_FIXED: typeId = TYPE_INT; break; case TAG_LONG_NEGATIVE: case TAG_LONG_FIXED: typeId = TYPE_LONG; break; case TAG_BIG_INTEGER_0: case TAG_BIG_INTEGER_1: case TAG_BIG_INTEGER_SMALL: typeId = TYPE_BIG_INTEGER; break; case TAG_FLOAT_0: case TAG_FLOAT_1: case TAG_FLOAT_FIXED: typeId = TYPE_FLOAT; break; case TAG_DOUBLE_0: case TAG_DOUBLE_1: case TAG_DOUBLE_FIXED: typeId = TYPE_DOUBLE; break; case TAG_BIG_DECIMAL_0: case TAG_BIG_DECIMAL_1: case TAG_BIG_DECIMAL_SMALL: case TAG_BIG_DECIMAL_SMALL_SCALED: typeId = TYPE_BIG_DECIMAL; break; default: if (tag >= TAG_INTEGER_0_15 && tag <= TAG_INTEGER_0_15 + 15) { typeId = TYPE_INT; } else if (tag >= TAG_STRING_0_15 && tag <= TAG_STRING_0_15 + 15) { typeId = TYPE_STRING; } else if (tag >= TAG_LONG_0_7 && tag <= TAG_LONG_0_7 + 7) { typeId = TYPE_LONG; } else if (tag >= TAG_BYTE_ARRAY_0_15 && tag <= TAG_BYTE_ARRAY_0_15 + 15) { typeId = TYPE_ARRAY; } else { throw DataUtils.newIllegalStateException( DataUtils.ERROR_FILE_CORRUPT, "Unknown tag {0}", tag); } } } AutoDetectDataType t = last; if (typeId != t.typeId) { last = t = 
newType(typeId); } return t.read(buff, tag); } private static int getTypeId(Object obj) { if (obj instanceof Integer) { return TYPE_INT; } else if (obj instanceof String) { return TYPE_STRING; } else if (obj instanceof Long) { return TYPE_LONG; } else if (obj instanceof Double) { return TYPE_DOUBLE; } else if (obj instanceof Float) { return TYPE_FLOAT; } else if (obj instanceof Boolean) { return TYPE_BOOLEAN; } else if (obj instanceof UUID) { return TYPE_UUID; } else if (obj instanceof Byte) { return TYPE_BYTE; } else if (obj instanceof Short) { return TYPE_SHORT; } else if (obj instanceof Character) { return TYPE_CHAR; } else if (obj == null) { return TYPE_NULL; } else if (isDate(obj)) { return TYPE_DATE; } else if (isBigInteger(obj)) { return TYPE_BIG_INTEGER; } else if (isBigDecimal(obj)) { return TYPE_BIG_DECIMAL; } else if (obj.getClass().isArray()) { return TYPE_ARRAY; } return TYPE_SERIALIZED_OBJECT; } /** * Switch the last remembered type to match the type of the given object. * * @param obj the object * @return the auto-detected type used */ AutoDetectDataType switchType(Object obj) { int typeId = getTypeId(obj); AutoDetectDataType l = last; if (typeId != l.typeId) { last = l = newType(typeId); } return l; } /** * Check whether this object is a BigInteger. * * @param obj the object * @return true if yes */ static boolean isBigInteger(Object obj) { return obj instanceof BigInteger && obj.getClass() == BigInteger.class; } /** * Check whether this object is a BigDecimal. * * @param obj the object * @return true if yes */ static boolean isBigDecimal(Object obj) { return obj instanceof BigDecimal && obj.getClass() == BigDecimal.class; } /** * Check whether this object is a date. * * @param obj the object * @return true if yes */ static boolean isDate(Object obj) { return obj instanceof Date && obj.getClass() == Date.class; } /** * Check whether this object is an array. * * @param obj the object * @return true if yes */ static boolean isArray(Object obj) { return obj != null && obj.getClass().isArray(); } /** * Get the class id, or null if not found. * * @param clazz the class * @return the class id or null */ static Integer getCommonClassId(Class<?> clazz) { HashMap<Class<?>, Integer> map = COMMON_CLASSES_MAP; if (map.size() == 0) { // lazy initialization for (int i = 0, size = COMMON_CLASSES.length; i < size; i++) { COMMON_CLASSES_MAP.put(COMMON_CLASSES[i], i); } } return map.get(clazz); } /** * Serialize the object to a byte array. * * @param obj the object to serialize * @return the byte array */ public static byte[] serialize(Object obj) { try { ByteArrayOutputStream out = new ByteArrayOutputStream(); ObjectOutputStream os = new ObjectOutputStream(out); os.writeObject(obj); return out.toByteArray(); } catch (Throwable e) { throw DataUtils.newIllegalArgumentException( "Could not serialize {0}", obj, e); } } /** * De-serialize the byte array to an object. * * @param data the byte array * @return the object */ public static Object deserialize(byte[] data) { try { ByteArrayInputStream in = new ByteArrayInputStream(data); ObjectInputStream is = new ObjectInputStream(in); return is.readObject(); } catch (Throwable e) { throw DataUtils.newIllegalArgumentException( "Could not deserialize {0}", Arrays.toString(data), e); } } /** * Compare the contents of two byte arrays. If the content or length of the * first array is smaller than the second array, -1 is returned. If the * content or length of the second array is smaller than the first array, 1 * is returned. 
If the contents and lengths are the same, 0 is returned. * <p> * This method interprets bytes as unsigned. * * @param data1 the first byte array (must not be null) * @param data2 the second byte array (must not be null) * @return the result of the comparison (-1, 1 or 0) */ public static int compareNotNull(byte[] data1, byte[] data2) { if (data1 == data2) { return 0; } int len = Math.min(data1.length, data2.length); for (int i = 0; i < len; i++) { int b = data1[i] & 255; int b2 = data2[i] & 255; if (b != b2) { return b > b2 ? 1 : -1; } } return Integer.signum(data1.length - data2.length); } /** * The base class for auto-detect data types. */ abstract static class AutoDetectDataType implements DataType { protected final ObjectDataType base; protected final int typeId; AutoDetectDataType(ObjectDataType base, int typeId) { this.base = base; this.typeId = typeId; } @Override public int getMemory(Object o) { return getType(o).getMemory(o); } @Override public int compare(Object aObj, Object bObj) { AutoDetectDataType aType = getType(aObj); AutoDetectDataType bType = getType(bObj); int typeDiff = aType.typeId - bType.typeId; if (typeDiff == 0) { return aType.compare(aObj, bObj); } return Integer.signum(typeDiff); } @Override public void write(WriteBuffer buff, Object[] obj, int len, boolean key) { for (int i = 0; i < len; i++) { write(buff, obj[i]); } } @Override public void write(WriteBuffer buff, Object o) { getType(o).write(buff, o); } @Override public void read(ByteBuffer buff, Object[] obj, int len, boolean key) { for (int i = 0; i < len; i++) { obj[i] = read(buff); } } @Override public final Object read(ByteBuffer buff) { throw DataUtils.newIllegalStateException(DataUtils.ERROR_INTERNAL, "Internal error"); } /** * Get the type for the given object. * * @param o the object * @return the type */ AutoDetectDataType getType(Object o) { return base.switchType(o); } /** * Read an object from the buffer. * * @param buff the buffer * @param tag the first byte of the object (usually the type) * @return the read object */ abstract Object read(ByteBuffer buff, int tag); } /** * The type for the null value */ static class NullType extends AutoDetectDataType { NullType(ObjectDataType base) { super(base, TYPE_NULL); } @Override public int compare(Object aObj, Object bObj) { if (aObj == null && bObj == null) { return 0; } else if (aObj == null) { return -1; } else if (bObj == null) { return 1; } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj == null ? 0 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (obj != null) { super.write(buff, obj); return; } buff.put((byte) TYPE_NULL); } @Override public Object read(ByteBuffer buff, int tag) { return null; } } /** * The type for boolean true and false. */ static class BooleanType extends AutoDetectDataType { BooleanType(ObjectDataType base) { super(base, TYPE_BOOLEAN); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Boolean && bObj instanceof Boolean) { Boolean a = (Boolean) aObj; Boolean b = (Boolean) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Boolean ? 0 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Boolean)) { super.write(buff, obj); return; } int tag = ((Boolean) obj) ? 
TAG_BOOLEAN_TRUE : TYPE_BOOLEAN; buff.put((byte) tag); } @Override public Object read(ByteBuffer buff, int tag) { return tag == TYPE_BOOLEAN ? Boolean.FALSE : Boolean.TRUE; } } /** * The type for byte objects. */ static class ByteType extends AutoDetectDataType { ByteType(ObjectDataType base) { super(base, TYPE_BYTE); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Byte && bObj instanceof Byte) { Byte a = (Byte) aObj; Byte b = (Byte) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Byte ? 0 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Byte)) { super.write(buff, obj); return; } buff.put((byte) TYPE_BYTE); buff.put(((Byte) obj).byteValue()); } @Override public Object read(ByteBuffer buff, int tag) { return Byte.valueOf(buff.get()); } } /** * The type for character objects. */ static class CharacterType extends AutoDetectDataType { CharacterType(ObjectDataType base) { super(base, TYPE_CHAR); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Character && bObj instanceof Character) { Character a = (Character) aObj; Character b = (Character) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Character ? 24 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Character)) { super.write(buff, obj); return; } buff.put((byte) TYPE_CHAR); buff.putChar(((Character) obj).charValue()); } @Override public Object read(ByteBuffer buff, int tag) { return Character.valueOf(buff.getChar()); } } /** * The type for short objects. */ static class ShortType extends AutoDetectDataType { ShortType(ObjectDataType base) { super(base, TYPE_SHORT); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Short && bObj instanceof Short) { Short a = (Short) aObj; Short b = (Short) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Short ? 24 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Short)) { super.write(buff, obj); return; } buff.put((byte) TYPE_SHORT); buff.putShort(((Short) obj).shortValue()); } @Override public Object read(ByteBuffer buff, int tag) { return Short.valueOf(buff.getShort()); } } /** * The type for integer objects. */ static class IntegerType extends AutoDetectDataType { IntegerType(ObjectDataType base) { super(base, TYPE_INT); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Integer && bObj instanceof Integer) { Integer a = (Integer) aObj; Integer b = (Integer) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Integer ? 
24 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Integer)) { super.write(buff, obj); return; } int x = (Integer) obj; if (x < 0) { // -Integer.MIN_VALUE is smaller than 0 if (-x < 0 || -x > DataUtils.COMPRESSED_VAR_INT_MAX) { buff.put((byte) TAG_INTEGER_FIXED).putInt(x); } else { buff.put((byte) TAG_INTEGER_NEGATIVE).putVarInt(-x); } } else if (x <= 15) { buff.put((byte) (TAG_INTEGER_0_15 + x)); } else if (x <= DataUtils.COMPRESSED_VAR_INT_MAX) { buff.put((byte) TYPE_INT).putVarInt(x); } else { buff.put((byte) TAG_INTEGER_FIXED).putInt(x); } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TYPE_INT: return DataUtils.readVarInt(buff); case TAG_INTEGER_NEGATIVE: return -DataUtils.readVarInt(buff); case TAG_INTEGER_FIXED: return buff.getInt(); } return tag - TAG_INTEGER_0_15; } } /** * The type for long objects. */ static class LongType extends AutoDetectDataType { LongType(ObjectDataType base) { super(base, TYPE_LONG); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Long && bObj instanceof Long) { Long a = (Long) aObj; Long b = (Long) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Long ? 30 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Long)) { super.write(buff, obj); return; } long x = (Long) obj; if (x < 0) { // -Long.MIN_VALUE is smaller than 0 if (-x < 0 || -x > DataUtils.COMPRESSED_VAR_LONG_MAX) { buff.put((byte) TAG_LONG_FIXED); buff.putLong(x); } else { buff.put((byte) TAG_LONG_NEGATIVE); buff.putVarLong(-x); } } else if (x <= 7) { buff.put((byte) (TAG_LONG_0_7 + x)); } else if (x <= DataUtils.COMPRESSED_VAR_LONG_MAX) { buff.put((byte) TYPE_LONG); buff.putVarLong(x); } else { buff.put((byte) TAG_LONG_FIXED); buff.putLong(x); } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TYPE_LONG: return DataUtils.readVarLong(buff); case TAG_LONG_NEGATIVE: return -DataUtils.readVarLong(buff); case TAG_LONG_FIXED: return buff.getLong(); } return Long.valueOf(tag - TAG_LONG_0_7); } } /** * The type for float objects. */ static class FloatType extends AutoDetectDataType { FloatType(ObjectDataType base) { super(base, TYPE_FLOAT); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Float && bObj instanceof Float) { Float a = (Float) aObj; Float b = (Float) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Float ? 24 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Float)) { super.write(buff, obj); return; } float x = (Float) obj; int f = Float.floatToIntBits(x); if (f == ObjectDataType.FLOAT_ZERO_BITS) { buff.put((byte) TAG_FLOAT_0); } else if (f == ObjectDataType.FLOAT_ONE_BITS) { buff.put((byte) TAG_FLOAT_1); } else { int value = Integer.reverse(f); if (value >= 0 && value <= DataUtils.COMPRESSED_VAR_INT_MAX) { buff.put((byte) TYPE_FLOAT).putVarInt(value); } else { buff.put((byte) TAG_FLOAT_FIXED).putFloat(x); } } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TAG_FLOAT_0: return 0f; case TAG_FLOAT_1: return 1f; case TAG_FLOAT_FIXED: return buff.getFloat(); } return Float.intBitsToFloat(Integer.reverse(DataUtils .readVarInt(buff))); } } /** * The type for double objects. 
*/ static class DoubleType extends AutoDetectDataType { DoubleType(ObjectDataType base) { super(base, TYPE_DOUBLE); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof Double && bObj instanceof Double) { Double a = (Double) aObj; Double b = (Double) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return obj instanceof Double ? 30 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof Double)) { super.write(buff, obj); return; } double x = (Double) obj; long d = Double.doubleToLongBits(x); if (d == ObjectDataType.DOUBLE_ZERO_BITS) { buff.put((byte) TAG_DOUBLE_0); } else if (d == ObjectDataType.DOUBLE_ONE_BITS) { buff.put((byte) TAG_DOUBLE_1); } else { long value = Long.reverse(d); if (value >= 0 && value <= DataUtils.COMPRESSED_VAR_LONG_MAX) { buff.put((byte) TYPE_DOUBLE); buff.putVarLong(value); } else { buff.put((byte) TAG_DOUBLE_FIXED); buff.putDouble(x); } } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TAG_DOUBLE_0: return 0d; case TAG_DOUBLE_1: return 1d; case TAG_DOUBLE_FIXED: return buff.getDouble(); } return Double.longBitsToDouble(Long.reverse(DataUtils .readVarLong(buff))); } } /** * The type for BigInteger objects. */ static class BigIntegerType extends AutoDetectDataType { BigIntegerType(ObjectDataType base) { super(base, TYPE_BIG_INTEGER); } @Override public int compare(Object aObj, Object bObj) { if (isBigInteger(aObj) && isBigInteger(bObj)) { BigInteger a = (BigInteger) aObj; BigInteger b = (BigInteger) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return isBigInteger(obj) ? 100 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!isBigInteger(obj)) { super.write(buff, obj); return; } BigInteger x = (BigInteger) obj; if (BigInteger.ZERO.equals(x)) { buff.put((byte) TAG_BIG_INTEGER_0); } else if (BigInteger.ONE.equals(x)) { buff.put((byte) TAG_BIG_INTEGER_1); } else { int bits = x.bitLength(); if (bits <= 63) { buff.put((byte) TAG_BIG_INTEGER_SMALL).putVarLong( x.longValue()); } else { byte[] bytes = x.toByteArray(); buff.put((byte) TYPE_BIG_INTEGER).putVarInt(bytes.length) .put(bytes); } } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TAG_BIG_INTEGER_0: return BigInteger.ZERO; case TAG_BIG_INTEGER_1: return BigInteger.ONE; case TAG_BIG_INTEGER_SMALL: return BigInteger.valueOf(DataUtils.readVarLong(buff)); } int len = DataUtils.readVarInt(buff); byte[] bytes = DataUtils.newBytes(len); buff.get(bytes); return new BigInteger(bytes); } } /** * The type for BigDecimal objects. */ static class BigDecimalType extends AutoDetectDataType { BigDecimalType(ObjectDataType base) { super(base, TYPE_BIG_DECIMAL); } @Override public int compare(Object aObj, Object bObj) { if (isBigDecimal(aObj) && isBigDecimal(bObj)) { BigDecimal a = (BigDecimal) aObj; BigDecimal b = (BigDecimal) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public int getMemory(Object obj) { return isBigDecimal(obj) ? 
150 : super.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { if (!isBigDecimal(obj)) { super.write(buff, obj); return; } BigDecimal x = (BigDecimal) obj; if (BigDecimal.ZERO.equals(x)) { buff.put((byte) TAG_BIG_DECIMAL_0); } else if (BigDecimal.ONE.equals(x)) { buff.put((byte) TAG_BIG_DECIMAL_1); } else { int scale = x.scale(); BigInteger b = x.unscaledValue(); int bits = b.bitLength(); if (bits < 64) { if (scale == 0) { buff.put((byte) TAG_BIG_DECIMAL_SMALL); } else { buff.put((byte) TAG_BIG_DECIMAL_SMALL_SCALED) .putVarInt(scale); } buff.putVarLong(b.longValue()); } else { byte[] bytes = b.toByteArray(); buff.put((byte) TYPE_BIG_DECIMAL).putVarInt(scale) .putVarInt(bytes.length).put(bytes); } } } @Override public Object read(ByteBuffer buff, int tag) { switch (tag) { case TAG_BIG_DECIMAL_0: return BigDecimal.ZERO; case TAG_BIG_DECIMAL_1: return BigDecimal.ONE; case TAG_BIG_DECIMAL_SMALL: return BigDecimal.valueOf(DataUtils.readVarLong(buff)); case TAG_BIG_DECIMAL_SMALL_SCALED: int scale = DataUtils.readVarInt(buff); return BigDecimal.valueOf(DataUtils.readVarLong(buff), scale); } int scale = DataUtils.readVarInt(buff); int len = DataUtils.readVarInt(buff); byte[] bytes = DataUtils.newBytes(len); buff.get(bytes); BigInteger b = new BigInteger(bytes); return new BigDecimal(b, scale); } } /** * The type for string objects. */ static class StringType extends AutoDetectDataType { StringType(ObjectDataType base) { super(base, TYPE_STRING); } @Override public int getMemory(Object obj) { if (!(obj instanceof String)) { return super.getMemory(obj); } return 24 + 2 * obj.toString().length(); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof String && bObj instanceof String) { return aObj.toString().compareTo(bObj.toString()); } return super.compare(aObj, bObj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof String)) { super.write(buff, obj); return; } String s = (String) obj; int len = s.length(); if (len <= 15) { buff.put((byte) (TAG_STRING_0_15 + len)); } else { buff.put((byte) TYPE_STRING).putVarInt(len); } buff.putStringData(s, len); } @Override public Object read(ByteBuffer buff, int tag) { int len; if (tag == TYPE_STRING) { len = DataUtils.readVarInt(buff); } else { len = tag - TAG_STRING_0_15; } return DataUtils.readString(buff, len); } } /** * The type for UUID objects. */ static class UUIDType extends AutoDetectDataType { UUIDType(ObjectDataType base) { super(base, TYPE_UUID); } @Override public int getMemory(Object obj) { return obj instanceof UUID ? 40 : super.getMemory(obj); } @Override public int compare(Object aObj, Object bObj) { if (aObj instanceof UUID && bObj instanceof UUID) { UUID a = (UUID) aObj; UUID b = (UUID) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public void write(WriteBuffer buff, Object obj) { if (!(obj instanceof UUID)) { super.write(buff, obj); return; } buff.put((byte) TYPE_UUID); UUID a = (UUID) obj; buff.putLong(a.getMostSignificantBits()); buff.putLong(a.getLeastSignificantBits()); } @Override public Object read(ByteBuffer buff, int tag) { long a = buff.getLong(), b = buff.getLong(); return new UUID(a, b); } } /** * The type for java.util.Date objects. */ static class DateType extends AutoDetectDataType { DateType(ObjectDataType base) { super(base, TYPE_DATE); } @Override public int getMemory(Object obj) { return isDate(obj) ? 
40 : super.getMemory(obj); } @Override public int compare(Object aObj, Object bObj) { if (isDate(aObj) && isDate(bObj)) { Date a = (Date) aObj; Date b = (Date) bObj; return a.compareTo(b); } return super.compare(aObj, bObj); } @Override public void write(WriteBuffer buff, Object obj) { if (!isDate(obj)) { super.write(buff, obj); return; } buff.put((byte) TYPE_DATE); Date a = (Date) obj; buff.putLong(a.getTime()); } @Override public Object read(ByteBuffer buff, int tag) { long a = buff.getLong(); return new Date(a); } } /** * The type for object arrays. */ static class ObjectArrayType extends AutoDetectDataType { private final ObjectDataType elementType = new ObjectDataType(); ObjectArrayType(ObjectDataType base) { super(base, TYPE_ARRAY); } @Override public int getMemory(Object obj) { if (!isArray(obj)) { return super.getMemory(obj); } int size = 64; Class<?> type = obj.getClass().getComponentType(); if (type.isPrimitive()) { int len = Array.getLength(obj); if (type == boolean.class) { size += len; } else if (type == byte.class) { size += len; } else if (type == char.class) { size += len * 2; } else if (type == short.class) { size += len * 2; } else if (type == int.class) { size += len * 4; } else if (type == float.class) { size += len * 4; } else if (type == double.class) { size += len * 8; } else if (type == long.class) { size += len * 8; } } else { for (Object x : (Object[]) obj) { if (x != null) { size += elementType.getMemory(x); } } } // we say they are larger, because these objects // use quite a lot of disk space return size * 2; } @Override public int compare(Object aObj, Object bObj) { if (!isArray(aObj) || !isArray(bObj)) { return super.compare(aObj, bObj); } if (aObj == bObj) { return 0; } Class<?> type = aObj.getClass().getComponentType(); Class<?> bType = bObj.getClass().getComponentType(); if (type != bType) { Integer classA = getCommonClassId(type); Integer classB = getCommonClassId(bType); if (classA != null) { if (classB != null) { return classA.compareTo(classB); } return -1; } else if (classB != null) { return 1; } return type.getName().compareTo(bType.getName()); } int aLen = Array.getLength(aObj); int bLen = Array.getLength(bObj); int len = Math.min(aLen, bLen); if (type.isPrimitive()) { if (type == byte.class) { byte[] a = (byte[]) aObj; byte[] b = (byte[]) bObj; return compareNotNull(a, b); } for (int i = 0; i < len; i++) { int x; if (type == boolean.class) { x = Integer.signum((((boolean[]) aObj)[i] ? 1 : 0) - (((boolean[]) bObj)[i] ? 1 : 0)); } else if (type == char.class) { x = Integer.signum((((char[]) aObj)[i]) - (((char[]) bObj)[i])); } else if (type == short.class) { x = Integer.signum((((short[]) aObj)[i]) - (((short[]) bObj)[i])); } else if (type == int.class) { int a = ((int[]) aObj)[i]; int b = ((int[]) bObj)[i]; x = a == b ? 0 : a < b ? -1 : 1; } else if (type == float.class) { x = Float.compare(((float[]) aObj)[i], ((float[]) bObj)[i]); } else if (type == double.class) { x = Double.compare(((double[]) aObj)[i], ((double[]) bObj)[i]); } else { long a = ((long[]) aObj)[i]; long b = ((long[]) bObj)[i]; x = a == b ? 0 : a < b ? -1 : 1; } if (x != 0) { return x; } } } else { Object[] a = (Object[]) aObj; Object[] b = (Object[]) bObj; for (int i = 0; i < len; i++) { int comp = elementType.compare(a[i], b[i]); if (comp != 0) { return comp; } } } return aLen == bLen ? 0 : aLen < bLen ? 
-1 : 1; } @Override public void write(WriteBuffer buff, Object obj) { if (!isArray(obj)) { super.write(buff, obj); return; } Class<?> type = obj.getClass().getComponentType(); Integer classId = getCommonClassId(type); if (classId != null) { if (type.isPrimitive()) { if (type == byte.class) { byte[] data = (byte[]) obj; int len = data.length; if (len <= 15) { buff.put((byte) (TAG_BYTE_ARRAY_0_15 + len)); } else { buff.put((byte) TYPE_ARRAY) .put((byte) classId.intValue()) .putVarInt(len); } buff.put(data); return; } int len = Array.getLength(obj); buff.put((byte) TYPE_ARRAY).put((byte) classId.intValue()) .putVarInt(len); for (int i = 0; i < len; i++) { if (type == boolean.class) { buff.put((byte) (((boolean[]) obj)[i] ? 1 : 0)); } else if (type == char.class) { buff.putChar(((char[]) obj)[i]); } else if (type == short.class) { buff.putShort(((short[]) obj)[i]); } else if (type == int.class) { buff.putInt(((int[]) obj)[i]); } else if (type == float.class) { buff.putFloat(((float[]) obj)[i]); } else if (type == double.class) { buff.putDouble(((double[]) obj)[i]); } else { buff.putLong(((long[]) obj)[i]); } } return; } buff.put((byte) TYPE_ARRAY).put((byte) classId.intValue()); } else { buff.put((byte) TYPE_ARRAY).put((byte) -1); String c = type.getName(); StringDataType.INSTANCE.write(buff, c); } Object[] array = (Object[]) obj; int len = array.length; buff.putVarInt(len); for (Object x : array) { elementType.write(buff, x); } } @Override public Object read(ByteBuffer buff, int tag) { if (tag != TYPE_ARRAY) { byte[] data; int len = tag - TAG_BYTE_ARRAY_0_15; data = DataUtils.newBytes(len); buff.get(data); return data; } int ct = buff.get(); Class<?> clazz; Object obj; if (ct == -1) { String componentType = StringDataType.INSTANCE.read(buff); try { clazz = Class.forName(componentType); } catch (Exception e) { throw DataUtils.newIllegalStateException( DataUtils.ERROR_SERIALIZATION, "Could not get class {0}", componentType, e); } } else { clazz = COMMON_CLASSES[ct]; } int len = DataUtils.readVarInt(buff); try { obj = Array.newInstance(clazz, len); } catch (Exception e) { throw DataUtils.newIllegalStateException( DataUtils.ERROR_SERIALIZATION, "Could not create array of type {0} length {1}", clazz, len, e); } if (clazz.isPrimitive()) { for (int i = 0; i < len; i++) { if (clazz == boolean.class) { ((boolean[]) obj)[i] = buff.get() == 1; } else if (clazz == byte.class) { ((byte[]) obj)[i] = buff.get(); } else if (clazz == char.class) { ((char[]) obj)[i] = buff.getChar(); } else if (clazz == short.class) { ((short[]) obj)[i] = buff.getShort(); } else if (clazz == int.class) { ((int[]) obj)[i] = buff.getInt(); } else if (clazz == float.class) { ((float[]) obj)[i] = buff.getFloat(); } else if (clazz == double.class) { ((double[]) obj)[i] = buff.getDouble(); } else { ((long[]) obj)[i] = buff.getLong(); } } } else { Object[] array = (Object[]) obj; for (int i = 0; i < len; i++) { array[i] = elementType.read(buff); } } return obj; } } /** * The type for serialized objects. 
*/ static class SerializedObjectType extends AutoDetectDataType { private int averageSize = 10000; SerializedObjectType(ObjectDataType base) { super(base, TYPE_SERIALIZED_OBJECT); } @SuppressWarnings("unchecked") @Override public int compare(Object aObj, Object bObj) { if (aObj == bObj) { return 0; } DataType ta = getType(aObj); DataType tb = getType(bObj); if (ta != this || tb != this) { if (ta == tb) { return ta.compare(aObj, bObj); } return super.compare(aObj, bObj); } // TODO ensure comparable type (both may be comparable but not // with each other) if (aObj instanceof Comparable) { if (aObj.getClass().isAssignableFrom(bObj.getClass())) { return ((Comparable<Object>) aObj).compareTo(bObj); } } if (bObj instanceof Comparable) { if (bObj.getClass().isAssignableFrom(aObj.getClass())) { return -((Comparable<Object>) bObj).compareTo(aObj); } } byte[] a = serialize(aObj); byte[] b = serialize(bObj); return compareNotNull(a, b); } @Override public int getMemory(Object obj) { DataType t = getType(obj); if (t == this) { return averageSize; } return t.getMemory(obj); } @Override public void write(WriteBuffer buff, Object obj) { DataType t = getType(obj); if (t != this) { t.write(buff, obj); return; } byte[] data = serialize(obj); // we say they are larger, because these objects // use quite a lot of disk space int size = data.length * 2; // adjust the average size // using an exponential moving average averageSize = (size + 15 * averageSize) / 16; buff.put((byte) TYPE_SERIALIZED_OBJECT).putVarInt(data.length) .put(data); } @Override public Object read(ByteBuffer buff, int tag) { int len = DataUtils.readVarInt(buff); byte[] data = DataUtils.newBytes(len); buff.get(data); return deserialize(data); } } }
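/*
 * Usage sketch (not part of the H2 source above): exercises the public static helpers
 * defined in ObjectDataType and shows that MVStore maps fall back to this type when no
 * explicit DataType is configured, so mixed common values are stored with the compact
 * tags defined above. The MVStore.open / openMap calls are the standard org.h2.mvstore
 * API; the sketch class name and sample values are made up for illustration.
 */
class ObjectDataTypeUsageSketch {
    public static void main(String[] args) {
        // Java-serialization fallback used for TYPE_SERIALIZED_OBJECT values.
        byte[] bytes = org.h2.mvstore.type.ObjectDataType.serialize("hello");
        Object restored = org.h2.mvstore.type.ObjectDataType.deserialize(bytes);

        // Unsigned, length-aware byte[] comparison: returns -1, 0 or 1.
        int cmp = org.h2.mvstore.type.ObjectDataType.compareNotNull(
                new byte[] { 1, 2 }, new byte[] { 1, 3 });

        // ObjectDataType is the default key/value type of an MVMap.
        org.h2.mvstore.MVStore store = org.h2.mvstore.MVStore.open(null); // in-memory store
        org.h2.mvstore.MVMap<Object, Object> map = store.openMap("data");
        map.put(1, "one");   // stored with the integer and string tags above
        map.put("pi", 3.14); // key and value types are auto-detected per object
        store.close();

        System.out.println(restored + " " + cmp);
    }
}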
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.pig.piggybank.storage.avro; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.avro.Schema; import org.apache.pig.ResourceSchema; import org.apache.pig.ResourceSchema.ResourceFieldSchema; import org.apache.pig.data.DataType; import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema; /** * This class converts Avro schema to Pig schema */ public class AvroSchema2Pig { public static String RECORD = "RECORD"; public static String FIELD = "FIELD"; public static String ARRAY_FIELD = "ARRAY_ELEM"; public static String MAP_VALUE_FIELD = "m_value"; /** * Wrap a pig type to a field schema */ public static ResourceFieldSchema getPigSchema(byte pigType, String fieldName) { return new ResourceFieldSchema(new FieldSchema(fieldName, pigType)); } /** * Convert an Avro schema to a Pig schema */ public static ResourceSchema convert(Schema schema) throws IOException { if (AvroStorageUtils.containsGenericUnion(schema)) throw new IOException("We don't accept schema containing generic unions."); Set<Schema> visitedRecords = new HashSet<Schema>(); ResourceFieldSchema inSchema = inconvert(schema, FIELD, visitedRecords); ResourceSchema tupleSchema; if (inSchema.getType() == DataType.TUPLE) { tupleSchema = inSchema.getSchema(); } else { // other typs ResourceFieldSchema tupleWrapper = AvroStorageUtils.wrapAsTuple(inSchema); ResourceSchema topSchema = new ResourceSchema(); topSchema.setFields(new ResourceFieldSchema[] { tupleWrapper }); tupleSchema = topSchema; } return tupleSchema; } /** * Convert a schema with field name to a pig schema */ private static ResourceFieldSchema inconvert(Schema in, String fieldName, Set<Schema> visitedRecords) throws IOException { AvroStorageLog.details("InConvert avro schema with field name " + fieldName); Schema.Type avroType = in.getType(); ResourceFieldSchema fieldSchema = new ResourceFieldSchema(); fieldSchema.setName(fieldName); if (avroType.equals(Schema.Type.RECORD)) { AvroStorageLog.details("convert to a pig tuple"); if (visitedRecords.contains(in)) { fieldSchema.setType(DataType.BYTEARRAY); } else { visitedRecords.add(in); fieldSchema.setType(DataType.TUPLE); ResourceSchema tupleSchema = new ResourceSchema(); List<Schema.Field> fields = in.getFields(); ResourceFieldSchema[] childFields = new ResourceFieldSchema[fields.size()]; int index = 0; for (Schema.Field field : fields) { childFields[index++] = inconvert(field.schema(), field.name(), visitedRecords); } tupleSchema.setFields(childFields); fieldSchema.setSchema(tupleSchema); visitedRecords.remove(in); } } else if (avroType.equals(Schema.Type.ARRAY)) { AvroStorageLog.details("convert array to a pig bag"); fieldSchema.setType(DataType.BAG); Schema elemSchema = 
in.getElementType(); ResourceFieldSchema subFieldSchema = inconvert(elemSchema, ARRAY_FIELD, visitedRecords); add2BagSchema(fieldSchema, subFieldSchema); } else if (avroType.equals(Schema.Type.MAP)) { AvroStorageLog.details("convert map to a pig map"); fieldSchema.setType(DataType.MAP); ResourceFieldSchema valueSchema = inconvert(in.getValueType(), in.getName(), visitedRecords); if (valueSchema.getSchema() != null) { fieldSchema.setSchema(valueSchema.getSchema()); } else { FieldSchema fs = new FieldSchema(in.getName(), valueSchema.getType()); org.apache.pig.impl.logicalLayer.schema.Schema ss = new org.apache.pig.impl.logicalLayer.schema.Schema(fs); ResourceSchema elemSchema = new ResourceSchema(ss); fieldSchema.setSchema(elemSchema); } } else if (avroType.equals(Schema.Type.UNION)) { if (AvroStorageUtils.isAcceptableUnion(in)) { Schema acceptSchema = AvroStorageUtils.getAcceptedType(in); ResourceFieldSchema realFieldSchema = inconvert(acceptSchema, null, visitedRecords); fieldSchema.setType(realFieldSchema.getType()); fieldSchema.setSchema(realFieldSchema.getSchema()); } else throw new IOException("Do not support generic union:" + in); } else if (avroType.equals(Schema.Type.FIXED)) { fieldSchema.setType(DataType.BYTEARRAY); } else if (avroType.equals(Schema.Type.BOOLEAN)) { fieldSchema.setType(DataType.BOOLEAN); } else if (avroType.equals(Schema.Type.BYTES)) { fieldSchema.setType(DataType.BYTEARRAY); } else if (avroType.equals(Schema.Type.DOUBLE)) { fieldSchema.setType(DataType.DOUBLE); } else if (avroType.equals(Schema.Type.ENUM)) { fieldSchema.setType(DataType.CHARARRAY); } else if (avroType.equals(Schema.Type.FLOAT)) { fieldSchema.setType(DataType.FLOAT); } else if (avroType.equals(Schema.Type.INT)) { fieldSchema.setType(DataType.INTEGER); } else if (avroType.equals(Schema.Type.LONG)) { fieldSchema.setType(DataType.LONG); } else if (avroType.equals(Schema.Type.STRING)) { fieldSchema.setType(DataType.CHARARRAY); } else if (avroType.equals(Schema.Type.NULL)) { // value of NULL is always NULL fieldSchema.setType(DataType.INTEGER); } else { throw new IOException("Unsupported avro type:" + avroType); } return fieldSchema; } /** * Add a field schema to a bag schema */ static protected void add2BagSchema(ResourceFieldSchema fieldSchema, ResourceFieldSchema subFieldSchema) throws IOException { ResourceFieldSchema wrapped = (subFieldSchema.getType() == DataType.TUPLE) ? subFieldSchema : AvroStorageUtils.wrapAsTuple(subFieldSchema); ResourceSchema listSchema = new ResourceSchema(); listSchema.setFields(new ResourceFieldSchema[] { wrapped }); fieldSchema.setSchema(listSchema); } }
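/*
 * Usage sketch (not part of the Pig source above): converts a small Avro record schema
 * to a Pig ResourceSchema with AvroSchema2Pig.convert. Records map to tuples, arrays to
 * bags, string to chararray and int to integer, as implemented in inconvert. The JSON
 * schema literal and the sketch class name are made up for illustration; Schema.Parser
 * is the standard Avro schema-parsing API.
 */
class AvroSchema2PigUsageSketch {
    public static void main(String[] args) throws java.io.IOException {
        String json = "{\"type\":\"record\",\"name\":\"Point\",\"fields\":["
                + "{\"name\":\"label\",\"type\":\"string\"},"
                + "{\"name\":\"coords\",\"type\":{\"type\":\"array\",\"items\":\"int\"}}]}";
        org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(json);

        org.apache.pig.ResourceSchema pigSchema =
                org.apache.pig.piggybank.storage.avro.AvroSchema2Pig.convert(avroSchema);

        // Roughly: a tuple with a chararray field and a bag of int tuples
        // (the exact rendering depends on ResourceSchema.toString()).
        System.out.println(pigSchema);
    }
}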
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server.quorum; import java.io.IOException; import java.util.HashMap; import java.util.LinkedList; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.LinkedBlockingQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.zookeeper.ZooDefs.OpCode; import org.apache.zookeeper.server.Request; import org.apache.zookeeper.server.RequestProcessor; import org.apache.zookeeper.server.WorkerService; import org.apache.zookeeper.server.ZooKeeperCriticalThread; import org.apache.zookeeper.server.ZooKeeperServerListener; /** * This RequestProcessor matches the incoming committed requests with the * locally submitted requests. The trick is that locally submitted requests that * change the state of the system will come back as incoming committed requests, * so we need to match them up. Instead of just waiting for the committed requests, * we process the uncommitted requests that belong to other sessions. * * The CommitProcessor is multi-threaded. Communication between threads is * handled via queues, atomics, and wait/notifyAll synchronized on the * processor. The CommitProcessor acts as a gateway for allowing requests to * continue with the remainder of the processing pipeline. It will allow many * read requests but only a single write request to be in flight simultaneously, * thus ensuring that write requests are processed in transaction id order. * * - 1 commit processor main thread, which watches the request queues and * assigns requests to worker threads based on their sessionId so that * read and write requests for a particular session are always assigned * to the same thread (and hence are guaranteed to run in order). * - 0-N worker threads, which run the rest of the request processor pipeline * on the requests. If configured with 0 worker threads, the primary * commit processor thread runs the pipeline directly. * * Typical (default) thread counts are: on a 32 core machine, 1 commit * processor thread and 32 worker threads. * * Multi-threading constraints: * - Each session's requests must be processed in order. * - Write requests must be processed in zxid order * - Must ensure no race condition between writes in one session that would * trigger a watch being set by a read request in another session * * The current implementation solves the third constraint by simply allowing no * read requests to be processed in parallel with write requests. 
*/ public class CommitProcessor extends ZooKeeperCriticalThread implements RequestProcessor { private static final Logger LOG = LoggerFactory.getLogger(CommitProcessor.class); /** Default: numCores */ public static final String ZOOKEEPER_COMMIT_PROC_NUM_WORKER_THREADS = "zookeeper.commitProcessor.numWorkerThreads"; /** Default worker pool shutdown timeout in ms: 5000 (5s) */ public static final String ZOOKEEPER_COMMIT_PROC_SHUTDOWN_TIMEOUT = "zookeeper.commitProcessor.shutdownTimeout"; /** * Incoming requests. */ protected LinkedBlockingQueue<Request> queuedRequests = new LinkedBlockingQueue<Request>(); /** * Requests that have been committed. */ protected final LinkedBlockingQueue<Request> committedRequests = new LinkedBlockingQueue<Request>(); /** * Requests that we are holding until commit comes in. Keys represent * session ids, each value is a linked list of the session's requests. */ protected final HashMap<Long, LinkedList<Request>> pendingRequests = new HashMap<Long, LinkedList<Request>>(10000); /** The number of requests currently being processed */ protected final AtomicInteger numRequestsProcessing = new AtomicInteger(0); RequestProcessor nextProcessor; /** For testing purposes, we use a separated stopping condition for the * outer loop.*/ protected volatile boolean stoppedMainLoop = true; protected volatile boolean stopped = true; private long workerShutdownTimeoutMS; protected WorkerService workerPool; private Object emptyPoolSync = new Object(); /** * This flag indicates whether we need to wait for a response to come back * from the leader or we just let the sync operation flow through like a * read. The flag will be true if the CommitProcessor is in a Leader * pipeline. */ boolean matchSyncs; public CommitProcessor(RequestProcessor nextProcessor, String id, boolean matchSyncs, ZooKeeperServerListener listener) { super("CommitProcessor:" + id, listener); this.nextProcessor = nextProcessor; this.matchSyncs = matchSyncs; } private boolean isProcessingRequest() { return numRequestsProcessing.get() != 0; } protected boolean needCommit(Request request) { switch (request.type) { case OpCode.create: case OpCode.create2: case OpCode.createTTL: case OpCode.createContainer: case OpCode.delete: case OpCode.deleteContainer: case OpCode.setData: case OpCode.reconfig: case OpCode.multi: case OpCode.setACL: return true; case OpCode.sync: return matchSyncs; case OpCode.createSession: case OpCode.closeSession: return !request.isLocalSession(); default: return false; } } @Override public void run() { try { /* * In each iteration of the following loop we process at most * requestsToProcess requests of queuedRequests. We have to limit * the number of request we poll from queuedRequests, since it is * possible to endlessly poll read requests from queuedRequests, and * that will lead to a starvation of non-local committed requests. */ int requestsToProcess = 0; boolean commitIsWaiting = false; do { /* * Since requests are placed in the queue before being sent to * the leader, if commitIsWaiting = true, the commit belongs to * the first update operation in the queuedRequests or to a * request from a client on another server (i.e., the order of * the following two lines is important!). 
*/ commitIsWaiting = !committedRequests.isEmpty(); requestsToProcess = queuedRequests.size(); // Avoid sync if we have something to do if (requestsToProcess == 0 && !commitIsWaiting){ // Waiting for requests to process synchronized (this) { while (!stopped && requestsToProcess == 0 && !commitIsWaiting) { wait(); commitIsWaiting = !committedRequests.isEmpty(); requestsToProcess = queuedRequests.size(); } } } /* * Processing up to requestsToProcess requests from the incoming * queue (queuedRequests), possibly less if a committed request * is present along with a pending local write. After the loop, * we process one committed request if commitIsWaiting. */ Request request = null; while (!stopped && requestsToProcess > 0 && (request = queuedRequests.poll()) != null) { requestsToProcess--; if (needCommit(request) || pendingRequests.containsKey(request.sessionId)) { // Add request to pending LinkedList<Request> requests = pendingRequests .get(request.sessionId); if (requests == null) { requests = new LinkedList<Request>(); pendingRequests.put(request.sessionId, requests); } requests.addLast(request); } else { sendToNextProcessor(request); } /* * Stop feeding the pool if there is a local pending update * and a committed request that is ready. Once we have a * pending request with a waiting committed request, we know * we can process the committed one. This is because commits * for local requests arrive in the order they appeared in * the queue, so if we have a pending request and a * committed request, the committed request must be for that * pending write or for a write originating at a different * server. */ if (!pendingRequests.isEmpty() && !committedRequests.isEmpty()){ /* * We set commitIsWaiting so that we won't check * committedRequests again. */ commitIsWaiting = true; break; } } // Handle a single committed request if (commitIsWaiting && !stopped){ waitForEmptyPool(); if (stopped){ return; } // Process committed head if ((request = committedRequests.poll()) == null) { throw new IOException("Error: committed head is null"); } /* * Check if request is pending, if so, update it with the * committed info */ LinkedList<Request> sessionQueue = pendingRequests .get(request.sessionId); if (sessionQueue != null) { // If session queue != null, then it is also not empty. Request topPending = sessionQueue.poll(); if (request.cxid != topPending.cxid) { LOG.error( "Got cxid 0x" + Long.toHexString(request.cxid) + " expected 0x" + Long.toHexString( topPending.cxid) + " for client session id " + Long.toHexString(request.sessionId)); throw new IOException("Error: unexpected cxid for" + "client session"); } /* * We want to send our version of the request. the * pointer to the connection in the request */ topPending.setHdr(request.getHdr()); topPending.setTxn(request.getTxn()); topPending.zxid = request.zxid; request = topPending; } sendToNextProcessor(request); waitForEmptyPool(); /* * Process following reads if any, remove session queue if * empty. 
*/ if (sessionQueue != null) { while (!stopped && !sessionQueue.isEmpty() && !needCommit(sessionQueue.peek())) { sendToNextProcessor(sessionQueue.poll()); } // Remove empty queues if (sessionQueue.isEmpty()) { pendingRequests.remove(request.sessionId); } } } } while (!stoppedMainLoop); } catch (Throwable e) { handleException(this.getName(), e); } LOG.info("CommitProcessor exited loop!"); } private void waitForEmptyPool() throws InterruptedException { synchronized(emptyPoolSync) { while ((!stopped) && isProcessingRequest()) { emptyPoolSync.wait(); } } } @Override public void start() { int numCores = Runtime.getRuntime().availableProcessors(); int numWorkerThreads = Integer.getInteger( ZOOKEEPER_COMMIT_PROC_NUM_WORKER_THREADS, numCores); workerShutdownTimeoutMS = Long.getLong( ZOOKEEPER_COMMIT_PROC_SHUTDOWN_TIMEOUT, 5000); LOG.info("Configuring CommitProcessor with " + (numWorkerThreads > 0 ? numWorkerThreads : "no") + " worker threads."); if (workerPool == null) { workerPool = new WorkerService( "CommitProcWork", numWorkerThreads, true); } stopped = false; stoppedMainLoop = false; super.start(); } /** * Schedule final request processing; if a worker thread pool is not being * used, processing is done directly by this thread. */ private void sendToNextProcessor(Request request) { numRequestsProcessing.incrementAndGet(); workerPool.schedule(new CommitWorkRequest(request), request.sessionId); } /** * CommitWorkRequest is a small wrapper class to allow * downstream processing to be run using the WorkerService */ private class CommitWorkRequest extends WorkerService.WorkRequest { private final Request request; CommitWorkRequest(Request request) { this.request = request; } @Override public void cleanup() { if (!stopped) { LOG.error("Exception thrown by downstream processor," + " unable to continue."); CommitProcessor.this.halt(); } } public void doWork() throws RequestProcessorException { try { nextProcessor.processRequest(request); } finally { if (numRequestsProcessing.decrementAndGet() == 0){ wakeupOnEmpty(); } } } } synchronized private void wakeup() { notifyAll(); } private void wakeupOnEmpty() { synchronized(emptyPoolSync){ emptyPoolSync.notifyAll(); } } public void commit(Request request) { if (stopped || request == null) { return; } if (LOG.isDebugEnabled()) { LOG.debug("Committing request:: " + request); } committedRequests.add(request); wakeup(); } public void processRequest(Request request) { if (stopped) { return; } if (LOG.isDebugEnabled()) { LOG.debug("Processing request:: " + request); } queuedRequests.add(request); wakeup(); } private void halt() { stoppedMainLoop = true; stopped = true; wakeupOnEmpty(); wakeup(); queuedRequests.clear(); if (workerPool != null) { workerPool.stop(); } } public void shutdown() { LOG.info("Shutting down"); halt(); if (workerPool != null) { workerPool.join(workerShutdownTimeoutMS); } if (nextProcessor != null) { nextProcessor.shutdown(); } } }
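/*
 * Illustrative sketch (not ZooKeeper code): the scheduling rule enforced by
 * CommitProcessor.run() above -- reads flow straight through unless an earlier
 * write from the same session is still waiting for its commit, and an arriving
 * commit is matched against the head of that session's pending queue by cxid --
 * can be modelled in a few lines of plain Java. All names below
 * (CommitOrderingSketch, Req, process, commit) are invented for this example.
 */
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CommitOrderingSketch {

    record Req(long sessionId, long cxid, boolean isWrite) {}

    private final Map<Long, Deque<Req>> pending = new HashMap<>();
    private final List<Req> dispatched = new ArrayList<>();

    // Mirrors the queuedRequests path: a read passes through immediately unless
    // the same session already has a write parked and waiting for its commit.
    void process(Req r) {
        Deque<Req> q = pending.get(r.sessionId());
        if (r.isWrite() || q != null) {
            pending.computeIfAbsent(r.sessionId(), k -> new ArrayDeque<>()).addLast(r);
        } else {
            dispatched.add(r);          // nothing pending for this session: send downstream
        }
    }

    // Mirrors the committedRequests path: the commit must match the head of the
    // session's pending queue (same cxid); trailing reads then drain behind it.
    void commit(long sessionId, long cxid) {
        Deque<Req> q = pending.get(sessionId);
        if (q == null) {                // write originated on another server
            dispatched.add(new Req(sessionId, cxid, true));
            return;
        }
        Req head = q.pollFirst();
        if (head.cxid() != cxid) {
            throw new IllegalStateException("unexpected cxid " + cxid + ", expected " + head.cxid());
        }
        dispatched.add(head);
        while (!q.isEmpty() && !q.peekFirst().isWrite()) {
            dispatched.add(q.pollFirst());   // reads that were queued behind the write
        }
        if (q.isEmpty()) {
            pending.remove(sessionId);
        }
    }

    public static void main(String[] args) {
        CommitOrderingSketch s = new CommitOrderingSketch();
        s.process(new Req(1, 1, false));  // read, dispatched immediately
        s.process(new Req(1, 2, true));   // write, parked until its commit arrives
        s.process(new Req(1, 3, false));  // read, must wait behind the pending write
        s.commit(1, 2);                   // releases the write and the trailing read
        System.out.println(s.dispatched);
    }
}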
package org.drools.rule.builder.dialect.java; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.util.*; import org.drools.base.TypeResolver; import org.drools.builder.KnowledgeBuilderResult; import org.drools.commons.jci.compilers.CompilationResult; import org.drools.commons.jci.compilers.JavaCompiler; import org.drools.commons.jci.compilers.JavaCompilerFactory; import org.drools.commons.jci.compilers.JavaCompilerSettings; import org.drools.commons.jci.problems.CompilationProblem; import org.drools.commons.jci.readers.MemoryResourceReader; import org.drools.compiler.AnalysisResult; import org.drools.compiler.BoundIdentifiers; import org.drools.compiler.DescrBuildError; import org.drools.compiler.Dialect; import org.drools.compiler.PackageBuilder; import org.drools.compiler.PackageRegistry; import org.drools.compiler.PackageBuilder.ErrorHandler; import org.drools.compiler.PackageBuilder.FunctionErrorHandler; import org.drools.compiler.PackageBuilder.RuleErrorHandler; import org.drools.compiler.PackageBuilder.RuleInvokerErrorHandler; import org.drools.compiler.PackageBuilder.SrcErrorHandler; import org.drools.core.util.StringUtils; import org.drools.io.Resource; import org.drools.io.internal.InternalResource; import org.drools.lang.descr.AccumulateDescr; import org.drools.lang.descr.AndDescr; import org.drools.lang.descr.BaseDescr; import org.drools.lang.descr.CollectDescr; import org.drools.lang.descr.EntryPointDescr; import org.drools.lang.descr.EvalDescr; import org.drools.lang.descr.ExistsDescr; import org.drools.lang.descr.ForallDescr; import org.drools.lang.descr.FromDescr; import org.drools.lang.descr.FunctionDescr; import org.drools.lang.descr.ImportDescr; import org.drools.lang.descr.NotDescr; import org.drools.lang.descr.OrDescr; import org.drools.lang.descr.PatternDescr; import org.drools.lang.descr.ProcessDescr; import org.drools.lang.descr.QueryDescr; import org.drools.lang.descr.RuleDescr; import org.drools.lang.descr.WindowReferenceDescr; import org.drools.rule.Function; import org.drools.rule.JavaDialectRuntimeData; import org.drools.rule.LineMappings; import org.drools.rule.Package; import org.drools.rule.Rule; import org.drools.rule.builder.AccumulateBuilder; import org.drools.rule.builder.CollectBuilder; import org.drools.rule.builder.ConsequenceBuilder; import org.drools.rule.builder.EnabledBuilder; import org.drools.rule.builder.EngineElementBuilder; import org.drools.rule.builder.EntryPointBuilder; import org.drools.rule.builder.ForallBuilder; import org.drools.rule.builder.FromBuilder; import org.drools.rule.builder.FunctionBuilder; import org.drools.rule.builder.GroupElementBuilder; import org.drools.rule.builder.PackageBuildContext; import org.drools.rule.builder.PatternBuilder; import org.drools.rule.builder.PredicateBuilder; import org.drools.rule.builder.QueryBuilder; import org.drools.rule.builder.ReturnValueBuilder; import org.drools.rule.builder.RuleBuildContext; import org.drools.rule.builder.RuleClassBuilder; import org.drools.rule.builder.RuleConditionBuilder; import org.drools.rule.builder.SalienceBuilder; import org.drools.rule.builder.WindowReferenceBuilder; import org.drools.rule.builder.dialect.asm.*; import org.drools.rule.builder.dialect.mvel.MVELEnabledBuilder; import org.drools.rule.builder.dialect.mvel.MVELFromBuilder; import org.drools.rule.builder.dialect.mvel.MVELSalienceBuilder; import static org.drools.rule.builder.dialect.DialectUtil.getUniqueLegalName; public 
class JavaDialect implements Dialect {

    public static final String ID = "java";

    private final static String EXPRESSION_DIALECT_NAME = "mvel";

    // builders
    private static final PatternBuilder PATTERN_BUILDER = new PatternBuilder();
    private static final QueryBuilder QUERY_BUILDER = new QueryBuilder();
    private static final SalienceBuilder SALIENCE_BUILDER = new MVELSalienceBuilder();
    private static final EnabledBuilder ENABLED_BUILDER = new MVELEnabledBuilder();
    private static final JavaAccumulateBuilder ACCUMULATE_BUILDER = new JavaAccumulateBuilder();

    // private static final RuleConditionBuilder EVAL_BUILDER = new JavaEvalBuilder();
    // private static final RuleConditionBuilder EVAL_BUILDER = new ASMEvalBuilder();
    private static final RuleConditionBuilder EVAL_BUILDER = new ASMEvalStubBuilder();

    // private static final PredicateBuilder PREDICATE_BUILDER = new JavaPredicateBuilder();
    // private static final PredicateBuilder PREDICATE_BUILDER = new ASMPredicateBuilder();
    private static final PredicateBuilder PREDICATE_BUILDER = new ASMPredicateStubBuilder();

    // private static final ReturnValueBuilder RETURN_VALUE_BUILDER = new JavaReturnValueBuilder();
    // private static final ReturnValueBuilder RETURN_VALUE_BUILDER = new ASMReturnValueBuilder();
    private static final ReturnValueBuilder RETURN_VALUE_BUILDER = new ASMReturnValueStubBuilder();

    // private static final ConsequenceBuilder CONSEQUENCE_BUILDER = new JavaConsequenceBuilder();
    // private static final ConsequenceBuilder CONSEQUENCE_BUILDER = new ASMConsequenceBuilder();
    private static final ConsequenceBuilder CONSEQUENCE_BUILDER = new ASMConsequenceStubBuilder();

    private static final JavaRuleClassBuilder RULE_CLASS_BUILDER = new JavaRuleClassBuilder();
    private static final MVELFromBuilder FROM_BUILDER = new MVELFromBuilder();
    private static final JavaFunctionBuilder FUNCTION_BUILDER = new JavaFunctionBuilder();
    private static final CollectBuilder COLLECT_BUIDER = new CollectBuilder();
    private static final ForallBuilder FORALL_BUILDER = new ForallBuilder();
    private static final EntryPointBuilder ENTRY_POINT_BUILDER = new EntryPointBuilder();
    private static final WindowReferenceBuilder WINDOW_REFERENCE_BUILDER = new WindowReferenceBuilder();
    private static final GroupElementBuilder GE_BUILDER = new GroupElementBuilder();

    // a map of registered builders
    private static Map<Class<?>, EngineElementBuilder> builders;

    static {
        initBuilder();
    }

    // expression analyzer used by analyzeExpression() and analyzeBlock() below;
    // it must be declared (not commented out) for this class to compile
    private static final JavaExprAnalyzer analyzer = new JavaExprAnalyzer();

    private final JavaDialectConfiguration configuration;
    private JavaCompiler compiler;
    private final Package pkg;
    private final List<String> generatedClassList;
    private final MemoryResourceReader src;
    private final PackageStore packageStoreWrapper;
    private final Map<String, ErrorHandler> errorHandlers;
    private final List<KnowledgeBuilderResult> results;
    private final PackageBuilder packageBuilder;
    private final PackageRegistry packageRegistry;

    public JavaDialect(PackageBuilder builder,
                       PackageRegistry pkgRegistry,
                       Package pkg) {
        this.packageBuilder = builder;
        this.pkg = pkg;
        this.packageRegistry = pkgRegistry;
        this.configuration = (JavaDialectConfiguration) builder.getPackageBuilderConfiguration().getDialectConfiguration( "java" );

        this.errorHandlers = new HashMap<String, ErrorHandler>();
        this.results = new ArrayList<KnowledgeBuilderResult>();
        this.src = new MemoryResourceReader();
        this.generatedClassList = new ArrayList<String>();

        JavaDialectRuntimeData data;

        // initialize the dialect runtime data if it doesn't already exist
        if (
pkg.getDialectRuntimeRegistry().getDialectData( ID ) == null ) { data = new JavaDialectRuntimeData(); this.pkg.getDialectRuntimeRegistry().setDialectData( ID, data ); data.onAdd( this.pkg.getDialectRuntimeRegistry(), this.packageBuilder.getRootClassLoader() ); } else { data = (JavaDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData( ID ); } this.packageStoreWrapper = new PackageStore( data, this.results ); loadCompiler(); } public static synchronized void initBuilder() { if ( builders != null ) { return; } // statically adding all builders to the map // but in the future we can move that to a configuration // if we want to builders = new HashMap<Class<?>, EngineElementBuilder>(); builders.put( CollectDescr.class, COLLECT_BUIDER ); builders.put( ForallDescr.class, FORALL_BUILDER ); builders.put( AndDescr.class, GE_BUILDER ); builders.put( OrDescr.class, GE_BUILDER ); builders.put( NotDescr.class, GE_BUILDER ); builders.put( ExistsDescr.class, GE_BUILDER ); builders.put( PatternDescr.class, PATTERN_BUILDER ); builders.put( QueryDescr.class, QUERY_BUILDER ); builders.put( FromDescr.class, FROM_BUILDER ); builders.put( AccumulateDescr.class, ACCUMULATE_BUILDER ); builders.put( EvalDescr.class, EVAL_BUILDER ); builders.put( EntryPointDescr.class, ENTRY_POINT_BUILDER ); builders.put( WindowReferenceDescr.class, WINDOW_REFERENCE_BUILDER ); } public Map<Class<?>, EngineElementBuilder> getBuilders() { return builders; } public void init(final RuleDescr ruleDescr) { final String ruleClassName = getUniqueLegalName( this.pkg.getName(), ruleDescr.getName(), "java", "Rule", this.src ); ruleDescr.setClassName( StringUtils.ucFirst( ruleClassName ) ); } public void init(final ProcessDescr processDescr) { final String processDescrClassName = getUniqueLegalName( this.pkg.getName(), processDescr.getName(), "java", "Process", this.src ); processDescr.setClassName( StringUtils.ucFirst( processDescrClassName ) ); } public String getExpressionDialectName() { return EXPRESSION_DIALECT_NAME; } public AnalysisResult analyzeExpression(final PackageBuildContext context, final BaseDescr descr, final Object content, final BoundIdentifiers availableIdentifiers) { JavaAnalysisResult result = null; try { result = analyzer.analyzeExpression((String) content, availableIdentifiers); } catch ( final Exception e ) { context.addError(new DescrBuildError(context.getParentDescr(), descr, e, "Unable to determine the used declarations.\n" + e)); } return result; } public AnalysisResult analyzeBlock(final PackageBuildContext context, final BaseDescr descr, final String text, final BoundIdentifiers availableIdentifiers) { JavaAnalysisResult result = null; try { result = analyzer.analyzeBlock( text, availableIdentifiers ); } catch ( final Exception e ) { context.addError( new DescrBuildError( context.getParentDescr(), descr, e, "Unable to determine the used declarations.\n" + e ) ); } return result; } /** * Returns the current type resolver instance * * @return */ public TypeResolver getTypeResolver() { return this.packageRegistry.getTypeResolver(); } public RuleConditionBuilder getBuilder(final Class clazz) { return (RuleConditionBuilder) builders.get( clazz ); } public PatternBuilder getPatternBuilder() { return PATTERN_BUILDER; } public QueryBuilder getQueryBuilder() { return QUERY_BUILDER; } public SalienceBuilder getSalienceBuilder() { return SALIENCE_BUILDER; } public EnabledBuilder getEnabledBuilder() { return ENABLED_BUILDER; } public AccumulateBuilder getAccumulateBuilder() { return ACCUMULATE_BUILDER; } public 
RuleConditionBuilder getEvalBuilder() { return EVAL_BUILDER; } public PredicateBuilder getPredicateBuilder() { return PREDICATE_BUILDER; } public ReturnValueBuilder getReturnValueBuilder() { return RETURN_VALUE_BUILDER; } public ConsequenceBuilder getConsequenceBuilder() { return CONSEQUENCE_BUILDER; } public RuleClassBuilder getRuleClassBuilder() { return RULE_CLASS_BUILDER; } public FunctionBuilder getFunctionBuilder() { return FUNCTION_BUILDER; } public FromBuilder getFromBuilder() { return FROM_BUILDER; } public EntryPointBuilder getEntryPointBuilder() { return ENTRY_POINT_BUILDER; } /** * This actually triggers the compiling of all the resources. * Errors are mapped back to the element that originally generated the semantic * code. */ public void compileAll() { if ( this.generatedClassList.isEmpty() ) { return; } final String[] classes = new String[this.generatedClassList.size()]; this.generatedClassList.toArray( classes ); File dumpDir = this.configuration.getPackageBuilderConfiguration().getDumpDir(); if ( dumpDir != null ) { dumpResources( classes, dumpDir ); } final CompilationResult result = this.compiler.compile( classes, this.src, this.packageStoreWrapper, this.packageBuilder.getRootClassLoader() ); //this will sort out the errors based on what class/file they happened in if ( result.getErrors().length > 0 ) { for ( int i = 0; i < result.getErrors().length; i++ ) { final CompilationProblem err = result.getErrors()[i]; final ErrorHandler handler = this.errorHandlers.get( err.getFileName() ); handler.addError( err ); } final Collection errors = this.errorHandlers.values(); for (Object error : errors) { final ErrorHandler handler = (ErrorHandler) error; if (handler.isInError()) { this.results.add(handler.getError()); } } } // We've compiled everthing, so clear it for the next set of additions this.generatedClassList.clear(); } /** * @param classes * @param dumpDir * @throws IOException * @throws FileNotFoundException */ private void dumpResources(final String[] classes, File dumpDir) { for (String aClass : classes) { File target = new File(dumpDir, aClass); FileOutputStream out = null; try { File parent = target.getParentFile(); if (parent != null && !parent.exists()) { parent.mkdirs(); } target.createNewFile(); out = new FileOutputStream(target); out.write(this.src.getBytes(aClass)); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { if (out != null) try { out.close(); } catch (Exception e) { } } } } /** * This will add the rule for compiling later on. * It will not actually call the compiler */ public void addRule(final RuleBuildContext context) { final Rule rule = context.getRule(); final RuleDescr ruleDescr = context.getRuleDescr(); RuleClassBuilder classBuilder = context.getDialect().getRuleClassBuilder(); String ruleClass = classBuilder.buildRule( context ); // return if there is no ruleclass name; if ( ruleClass == null ) { return; } // The compilation result is for the entire rule, so difficult to associate with any descr addClassCompileTask( this.pkg.getName() + "." 
+ ruleDescr.getClassName(), ruleDescr, ruleClass, this.src, new RuleErrorHandler( ruleDescr, rule, "Rule Compilation error" ) ); JavaDialectRuntimeData data = (JavaDialectRuntimeData) this.pkg.getDialectRuntimeRegistry().getDialectData( ID ); for ( Map.Entry<String, String> invokers : context.getInvokers().entrySet() ) { final String className = invokers.getKey(); // Check if an invoker - returnvalue, predicate, eval or consequence has been associated // If so we add it to the PackageCompilationData as it will get wired up on compilation final Object invoker = context.getInvokerLookups().get( className ); if ( invoker != null ) { data.putInvoker( className, invoker ); } final String text = invokers.getValue(); final BaseDescr descr = (BaseDescr) context.getDescrLookups().get( className ); addClassCompileTask( className, descr, text, this.src, new RuleInvokerErrorHandler( descr, rule, "Unable to generate rule invoker." ) ); } // setup the line mappins for this rule final String name = this.pkg.getName() + "." + StringUtils.ucFirst( ruleDescr.getClassName() ); final LineMappings mapping = new LineMappings( name ); mapping.setStartLine( ruleDescr.getConsequenceLine() ); mapping.setOffset( ruleDescr.getConsequenceOffset() ); this.pkg.getDialectRuntimeRegistry().getLineMappings().put( name, mapping ); } public void addFunction(final FunctionDescr functionDescr, final TypeResolver typeResolver, final Resource resource) { //System.out.println( functionDescr + " : " + typeResolver ); final String functionClassName = this.pkg.getName() + "." + StringUtils.ucFirst( functionDescr.getName() ); functionDescr.setClassName( functionClassName ); this.pkg.addStaticImport( functionClassName + "." + functionDescr.getName() ); Function function = new Function( functionDescr.getNamespace(), functionDescr.getName(), ID ); if ( resource != null && ((InternalResource) resource).hasURL() ) { function.setResource( resource ); } this.pkg.addFunction( function ); final String functionSrc = getFunctionBuilder().build( this.pkg, functionDescr, typeResolver, this.pkg.getDialectRuntimeRegistry().getLineMappings(), this.results ); addClassCompileTask( functionClassName, functionDescr, functionSrc, this.src, new FunctionErrorHandler( functionDescr, "Function Compilation error" ) ); final LineMappings mapping = new LineMappings( functionClassName ); mapping.setStartLine( functionDescr.getLine() ); mapping.setOffset( functionDescr.getOffset() ); this.pkg.getDialectRuntimeRegistry().getLineMappings().put( functionClassName, mapping ); } public void preCompileAddFunction(FunctionDescr functionDescr, TypeResolver typeResolver) { final String functionClassName = this.pkg.getName() + "." + StringUtils.ucFirst( functionDescr.getName() ); this.pkg.addStaticImport( functionClassName + "." + functionDescr.getName() ); } public void postCompileAddFunction(FunctionDescr functionDescr, TypeResolver typeResolver) { final String functionClassName = this.pkg.getName() + "." + StringUtils.ucFirst( functionDescr.getName() ); ImportDescr importDescr = new ImportDescr(functionClassName + "." 
+ functionDescr.getName()); importDescr.setResource(functionDescr.getResource()); importDescr.setNamespace(functionDescr.getNamespace()); this.packageRegistry.addStaticImport( importDescr ); } public void addSrc(String resourceName, byte[] content) { src.add( resourceName, content ); this.errorHandlers.put( resourceName, new SrcErrorHandler( "Src compile error" ) ); addClassName( resourceName ); } /** * This adds a compile "task" for when the compiler of * semantics (JCI) is called later on with compileAll()\ * which actually does the compiling. * The ErrorHandler is required to map the errors back to the * element that caused it. */ public void addClassCompileTask(final String className, final BaseDescr descr, final String text, final MemoryResourceReader src, final ErrorHandler handler) { final String fileName = className.replace( '.', '/' ) + ".java"; if (src != null) { src.add( fileName, text.getBytes() ); } else { this.src.add( fileName, text.getBytes() ); } this.errorHandlers.put( fileName, handler ); addClassName( fileName ); } public void addClassName(final String className) { this.generatedClassList.add( className ); } private void loadCompiler() { switch ( this.configuration.getCompiler() ) { case JavaDialectConfiguration.JANINO : { this.compiler = JavaCompilerFactory.getInstance().createCompiler( "janino" ); break; } case JavaDialectConfiguration.ECLIPSE : default : { this.compiler = JavaCompilerFactory.getInstance().createCompiler( "eclipse" ); JavaCompilerSettings settings = this.compiler.createDefaultSettings(); String lngLevel = this.configuration.getJavaLanguageLevel(); settings.setTargetVersion( lngLevel ); settings.setSourceVersion( lngLevel ); break; } } } public void addImport(ImportDescr importDescr) { // we don't need to do anything here } public void addStaticImport(ImportDescr importDescr) { // we don't need to do anything here } public List<KnowledgeBuilderResult> getResults() { return this.results; } public String getId() { return ID; } public PackageRegistry getPackageRegistry() { return this.packageRegistry; } }
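/*
 * Illustrative sketch (not Drools/JCI code): the essence of addClassCompileTask()
 * and compileAll() above is "queue each generated source under a deterministic
 * file name, compile everything in one batch, and route every compiler error back
 * to the handler registered for that file". The sketch below models that flow with
 * invented names (BatchCompileSketch, MemorySource, ErrorSink, fakeCompile); the
 * real dialect delegates to the JCI JavaCompiler and its CompilationResult.
 */
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BatchCompileSketch {

    static final class MemorySource {
        final Map<String, byte[]> files = new HashMap<>();
        void add(String fileName, byte[] content) { files.put(fileName, content); }
    }

    interface ErrorSink { void addError(String fileName, String message); }

    private final MemorySource src = new MemorySource();
    private final Map<String, ErrorSink> errorHandlers = new HashMap<>();
    private final List<String> generated = new ArrayList<>();

    // Same naming rule as addClassCompileTask(): com.acme.Foo -> com/acme/Foo.java
    void addClassCompileTask(String className, String text, ErrorSink handler) {
        String fileName = className.replace('.', '/') + ".java";
        src.add(fileName, text.getBytes(StandardCharsets.UTF_8));
        errorHandlers.put(fileName, handler);
        generated.add(fileName);
    }

    // Stand-in for the JCI compiler call: pretend every empty file fails to compile.
    private Map<String, String> fakeCompile(List<String> fileNames) {
        Map<String, String> errors = new HashMap<>();
        for (String f : fileNames) {
            if (src.files.get(f).length == 0) {
                errors.put(f, "empty compilation unit");
            }
        }
        return errors;
    }

    void compileAll() {
        if (generated.isEmpty()) return;
        Map<String, String> errors = fakeCompile(generated);
        // Route each error back to the handler registered for its file, exactly as
        // compileAll() does with CompilationProblem.getFileName().
        errors.forEach((file, msg) -> errorHandlers.get(file).addError(file, msg));
        generated.clear();               // ready for the next batch of additions
    }

    public static void main(String[] args) {
        BatchCompileSketch d = new BatchCompileSketch();
        d.addClassCompileTask("com.acme.Rule_0", "class Rule_0 {}", (f, m) -> System.out.println(f + ": " + m));
        d.addClassCompileTask("com.acme.Broken", "", (f, m) -> System.out.println(f + ": " + m));
        d.compileAll();                  // prints only the error for com/acme/Broken.java
    }
}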
package com.cgi.seminar.web.rest; import com.codahale.metrics.annotation.Timed; import com.cgi.seminar.domain.Authority; import com.cgi.seminar.domain.PersistentToken; import com.cgi.seminar.domain.User; import com.cgi.seminar.repository.PersistentTokenRepository; import com.cgi.seminar.repository.UserRepository; import com.cgi.seminar.security.SecurityUtils; import com.cgi.seminar.service.MailService; import com.cgi.seminar.service.UserService; import com.cgi.seminar.web.rest.dto.KeyAndPasswordDTO; import com.cgi.seminar.web.rest.dto.UserDTO; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.validation.Valid; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.*; import java.util.stream.Collectors; /** * REST controller for managing the current user's account. */ @RestController @RequestMapping("/api") public class AccountResource { private final Logger log = LoggerFactory.getLogger(AccountResource.class); @Inject private UserRepository userRepository; @Inject private UserService userService; @Inject private PersistentTokenRepository persistentTokenRepository; @Inject private MailService mailService; /** * POST /register -> register the user. */ @RequestMapping(value = "/register", method = RequestMethod.POST, produces = MediaType.TEXT_PLAIN_VALUE) @Timed public ResponseEntity<?> registerAccount(@Valid @RequestBody UserDTO userDTO, HttpServletRequest request) { return userRepository.findOneByLogin(userDTO.getLogin()) .map(user -> new ResponseEntity<>("login already in use", HttpStatus.BAD_REQUEST)) .orElseGet(() -> userRepository.findOneByEmail(userDTO.getEmail()) .map(user -> new ResponseEntity<>("e-mail address already in use", HttpStatus.BAD_REQUEST)) .orElseGet(() -> { User user = userService.createUserInformation(userDTO.getLogin(), userDTO.getPassword(), userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail().toLowerCase(), userDTO.getLangKey()); String baseUrl = request.getScheme() + // "http" "://" + // "://" request.getServerName() + // "myhost" ":" + // ":" request.getServerPort(); // "80" mailService.sendActivationEmail(user, baseUrl); return new ResponseEntity<>(HttpStatus.CREATED); }) ); } /** * GET /activate -> activate the registered user. */ @RequestMapping(value = "/activate", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<String> activateAccount(@RequestParam(value = "key") String key) { return Optional.ofNullable(userService.activateRegistration(key)) .map(user -> new ResponseEntity<String>(HttpStatus.OK)) .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR)); } /** * GET /authenticate -> check if the user is authenticated, and return its login. */ @RequestMapping(value = "/authenticate", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public String isAuthenticated(HttpServletRequest request) { log.debug("REST request to check if the current user is authenticated"); return request.getRemoteUser(); } /** * GET /account -> get the current user. 
*/ @RequestMapping(value = "/account", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<UserDTO> getAccount() { return Optional.ofNullable(userService.getUserWithAuthorities()) .map(user -> new ResponseEntity<>(new UserDTO(user), HttpStatus.OK)) .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR)); } /** * POST /account -> update the current user information. */ @RequestMapping(value = "/account", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<String> saveAccount(@RequestBody UserDTO userDTO) { return userRepository .findOneByLogin(userDTO.getLogin()) .filter(u -> u.getLogin().equals(SecurityUtils.getCurrentLogin())) .map(u -> { userService.updateUserInformation(userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail(), userDTO.getLangKey()); return new ResponseEntity<String>(HttpStatus.OK); }) .orElseGet(() -> new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR)); } /** * POST /change_password -> changes the current user's password */ @RequestMapping(value = "/account/change_password", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<?> changePassword(@RequestBody String password) { if (!checkPasswordLength(password)) { return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST); } userService.changePassword(password); return new ResponseEntity<>(HttpStatus.OK); } /** * GET /account/sessions -> get the current open sessions. */ @RequestMapping(value = "/account/sessions", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<List<PersistentToken>> getCurrentSessions() { return userRepository.findOneByLogin(SecurityUtils.getCurrentLogin()) .map(user -> new ResponseEntity<>( persistentTokenRepository.findByUser(user), HttpStatus.OK)) .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR)); } /** * DELETE /account/sessions?series={series} -> invalidate an existing session. * * - You can only delete your own sessions, not any other user's session * - If you delete one of your existing sessions, and that you are currently logged in on that session, you will * still be able to use that session, until you quit your browser: it does not work in real time (there is * no API for that), it only removes the "remember me" cookie * - This is also true if you invalidate your current session: you will still be able to use it until you close * your browser or that the session times out. But automatic login (the "remember me" cookie) will not work * anymore. * There is an API to invalidate the current session, but there is no API to check which session uses which * cookie. 
*/ @RequestMapping(value = "/account/sessions/{series}", method = RequestMethod.DELETE) @Timed public void invalidateSession(@PathVariable String series) throws UnsupportedEncodingException { String decodedSeries = URLDecoder.decode(series, "UTF-8"); userRepository.findOneByLogin(SecurityUtils.getCurrentLogin()).ifPresent(u -> { persistentTokenRepository.findByUser(u).stream() .filter(persistentToken -> StringUtils.equals(persistentToken.getSeries(), decodedSeries)) .findAny().ifPresent(t -> persistentTokenRepository.delete(decodedSeries)); }); } @RequestMapping(value = "/account/reset_password/init", method = RequestMethod.POST, produces = MediaType.TEXT_PLAIN_VALUE) @Timed public ResponseEntity<?> requestPasswordReset(@RequestBody String mail, HttpServletRequest request) { return userService.requestPasswordReset(mail) .map(user -> { String baseUrl = request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort(); mailService.sendPasswordResetMail(user, baseUrl); return new ResponseEntity<>("e-mail was sent", HttpStatus.OK); }).orElse(new ResponseEntity<>("e-mail address not registered", HttpStatus.BAD_REQUEST)); } @RequestMapping(value = "/account/reset_password/finish", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE) @Timed public ResponseEntity<String> finishPasswordReset(@RequestBody KeyAndPasswordDTO keyAndPassword) { if (!checkPasswordLength(keyAndPassword.getNewPassword())) { return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST); } return userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey()) .map(user -> new ResponseEntity<String>(HttpStatus.OK)).orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR)); } private boolean checkPasswordLength(String password) { return (!StringUtils.isEmpty(password) && password.length() >= UserDTO.PASSWORD_MIN_LENGTH && password.length() <= UserDTO.PASSWORD_MAX_LENGTH); } }
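/*
 * Client-side sketch for the registration endpoint above, using only the JDK's
 * java.net.http client (Java 11+; the text block needs Java 15+). The host/port
 * and the exact JSON property names expected by UserDTO are assumptions; adjust
 * them to the real DTO and deployment. On success the controller returns
 * 201 CREATED; for a duplicate login or e-mail it returns 400 with a plain-text
 * reason, as registerAccount() above shows.
 */
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RegisterAccountClient {

    public static void main(String[] args) throws Exception {
        // Field names mirror the getters used in registerAccount(); verify against UserDTO.
        String json = """
                {
                  "login": "jdoe",
                  "password": "changeit1",
                  "firstName": "John",
                  "lastName": "Doe",
                  "email": "jdoe@example.com",
                  "langKey": "en"
                }""";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/register")) // assumed local dev server
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        System.out.println(response.statusCode() + " " + response.body());
    }
}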
package it.conteit.scoresmanager.gui.dialogs; import it.conteit.scoresmanager.control.ApplicationSystem; import it.conteit.scoresmanager.data.IDay; import it.conteit.scoresmanager.data.IScore; import it.conteit.scoresmanager.data.InconsistencyException; import it.conteit.scoresmanager.data.Partial; import it.conteit.scoresmanager.data.Penality; import it.conteit.scoresmanager.gui.valiators.AbstractApplicationDialog; import it.conteit.scoresmanager.gui.valiators.TextFieldNotEmptyValidator; import it.conteit.scoresmanager.gui.valiators.UniqueInScoresListValidator; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dialog; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.Frame; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextField; import javax.swing.border.EmptyBorder; import com.jgoodies.forms.factories.FormFactory; import com.jgoodies.forms.layout.ColumnSpec; import com.jgoodies.forms.layout.FormLayout; import com.jgoodies.forms.layout.RowSpec; public class ScoreNameInputDialog extends AbstractApplicationDialog { private static final long serialVersionUID = 5889667515995251164L; private final JPanel m_contentPanel = new JPanel(); private JTextField inputField; private JLabel warningsLabel; private JButton okButton; private IDay day; private IScore result = null; private boolean penality; private UniqueInScoresListValidator val; public ScoreNameInputDialog(){ this((Frame) null, null, false); } public ScoreNameInputDialog(Dialog owner, IDay day, boolean penality) { super(owner); this.penality = penality; createImportGrestInputDialog(day); } public ScoreNameInputDialog(Frame owner, IDay day, boolean penality) { super(owner); this.penality = penality; createImportGrestInputDialog(day); } /** * Create the dialog. 
* * @param serializer */ public void createImportGrestInputDialog(IDay day) { setTitle("Add new score"); this.day = day; addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { result = null; } }); setBounds(100, 100, 401, 160); setResizable(false); getContentPane().setLayout(new BorderLayout()); m_contentPanel.setLayout(new FormLayout( new ColumnSpec[] { ColumnSpec.decode("6dlu"), FormFactory.DEFAULT_COLSPEC, FormFactory.RELATED_GAP_COLSPEC, ColumnSpec.decode("default:grow"), FormFactory.RELATED_GAP_COLSPEC, FormFactory.DEFAULT_COLSPEC, }, new RowSpec[] { FormFactory.RELATED_GAP_ROWSPEC, FormFactory.DEFAULT_ROWSPEC, RowSpec.decode("8dlu"), RowSpec.decode("bottom:12dlu"), RowSpec.decode("5dlu"), RowSpec.decode("12dlu"), })); m_contentPanel.setBorder(new EmptyBorder(5, 5, 5, 5)); getContentPane().add(m_contentPanel, BorderLayout.CENTER); { JLabel lblTheImportedGrest = new JLabel("Insert new score d name:"); lblTheImportedGrest.setFont(new Font("Lucida Grande", Font.PLAIN, 11)); m_contentPanel.add(lblTheImportedGrest, "2, 2, 3, 1"); } { inputField = new JTextField(); inputField.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent e) { key_ev(e); } public void keyReleased(KeyEvent e) { key_ev(e); } }); inputField.setFont(new Font("Lucida Grande", Font.PLAIN, 11)); m_contentPanel.add(inputField, "2, 4, 3, 1"); } { warningsLabel = new JLabel("Invalid name"); warningsLabel .setIcon(new ImageIcon( ScoreNameInputDialog.class .getResource("/it/conteit/scoresmanager/gui/images/warn_ic.png"))); warningsLabel.setForeground(Color.RED); warningsLabel.setVisible(false); warningsLabel.setFont(new Font("Lucida Grande", Font.PLAIN, 11)); m_contentPanel.add(warningsLabel, "2, 6, 3, 1"); } { JPanel buttonPane = new JPanel(); buttonPane.setLayout(new FlowLayout(FlowLayout.RIGHT)); getContentPane().add(buttonPane, BorderLayout.SOUTH); { okButton = new JButton("OK"); okButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { ok_action(e); } }); // okButton.setFont(new Font("Lucida Grande", Font.PLAIN, 11)); okButton.setActionCommand("OK"); getRootPane().setDefaultButton(okButton); buttonPane.add(okButton); JButton cancelButton = new JButton("Cancel"); cancelButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { cancel_action(e); } }); // cancelButton.setFont(new Font("Lucida Grande", Font.PLAIN, // 11)); cancelButton.setActionCommand("Cancel"); buttonPane.add(cancelButton); } { JPanel panel = new JPanel(); buttonPane.add(panel); } } Dimension screensize = java.awt.Toolkit.getDefaultToolkit() .getScreenSize(); int xPos = new Double((screensize.getWidth() - 286) / 2).intValue(); int yPos = new Double((screensize.getHeight() / 2) - 200).intValue(); setLocation(xPos, yPos); setResizable(false); addValidator(new TextFieldNotEmptyValidator("Day Name", inputField)); val = new UniqueInScoresListValidator(day, inputField); addValidator(val); check(); } protected void key_ev(KeyEvent e) { check(); } protected void cancel_action(ActionEvent e) { result = null; setVisible(false); } protected void ok_action(ActionEvent e) { try { if (result == null) { if(penality){ result = Penality.create(inputField.getText(), day.teamCount()); } else { result = Partial.create(inputField.getText(), day.teamCount()); } } else { result.setDescription(inputField.getText()); } } catch (InconsistencyException e1) { ApplicationSystem.getInstance().logError("Cannot rename grest"); } setVisible(false); } @Override public void 
updateGUI(String[] validationResult, boolean isOk) {
        okButton.setEnabled(isOk);
        if (!isOk) {
            // join all validation messages into a single warning line
            StringBuilder res = new StringBuilder(validationResult[0]);
            for (int i = 1; i < validationResult.length; i++) {
                res.append("; ").append(validationResult[i]);
            }
            warningsLabel.setText(res.toString());
            warningsLabel.setToolTipText(res.toString());
        }
        warningsLabel.setVisible(!isOk);
    }

    public IScore getResult() {
        return result;
    }

    public void setScore(IScore score) {
        result = score;
        if (score != null) {
            // only dereference the score once we know it is non-null
            inputField.setText(score.getDescription());
            val.setOldValue(score.getDescription());
            setTitle("Rename score");
        } else {
            setTitle("Add new score");
        }
        check();
    }

    public static IScore showDialog(IDay day, boolean penality) {
        ScoreNameInputDialog dialog = new ScoreNameInputDialog((Frame) null, day, penality);
        dialog.setVisible(true);
        return dialog.getResult();
    }

    public static IScore showDialog(IDay day, IScore score) {
        ScoreNameInputDialog dialog = new ScoreNameInputDialog((Frame) null, day, false);
        dialog.setScore(score);
        dialog.setVisible(true);
        return dialog.getResult();
    }
}
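/*
 * Usage sketch for ScoreNameInputDialog. It assumes the application already has
 * an IDay instance (obtaining one is project-specific and stubbed out below) and
 * that the dialog is modal, so showDialog() returns only after the user confirms
 * or cancels. The demo class name and the obtainDaySomehow() helper are invented.
 */
import it.conteit.scoresmanager.data.IDay;
import it.conteit.scoresmanager.data.IScore;
import it.conteit.scoresmanager.gui.dialogs.ScoreNameInputDialog;

import javax.swing.SwingUtilities;

public class ScoreDialogDemo {

    public static void main(String[] args) {
        // Swing components must be created and shown on the Event Dispatch Thread.
        SwingUtilities.invokeLater(() -> {
            IDay day = obtainDaySomehow();
            // showDialog(day, false) creates a Partial score, showDialog(day, true) a Penality.
            // getResult() is null when the user cancels or closes the window.
            IScore created = ScoreNameInputDialog.showDialog(day, false);
            if (created != null) {
                System.out.println("Created score: " + created.getDescription());
            }
        });
    }

    // Placeholder: wire in however the application exposes its current day.
    private static IDay obtainDaySomehow() {
        throw new UnsupportedOperationException("provide an IDay instance");
    }
}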
/* * Copyright (c) 2015. Rick Hightower, Geoff Chandler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! */ package io.advantageous.qbit.proxy; import io.advantageous.boon.core.Sys; import io.advantageous.qbit.Factory; import io.advantageous.qbit.QBit; import io.advantageous.qbit.annotation.RequestMapping; import io.advantageous.qbit.message.MethodCall; import io.advantageous.qbit.message.Response; import io.advantageous.qbit.queue.Queue; import io.advantageous.qbit.queue.ReceiveQueue; import io.advantageous.qbit.queue.ReceiveQueueListener; import io.advantageous.qbit.queue.SendQueue; import io.advantageous.qbit.reactive.Callback; import io.advantageous.qbit.service.ServiceBundle; import io.advantageous.qbit.service.ServiceBundleBuilder; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import static io.advantageous.boon.core.Exceptions.die; import static io.advantageous.boon.core.IO.puts; /** * created by Richard on 9/30/14. */ public class ServiceProxyFactoryTest { volatile boolean ok; List<MethodCall<Object>> calls = new ArrayList<>(); ServiceBundle serviceBundle = new ServiceBundle() { Factory factory = QBit.factory(); @Override public String address() { return ""; } @Override public ServiceBundle addServiceObject(String address, Object object) { return null; } @Override public ServiceBundle addService(Object object) { return this; } @Override public Queue<Response<Object>> responses() { return null; } @Override public SendQueue<MethodCall<Object>> methodSendQueue() { return null; } @Override public void call(MethodCall<Object> methodCall) { calls.add(methodCall); } @Override public void call(List<MethodCall<Object>> methodCalls) { calls.addAll(methodCalls); } @Override public void flush() { } @Override public void flushSends() { } @Override public void stop() { } @Override public List<String> endPoints() { return null; } @Override public void startReturnHandlerProcessor(ReceiveQueueListener<Response<Object>> listener) { } @Override public void startReturnHandlerProcessor() { } @Override public <T> T createLocalProxy(Class<T> serviceInterface, String serviceName) { return factory.createLocalProxy(serviceInterface, serviceName, this); } }; boolean calledMethod1; boolean calledMethod2; @Test public void testProxySimpleNoArg() { final SomeInterface myService = serviceBundle.createLocalProxy(SomeInterface.class, "myService"); myService.method1(); } @Test public void testProxySimpleTwoArg() { final SomeInterface myService = serviceBundle.createLocalProxy(SomeInterface.class, "myService"); calls.clear(); myService.method2("Hello", 5); } @Test public void testProxySimpleServiceBundle() { final SomeInterface myService = serviceBundle.createLocalProxy(SomeInterface.class, "myService"); calls.clear(); myService.method2("Hello", 5); boolean found = false; for (MethodCall<Object> call : calls) { if (call.name().equals("method2")) { final Object body = call.body(); 
puts(body); ok = body != null || die(); ok = body.getClass().isArray() || die(); ok = body.getClass().equals(Object[].class); Object[] args = (Object[]) body; String arg1 = (String) args[0]; ok = arg1.equals("Hello") || die(); int i = (int) args[1]; ok = i == 5 || die(); found = true; } } ok = found || die(); } @Test public void callingActualService() { SomeInterface myService = new SomeInterface() { @Override public void method1() { } @Override public void method2(String hi, int amount) { } @Override public String method3(String hi, int amount) { return null; } }; final ServiceBundle bundle = new ServiceBundleBuilder().setAddress("/root").buildAndStart(); bundle.addServiceObject("myService", myService); final SomeInterface myServiceProxy = bundle.createLocalProxy(SomeInterface.class, "myService"); myServiceProxy.method2("hi", 5); Sys.sleep(1000); } //@Test TODO fails sometimes during build but not always public void callingActualServiceWithReturn() { @RequestMapping("myService") class MyServiceClass implements SomeInterface { @Override public void method1() { } @Override public void method2(String hi, int amount) { } @Override public String method3(String hi, int amount) { return "Hi" + hi + " " + amount; } } SomeInterface myService = new MyServiceClass(); final ServiceBundle bundle = new ServiceBundleBuilder().setAddress("/root").buildAndStart(); bundle.addService(myService); final ReceiveQueue<Response<Object>> responseReceiveQueue = bundle.responses().receiveQueue(); final SomeInterface myServiceProxy = bundle.createLocalProxy( SomeInterface.class, "myService"); myServiceProxy.method3("hi", 5); bundle.flush(); Sys.sleep(1000); final Response<Object> objectResponse = responseReceiveQueue.pollWait(); objectResponse.address(); puts(objectResponse.body()); ok = "Hihi 5".equals(objectResponse.body()) || die(); } @Test public void callingActualServiceWithReturnDifferentInterfaces() { @RequestMapping("myService") class MyServiceClass implements SomeInterface { @Override public void method1() { } @Override public void method2(String hi, int amount) { } @Override public String method3(String hi, int amount) { return "Hi" + hi + " " + amount; } } SomeInterface myService = new MyServiceClass(); final ServiceBundle bundle = new ServiceBundleBuilder().setAddress("/root").buildAndStart(); bundle.addService(myService); bundle.startReturnHandlerProcessor(); final MyServiceInterfaceForClient myServiceProxy = bundle.createLocalProxy( MyServiceInterfaceForClient.class, "myService"); ok = false; Callback<String> returnHandler = new Callback<String>() { @Override public void accept(String returnValue) { puts("We got", returnValue); ok = "Hihi 5".equals(returnValue); } }; myServiceProxy.method3(returnHandler, "hi", 5); bundle.flush(); Sys.sleep(1000); ok = ok || die(); } //@Test This test randomly fails the build public void callingServicesThatThrowExceptions() { @RequestMapping("myService") class MyServiceClass { public String methodThrowsExceptionIf5(String hi, int amount) { if (amount == 5) { return die(String.class, "Hi " + hi + " " + amount); } else { return "Hi " + hi + " " + amount; } } } MyServiceClass myService = new MyServiceClass(); final ServiceBundle serviceBundle = new ServiceBundleBuilder().setAddress("/root").buildAndStart(); serviceBundle.addService(myService); serviceBundle.startReturnHandlerProcessor(); final ClientInterfaceThrowsException myServiceProxy = serviceBundle.createLocalProxy( ClientInterfaceThrowsException.class, "myService"); ok = false; AtomicBoolean wasError = new 
AtomicBoolean(); final Callback<String> handler = new Callback<String>() { @Override public void accept(String s) { ok = true; } @Override public void onError(Throwable error) { puts("We got", error.getMessage()); ok = "Hi hi 5".equals(error.getMessage()); wasError.set(true); } }; myServiceProxy.methodThrowsExceptionIf5(handler, "hi", 6); serviceBundle.flush(); Sys.sleep(5000); ok = ok || die(); ok = !wasError.get() || die(); ok = false; wasError.set(false); Sys.sleep(100); myServiceProxy.methodThrowsExceptionIf5(handler, "hi", 5); serviceBundle.flush(); Sys.sleep(2000); ok = wasError.get() || die(); } public interface SomeInterface { void method1(); void method2(String hi, int amount); String method3(String hi, int amount); } public interface MyServiceInterfaceForClient { void method1(); void method2(String hi, int amount); void method3(Callback<String> handler, String hi, int amount); } public interface ClientInterfaceThrowsException { void methodThrowsExceptionIf5(Callback<String> arg, String hi, int amount); } }
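/*
 * Not QBit code: a minimal, self-contained illustration of the convention the
 * tests above rely on -- the client-side interface mirrors the service interface,
 * but an async method takes a callback as its first parameter instead of
 * returning a value (compare SomeInterface.method3 with
 * MyServiceInterfaceForClient.method3). All names below are invented;
 * java.util.function.Consumer stands in for QBit's Callback.
 */
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

public class CallbackFacadeSketch {

    interface SomeService {                       // synchronous, service-side view
        String method3(String hi, int amount);
    }

    interface SomeServiceClient {                 // asynchronous, client-side view
        void method3(Consumer<String> handler, String hi, int amount);
    }

    // Adapts the synchronous implementation to the callback style on a worker
    // thread, roughly what a service bundle does when it dispatches method calls.
    static SomeServiceClient clientFor(SomeService impl, ExecutorService pool) {
        return (handler, hi, amount) -> pool.execute(() -> handler.accept(impl.method3(hi, amount)));
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        SomeService service = (hi, amount) -> "Hi" + hi + " " + amount;
        SomeServiceClient client = clientFor(service, pool);

        client.method3(result -> System.out.println("We got " + result), "hi", 5); // prints "We got Hihi 5"

        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.SECONDS);
    }
}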
/* Copyright 2014 MITRE Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mitre.provenance.capture.linux; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.FilenameFilter; import java.io.IOException; import java.lang.management.ManagementFactory; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.UserPrincipal; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.logging.Logger; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.mitre.provenance.Metadata; import org.mitre.provenance.PLUSException; import org.mitre.provenance.client.AbstractProvenanceClient; import org.mitre.provenance.client.LocalProvenanceClient; import org.mitre.provenance.client.ProvenanceClient; import org.mitre.provenance.contenthash.ContentHasher; import org.mitre.provenance.contenthash.SHA256ContentHasher; import org.mitre.provenance.db.neo4j.Neo4JPLUSObjectFactory; import org.mitre.provenance.npe.NonProvenanceEdge; import org.mitre.provenance.plusobject.PLUSEdge; import org.mitre.provenance.plusobject.PLUSFile; import org.mitre.provenance.plusobject.PLUSInvocation; import org.mitre.provenance.plusobject.PLUSObject; import org.mitre.provenance.plusobject.PLUSWorkflow; import org.mitre.provenance.plusobject.ProvenanceCollection; import org.mitre.provenance.tools.LRUCache; import org.mitre.provenance.user.User; /** * This class is an operating system monitoring class for UNIX-based operating systems which support the proc filesystem. * For more information about procfs, see http://en.wikipedia.org/wiki/Procfs * * <p>Basically, this polls available OS information about processes that are running, and then saves that information as provenance. * The OS will tell us for example which process IDs (PIDs) have which files open for read and write, and what the command line is * of the application that executed. * r * <p>We have to apply a few basic fingerprinting techniques to avoid logging duplicates. * * <p>This code could doubtless see many improvements, but it's a basic proof of concept for how to collect provenance in real systems. * For many users, this kind of provenance would be seen as too granular, but it can produce some very interesting findings; in * particular, because we use content-bound identifiers on everything that we encounter, this can establish linkages between * different processes that read and use the same files. * * <p>A major weakness of this capture approach is that you can never know when in the process lifecycle to scan a particular PID. 
* Which assets the process is using vary dramatically (particularly for long-lived processes) depending on when you hit it in * the lifecycle. Improvements should focus around appending in subsequent polls. * * @author moxious */ public class PROCtor { protected static final Logger log = Logger.getLogger(PROCtor.class.getName()); protected String myPID = null; public static final LRUCache<String,PLUSObject> cache = new LRUCache<String,PLUSObject>(1000); protected HashSet<String> pollPIDs = new HashSet<String>(); protected static AbstractProvenanceClient client = new LocalProvenanceClient(); protected SHA256ContentHasher hasher = new SHA256ContentHasher(); public static final String UUID_KEY = "file_uuid"; /** * Signals that an object already exists. * @author david */ public static class ExistsException extends PLUSException { private static final long serialVersionUID = 11233123L; protected PLUSObject o; public ExistsException(PLUSObject obj) { this.o = obj; } public PLUSObject getObject() { return o; } } public void addPID(String pid) { pollPIDs.add(pid); } //HashMap<String,PLUSObject> cache = new HashMap<String,PLUSObject>(); protected static File PROC = new File("/proc"); public PROCtor() throws Exception { myPID = PROCtor.getMyPID(); } public void run(long pollTimeoutMs, int times) throws Exception { int x=0; while(true) { if(times > 0 && x >= times) break; poll(); Thread.sleep(pollTimeoutMs); x++; } } protected List<String> slurpLines(File f) { BufferedReader br = null; ArrayList<String> lines = new ArrayList<String>(); try { br = new BufferedReader(new FileReader(f)); String line = null; while((line = br.readLine()) != null) lines.add(line); return lines; } catch(IOException exc) { return null; } finally { try { br.close(); } catch(IOException e) { ; } } } // End slurpLines /** * Read the complete contents of a file and return them as a string. Simple utility for tiny files. * @param f file to read. * @return the complete text contents */ protected String slurp(File f) { BufferedReader br = null; try { br = new BufferedReader(new FileReader(f)); StringBuffer b = new StringBuffer(""); String line = null; while((line = br.readLine()) != null) b.append(line); return b.toString(); } catch(IOException ioe) { return null; } finally { try { br.close(); } catch (IOException e) { ; } } } /** * Computes a special identifier for files based on their path and when they were last modified. This is not a content-bound identifier, * but can be used in case a duplicate file has been seen on the same system. * @param f the file to use * @return a string identifier * @throws NoSuchAlgorithmException * @throws IOException */ protected String getIDForFile(File f) throws NoSuchAlgorithmException, IOException { // Unique ID for a file based on its absolute pathname, and last modified date. // When this hash value changes, you know it's a different file. String stamp = f.getCanonicalPath() + "-" + f.lastModified(); return ContentHasher.formatAsHexString(hasher.hash(new ByteArrayInputStream(stamp.getBytes()))); } /** * Polls through all available items in the proc fs, and processes them individually. * @throws IOException * @throws NoSuchAlgorithmException * @throws PLUSException */ protected void poll() throws IOException, NoSuchAlgorithmException, PLUSException { String[] PIDs = PROC.list(new FilenameFilter() { public boolean accept(File dir, String name) { // Match only filenames that are entirely numeric. 
// These filenames correspond to system PIDs (process IDs) return name.matches("^[0-9]+$"); } }); for(String pid : PIDs) { if(pid.equals(myPID)) continue; // Don't process myself. if(pollPIDs.isEmpty() || pollPIDs.contains(pid)) processPID(new File(PROC, pid)); } } protected ProcFDInfo getFDInfo(File procPID, String fd) { File fdInfoFile = new File(new File(procPID, "fdinfo"), fd); if(!fdInfoFile.exists()) return null; List<String> lines = slurpLines(fdInfoFile); String flags = null; String pos = null; for(String line : lines) { if(line.indexOf(':') != -1) { String [] toks = line.split("[ \\t]+"); if(toks[0].contains("pos")) pos = toks[1]; else if(toks[0].contains("flags")) flags = toks[1]; else log.warning("Unexpected line '" + line + "' in " + fdInfoFile.getAbsolutePath()); } else // Ignore other lines, (inotify, tfd, eventfd-count, others) continue; if(flags != null && pos != null) break; } // Shouldn't happen... if(pos == null || flags == null) return null; return new ProcFDInfo(pos, flags); } /** * Processes a PID identified by a particular /proc filesystem path, and creates the necessary provenance objects. * @param procPID * @throws IOException * @throws NoSuchAlgorithmException * @throws PLUSException */ protected void processPID(File procPID) throws IOException, NoSuchAlgorithmException, PLUSException { if(!procPID.exists()) { log.warning("PID " + procPID + " doesn't exist."); return; } PLUSInvocation inv = createOrRetrieveInvocation(procPID); if(inv == null) return; String [] fileDescriptors = null; File fds = new File(procPID, "fd"); fileDescriptors = fds.list(); if(fileDescriptors == null) { return; } // No permissions here. ProvenanceCollection pcol = new ProvenanceCollection(); boolean revisiting = false; if(client.exists(inv) != null) revisiting = true; else pcol.addNode(inv); List<String> inputs = new ArrayList<String>(); List<String> outputs = new ArrayList<String>(); List<String> related = new ArrayList<String>(); for(String fdName : fileDescriptors) { File fdFile = new File(fds, fdName); // We get the canonical file to resolve the procfs symlink, so that // we're gathering metadata about the file, and not a symlink to the file. File canonical = fdFile.getCanonicalFile(); boolean previouslyWritten = false; PLUSObject fdObj = null; // This is what will let us know whether the file was open for input/output, or whatever. ProcFDInfo fdInfo = getFDInfo(procPID, fdName); if(fdInfo == null) { log.warning("Couldn't get fdInfo for " + procPID + "/fdinfo/" + fdName); continue; } try { fdObj = createOnlyIfNew(canonical); } catch(ExistsException e) { // There is a valid file here, but we've already seen it. That means don't add it // to the collection or try to re-write it. previouslyWritten = true; fdObj = e.getObject(); } if(fdObj == null) continue; if(!previouslyWritten) { fdObj.getMetadata().put("unix:fd", fdName); pcol.addNode(fdObj); } // It's an output if we're appending to it, creating it, writing only to it, or truncating it. if(fdInfo.O_APPEND() || fdInfo.O_CREAT() || fdInfo.O_WRONLY() || fdInfo.O_TRUNC()) outputs.add(""+fdObj.getMetadata().get(UUID_KEY)); // It's an input if we're read only. 
else if(fdInfo.O_RDONLY()) inputs.add(""+fdObj.getMetadata().get(UUID_KEY)); else if(fdInfo.O_RDWR()) related.add(""+fdObj.getMetadata().get(UUID_KEY)); else { log.warning("Ambiguous mode for " + procPID + "/fdinfo/" + fdName + ": " + fdInfo.getFlags()); } if(fdFile.canWrite()) outputs.add(""+fdObj.getMetadata().get(UUID_KEY)); else inputs.add(""+fdObj.getMetadata().get(UUID_KEY)); String file_uuid = ""+fdObj.getMetadata().get(UUID_KEY); if(previouslyWritten) pcol.addNonProvenanceEdge(new NonProvenanceEdge(fdObj, file_uuid, UUID_KEY)); } for(String id : inputs) { PLUSObject o = (PLUSObject)cache.get(id); if(o != null) pcol.addEdge(new PLUSEdge(o, inv)); } for(String id : outputs) { PLUSObject o = (PLUSObject)cache.get(id); if(o != null) pcol.addEdge(new PLUSEdge(inv, o)); } for(String id : related) { // Just mark these as "contributing". PLUSObject o = (PLUSObject)cache.get(id); if(o != null) pcol.addEdge(new PLUSEdge(o, inv, PLUSWorkflow.DEFAULT_WORKFLOW, PLUSEdge.EDGE_TYPE_CONTRIBUTED)); } boolean written = false; if(pcol.countNodes() > 0) written = client.report(pcol); if(written) log.info((revisiting ? "REVISITED" : "NEW") + ": " + inv.getMetadata().get("cmdline") + " PID " + inv.getMetadata().get("pid") + " => " + inputs.size() + " inputs, " + outputs.size() + " outputs. Total written=" + written); } public boolean isSymlink(File file) throws IOException { if(file == null) return false; File canon; if (file.getParent() == null) canon = file; else { File canonDir = file.getParentFile().getCanonicalFile(); canon = new File(canonDir, file.getName()); } return !canon.getCanonicalFile().equals(canon.getAbsoluteFile()); } /** * Return the PID of the process that PROCtor is running underneath. * @return */ public static String getMyPID() { String pidStr = ManagementFactory.getRuntimeMXBean().getName(); int idx = pidStr.indexOf("@"); if(idx == -1) return pidStr; else return pidStr.substring(0, idx); } /** * Get or create a new PLUSInvocation on the basis of a proc PID file, e.g. /proc/56 (pid 56) * Returns null for insufficient permissions, or when you shouldn't log a particular pid. (For * example, this program will not log its own run) */ public PLUSInvocation createOrRetrieveInvocation(File procPID) throws NoSuchAlgorithmException, IOException { String procFileID = getIDForFile(procPID); if(procFileID == null) return null; String pid = procPID.getName(); if(pid.equals(myPID)) return null; // Don't log myself. String [] children = procPID.list(); if(children == null) return null; // No permissions. 
if(cache.containsKey(procFileID)) return (PLUSInvocation)cache.get(procFileID); try { ProvenanceCollection results = Neo4JPLUSObjectFactory.loadBySingleMetadataField(User.DEFAULT_USER_GOD, UUID_KEY, procFileID); if(results != null && results.countNodes() > 0) { PLUSInvocation i = (PLUSInvocation)results.getNodes().toArray()[0]; cache.put(procFileID, i); return i; } } catch(PLUSException exc) { exc.printStackTrace(); } long lmod = procPID.lastModified(); String cmdline = slurp(new File(procPID, "cmdline")); File exe = new File(procPID, "exe").getCanonicalFile(); File cwd = new File(procPID, "cwd").getCanonicalFile(); PLUSInvocation inv = new PLUSInvocation(exe.getCanonicalPath()); inv.getMetadata().put("pid", pid); inv.getMetadata().put("cwd", cwd.getCanonicalPath()); inv.getMetadata().put("cmdline", cmdline); inv.getMetadata().put("started", ""+lmod); inv.getMetadata().put(UUID_KEY, procFileID); inv.getMetadata().put(Metadata.CONTENT_HASH_SHA_256, procFileID); Path path = Paths.get(procPID.getAbsolutePath()); UserPrincipal owner = Files.getOwner(path); String username = owner.getName(); try { inv.setOwner(Neo4JPLUSObjectFactory.getActor(username, true)); } catch(PLUSException exc) { log.warning("Failed to set owner for " + inv + ": " + exc.getMessage()); } cache.put(procFileID, inv); // Cache this so we don't go back over it. return inv; } /** * Create a PLUSObject corresponding to a given file, only if that file is new. Note that throwing an * ExistsException is not an error condition, to signal to the caller that provenance already exists. * @param f the file to inspect. * @return a PLUSObject if it is new. * @throws ExistsException if provenance already exists for that object, this will be thrown. * @throws NoSuchAlgorithmException on error * @throws IOException on error. */ public PLUSObject createOnlyIfNew(File f) throws ExistsException, NoSuchAlgorithmException, IOException { if(f == null || !f.exists()) return null; if(!f.isFile()) return null; // Don't log things like sockets right now. String id = getIDForFile(f); if(id == null) { log.warning("Couldn't compute file id for " + f); return null; } if(cache.containsKey(id)) throw new ExistsException(cache.get(id)); ProvenanceCollection results = null; try { results = Neo4JPLUSObjectFactory.loadBySingleMetadataField(User.DEFAULT_USER_GOD, UUID_KEY, id, 1); } catch(PLUSException exc) { exc.printStackTrace(); throw new RuntimeException(exc); } if(results != null && results.countNodes() > 0) { PLUSObject o = (PLUSObject) results.getNodes().toArray()[0]; cache.put(id, o); throw new ExistsException(o); } PLUSFile pf = new PLUSFile(f); pf.getMetadata().put(UUID_KEY, id); if(id != null) cache.put(id, pf); if(f.isFile()) { long fileSize = 0; try { fileSize = f.length(); } catch(Exception exc) { exc.printStackTrace(); return pf; } // Best effort to hash the content. 
if(fileSize < 1024 * 1024) { FileInputStream fis = null; try { fis = new FileInputStream(f); String sha256hash = ContentHasher.formatAsHexString(hasher.hash(fis)); fis.close(); pf.getMetadata().put(Metadata.CONTENT_HASH_SHA_256, sha256hash); } catch(IOException exc) { ; } finally { if(fis != null) try { fis.close(); } catch(Exception e) { ; } } } } return pf; } public static Options makeCLIOptions() { Options options = new Options(); options.addOption(OptionBuilder.withArgName("pid") .hasArg() .isRequired(false) .withDescription("If specified, capture only provenance for this single PID and its children.") .create("pid")); options.addOption(OptionBuilder.withArgName("once") .hasArg(false) .isRequired(false) .withDescription("Poll the PID fs once, and then quit") .create("once")); options.addOption(OptionBuilder.withArgName("poll") .hasArg(false) .isRequired(false) .withDescription("Poll continuously until user interrupts.") .create("poll")); return options; } public static void usage() { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("PROCtor", makeCLIOptions()); } /** * If provided with arguments, the program processes only those PIDs. If given no arguments, it starts in polling mode. */ public static void main(String [] args) throws Exception { ProvenanceClient.instance = client; CommandLineParser parser = new GnuParser(); if(!PROC.exists()) { log.severe("This utility is intended to run on Linux systems with a PROC filesystem. You do not appear to have one (or it is not readable)"); System.exit(1); } try { CommandLine line = parser.parse(makeCLIOptions(), args); String pidArg = line.getOptionValue("pid"); boolean once = line.hasOption("once"); boolean poll = line.hasOption("poll"); System.out.println("Once " + once + " poll " + poll); PROCtor p = new PROCtor(); if(once && poll) { System.err.println("You can't specify both to run once and to poll."); usage(); System.exit(1); } // Default is to poll if user hasn't otherwise specified. if(!poll && !once) poll = true; if(pidArg != null) { System.out.println("PID=" + pidArg); String[] pids = pidArg.split(" +"); for(String pid : pids) { p.addPID(pid); } } if(poll) p.run(5000, -1); else p.run(5000, 1); } catch(ParseException exc) { usage(); System.exit(1); } } } // End PROCtor
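/*
 * A minimal, self-contained sketch (not part of PROCtor) of how the octal "flags" value read from
 * /proc/<pid>/fdinfo/<fd> can be decoded into the open modes that processPID() uses to classify a
 * descriptor as input, output, or read/write. The constant values below are the usual Linux/x86
 * open(2) flags and the class name FdFlagsSketch is invented for this example.
 */
final class FdFlagsSketch {
    static final int O_ACCMODE = 03;     // mask for the access-mode bits
    static final int O_RDONLY  = 00;
    static final int O_WRONLY  = 01;
    static final int O_RDWR    = 02;
    static final int O_CREAT   = 0100;
    static final int O_TRUNC   = 01000;
    static final int O_APPEND  = 02000;

    /** fdinfo reports flags in octal, e.g. a line like "flags:  0100002". */
    static void describe(String octalFlags) {
        int flags = Integer.parseInt(octalFlags.trim(), 8);
        int mode = flags & O_ACCMODE;
        boolean output = mode == O_WRONLY || (flags & (O_CREAT | O_TRUNC | O_APPEND)) != 0;
        boolean input = mode == O_RDONLY;
        boolean readWrite = mode == O_RDWR;
        System.out.println(octalFlags + " -> input=" + input + " output=" + output + " read/write=" + readWrite);
    }

    public static void main(String[] args) {
        describe("0100000"); // read-only descriptor
        describe("0100002"); // read/write descriptor
        describe("0102001"); // write-only, appending
    }
}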
package frostillicus.xsp.util; import javax.faces.application.Application; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.el.ValueBinding; import org.openntf.domino.*; import org.openntf.domino.utils.Factory; import org.openntf.domino.utils.XSPUtil; import com.ibm.domino.osgi.core.context.ContextInfo; import com.ibm.xsp.component.UIViewRootEx2; import lotus.domino.NotesException; import java.io.*; import java.net.HttpURLConnection; import java.net.URL; import java.util.*; public enum FrameworkUtils { ; public static Database getDatabase(final String server, final String filePath) { Map<String, Object> requestScope = getRequestScope(); String key = "database-" + server + "!!" + filePath; if (!requestScope.containsKey(key)) { Session session = getSession(); requestScope.put(key, session.getDatabase(server, filePath)); } return (Database) requestScope.get(key); } public static Session getSession() { if(isFaces()) { Object session = resolveVariable("session"); if(!(session instanceof Session)) { session = Factory.getWrapperFactory().fromLotus((lotus.domino.Session)session, Session.SCHEMA, null); } return (Session)session; } else { lotus.domino.Session lotusSession = ContextInfo.getUserSession(); Session session; if(lotusSession == null) { session = Factory.getSession(); } else { session = Factory.fromLotus(lotusSession, Session.SCHEMA, null); } session.setConvertMime(false); return session; } } public static Session getSessionAsSigner() { if(isFaces()) { return XSPUtil.getCurrentSessionAsSigner(); } else { return getSession(); } } public static Database getDatabase() { if(isFaces()) { lotus.domino.Database lotusDatabase = (lotus.domino.Database)resolveVariable("database"); Database database; if(lotusDatabase instanceof Database) { database = (Database)lotusDatabase; } else { try { Session session; lotus.domino.Session lotusSession = lotusDatabase.getParent(); if(lotusSession instanceof Session) { session = (Session)lotusSession; } else { session = Factory.getWrapperFactory().fromLotus(lotusSession, Session.SCHEMA, null); } database = Factory.getWrapperFactory().fromLotus(lotusDatabase, Database.SCHEMA, session); } catch(NotesException ne) { throw new RuntimeException(ne); } } return database; } else { Session session = getSession(); lotus.domino.Database lotusDatabase = ContextInfo.getUserDatabase(); Database database; if(lotusDatabase == null) { database = session.getCurrentDatabase(); } else { database = Factory.fromLotus(lotusDatabase, Database.SCHEMA, session); } return database; } } @SuppressWarnings("unchecked") public static Map<String, Object> getApplicationScope() { if(isFaces()) { return (Map<String, Object>)resolveVariable("applicationScope"); } else { return new HashMap<String, Object>(); } } @SuppressWarnings("unchecked") public static Map<String, Object> getSessionScope() { if(isFaces()) { return (Map<String, Object>)resolveVariable("applicationScope"); } else { return new HashMap<String, Object>(); } } @SuppressWarnings("unchecked") public static Map<String, Object> getViewScope() { if(isFaces()) { return (Map<String, Object>)resolveVariable("viewScope"); } else { return new HashMap<String, Object>(); } } @SuppressWarnings("unchecked") public static Map<String, Object> getRequestScope() { if(isFaces()) { return (Map<String, Object>)resolveVariable("requestScope"); } else { return new HashMap<String, Object>(); } } @SuppressWarnings("unchecked") public static Map<Object, Object> getFlashScope() { if(isFaces()) { return 
(Map<Object, Object>)resolveVariable("flashScope"); } else { return new HashMap<Object, Object>(); } } @SuppressWarnings("unchecked") public static Map<String, String> getParam() { if(isFaces()) { return (Map<String, String>)resolveVariable("param"); } else { return new HashMap<String, String>(); } } public static boolean isFaces() { return FacesContext.getCurrentInstance() != null; } public static Object getBindingValue(final String ref) { if(isFaces()) { FacesContext context = FacesContext.getCurrentInstance(); Application application = context.getApplication(); return application.createValueBinding(ref).getValue(context); } else { return null; } } public static void setBindingValue(final String ref, final Object newObject) { if(isFaces()) { FacesContext context = FacesContext.getCurrentInstance(); Application application = context.getApplication(); ValueBinding binding = application.createValueBinding(ref); binding.setValue(context, newObject); } } public static Object resolveVariable(final String varName) { if(isFaces()) { FacesContext context = FacesContext.getCurrentInstance(); return context.getApplication().getVariableResolver().resolveVariable(context, varName); } else { return null; } } public static String getUserName() { return getSession().getEffectiveUserName(); } public static UIViewRootEx2 getViewRoot() { return (UIViewRootEx2) resolveVariable("view"); } public static String pluralize(final String input) { if (input.endsWith("s")) { return input + "es"; } else if (input.endsWith("y")) { return input.substring(0, input.length() - 2) + "ies"; } return input + "s"; } public static String singularize(final String input) { if (input.endsWith("ses")) { return input.substring(0, input.length() - 2); } else if (input.endsWith("ies")) { return input.substring(0, input.length() - 3) + "y"; } else if (input.endsWith("s")) { return input.substring(0, input.length() - 1); } return input; } public String fetchURL(final String urlString) throws Exception { URL url = new URL(urlString); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestProperty("User-Agent", "Firefox/2.0"); BufferedReader in = new BufferedReader(new InputStreamReader((InputStream) conn.getContent())); StringWriter resultWriter = new StringWriter(); String inputLine; while ((inputLine = in.readLine()) != null) { resultWriter.write(inputLine); } in.close(); return resultWriter.toString().replace("<HTTP-EQUIV", "<meta http-equiv"); } public static String xor(final String input, final Vector<?> key) { StringBuilder output = new StringBuilder(); for (int i = 0; i < input.length(); i++) { int character = input.codePointAt(i); int keyNode = ((Double) key.get(i % key.size())).intValue(); int onePass = character ^ keyNode; output.append((char) onePass); } return output.toString(); } public static String xmlEncode(final String text) { StringBuilder result = new StringBuilder(); for (int i = 0; i < text.length(); i++) { char currentChar = text.charAt(i); if (!((currentChar >= 'a' && currentChar <= 'z') || (currentChar >= 'A' && currentChar <= 'Z') || (currentChar >= '0' && currentChar <= '9'))) { result.append("&#" + (int) currentChar + ";"); } else { result.append(currentChar); } } return result.toString(); } public static String strLeft(final String input, final String delimiter) { return input.substring(0, input.indexOf(delimiter)); } public static String strRight(final String input, final String delimiter) { return input.substring(input.indexOf(delimiter) + delimiter.length()); } public static 
String strLeftBack(final String input, final String delimiter) { return input.substring(0, input.lastIndexOf(delimiter)); } public static String strLeftBack(final String input, final int chars) { return input.substring(0, input.length() - chars); } public static String strRightBack(final String input, final String delimiter) { return input.substring(input.lastIndexOf(delimiter) + delimiter.length()); } public static String strRightBack(final String input, final int chars) { return input.substring(input.length() - chars); } public static List<String> toStringList(final Object columnValue) { List<String> result = new Vector<String>(); if (columnValue.getClass().getName().equals("java.util.Vector")) { for (Object reader : (Vector<?>) columnValue) { result.add((String) reader); } } else if (((String) columnValue).length() > 0) { result.add((String) columnValue); } return result; } public static List<Integer> toIntegerList(final Object columnValue) { List<Integer> result = new Vector<Integer>(); if (columnValue.getClass().getName().equals("java.util.Vector")) { for (Object element : (Vector<?>) columnValue) { result.add(((Double) element).intValue()); } } else { result.add(((Double) columnValue).intValue()); } return result; } public static int toInteger(final Object columnValue) { int result = 0; if (columnValue.getClass().getName().equals("java.lang.String")) { result = 0; } else { result = ((Double) columnValue).intValue(); } return result; } public static Date toDate(final Object columnValue) throws NotesException { return ((DateTime) columnValue).toJavaDate(); } public static boolean isSpecialText(final String specialText) { return specialText.contains((char)127 + ""); } public static String specialTextDecode(final String specialText, final ViewEntry viewEntry) throws NotesException { String result = specialText; //if(true) return result; String specialStart = (char)127 + ""; String specialEnd = (char)160 + ""; // First, find the start and end of the special text int start_pos = result.indexOf(specialStart); int end_pos = result.indexOf(specialEnd); // This is just in case things get out of hand - no need to have broken code // result in an infinite loop on the server int loopStopper = 1; while (start_pos > -1 && end_pos > start_pos && loopStopper < 100) { loopStopper++; // "working" holds the text we're going to replace, minus the delimiters // "result" holds the text we're going to replace working and the delimiters with String working = result.substring(start_pos + 1, end_pos); String midResult = ""; String[] choices; int offset, length, parameterCount; switch (working.charAt(0)) { case 'C': // @DocChildren parameterCount = Integer.parseInt(working.substring(1, 2)); switch (parameterCount) { case 0: midResult = viewEntry.getChildCount() + ""; break; case 1: midResult = strRight(working, "=").replaceAll("%", viewEntry.getChildCount() + ""); break; case 2: // For convenience, I'll break the string into each option, even if I only use one choices = new String[] { "", "" }; // I can cheat a bit on the first one to find the length offset = 0; length = Integer.parseInt(strLeft(strRight(working, ";"), "=")); choices[0] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 1 + length; choices[1] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); if (viewEntry.getChildCount() == 0) { midResult = choices[0].replaceAll("%", "0"); } else { midResult = 
choices[1].replaceAll("%", viewEntry.getChildCount() + ""); } break; case 3: // For convenience, I'll break the string into each option, even if I only use one choices = new String[] { "", "", "" }; // I can cheat a bit on the first one to find the length offset = 0; length = Integer.parseInt(strLeft(strRight(working, ";"), "=")); choices[0] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 2 + length; length = Integer.parseInt(working.substring(offset, working.indexOf("=", offset))); choices[1] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 2 + length; length = Integer.parseInt(working.substring(offset, working.indexOf("=", offset))); choices[2] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); if (viewEntry.getChildCount() == 0) { midResult = choices[0].replaceAll("%", "0"); } else if (viewEntry.getChildCount() == 1) { midResult = choices[1].replaceAll("%", "1"); } else { midResult = choices[2].replaceAll("%", viewEntry.getChildCount() + ""); } break; } break; case 'D': // @DocDescendants parameterCount = Integer.parseInt(working.substring(1, 2)); switch (parameterCount) { case 0: midResult = viewEntry.getDescendantCount() + ""; break; case 1: midResult = strRight(working, "=").replaceAll("%", viewEntry.getDescendantCount() + ""); break; case 2: // For convenience, I'll break the string into each option, even if I only use one choices = new String[] { "", "" }; // I can cheat a bit on the first one to find the length offset = 0; length = Integer.parseInt(strLeft(strRight(working, ";"), "=")); choices[0] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 1 + length; choices[1] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); if (viewEntry.getDescendantCount() == 0) { midResult = choices[0].replaceAll("%", "0"); } else { midResult = choices[1].replaceAll("%", viewEntry.getDescendantCount() + ""); } break; case 3: // For convenience, I'll break the string into each option, even if I only use one choices = new String[] { "", "", "" }; // I can cheat a bit on the first one to find the length offset = 0; length = Integer.parseInt(strLeft(strRight(working, ";"), "=")); choices[0] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 2 + length; length = Integer.parseInt(working.substring(offset, working.indexOf("=", offset))); choices[1] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 2 + length; length = Integer.parseInt(working.substring(offset, working.indexOf("=", offset))); choices[2] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); if (viewEntry.getDescendantCount() == 0) { midResult = choices[0].replaceAll("%", "0"); } else if (viewEntry.getDescendantCount() == 1) { midResult = choices[1].replaceAll("%", "1"); } else { midResult = choices[2].replaceAll("%", viewEntry.getDescendantCount() + ""); } break; } break; case 'H': // @DocLevel midResult = (viewEntry.getIndentLevel() + 1) + ""; break; case 'A': // @DocNumber /* Three forms: * @DocNumber - all levels separated by "." 
* @DocNumber("") - only the least significant level * @DocNumber(char) - all levels separated by char. Note: the formula accepts a multi-character string, but * displays it as just the string, not the doc level */ parameterCount = Integer.parseInt(working.substring(1, 2)); switch (parameterCount) { case 0: midResult = viewEntry.getPosition('.'); break; case 1: String delimiter = strRight(working, "="); if (delimiter.length() == 0) { midResult = strRightBack(viewEntry.getPosition('.'), "."); } else if (delimiter.length() > 1) { // Mimic formula's weird behavior for multi-character strings midResult = delimiter; } else { midResult = viewEntry.getPosition(delimiter.charAt(0)); } break; } break; case 'J': // @DocParentNumber // Same as above, just for the parent, so do the same thing and chomp off the last bit if (viewEntry.getIndentLevel() == 0) { midResult = ""; } else { parameterCount = Integer.parseInt(working.substring(1, 2)); switch (parameterCount) { case 0: midResult = strLeftBack(viewEntry.getPosition('.'), "."); break; case 1: String delimiter = strRight(working, "="); if (delimiter.length() == 0) { midResult = strRightBack(strLeftBack(viewEntry.getPosition('.'), "."), "."); } else if (delimiter.length() > 1) { // Mimic formula's weird behavior for multi-character strings midResult = delimiter; } else { midResult = strLeftBack(viewEntry.getPosition(delimiter.charAt(0)), delimiter); } break; } } break; case 'B': // @DocSiblings midResult = (viewEntry.getSiblingCount()) + ""; break; case 'I': // @IsCategory /* Three forms: * @IsCategory - "*" if it's a category, "" otherwise * @IsCategory(string) - string if it's a category, "" otherwise * @IsCategory(string1, string 2) - string1 if it's a category, string2 otherwise */ parameterCount = Integer.parseInt(working.substring(1, 2)); switch (parameterCount) { case 0: midResult = viewEntry.isCategory() ? "*" : ""; break; case 1: midResult = viewEntry.isCategory() ? strRight(working, "=") : ""; break; case 2: // For convenience, I'll break the string into each option, even if I only use one choices = new String[] { "", "" }; offset = 0; length = Integer.parseInt(strLeft(strRight(working, ";"), "=")); choices[0] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); offset = working.indexOf("=", offset) + 2 + length; length = Integer.parseInt(working.substring(offset, working.indexOf("=", offset))); choices[1] = working.substring(working.indexOf("=", offset) + 1, working.indexOf("=", offset) + 1 + length); midResult = viewEntry.isCategory() ? choices[0] : choices[1]; break; } break; case 'G': // @IsExpandable // This is a UI function that changes based on the expanded/collapsed state of the entry in // the Notes client. 
This kind of behavior could be better done without @functions on the web, // so it's not really worth implementing midResult = ""; break; default: midResult = working; break; } result = result.replaceAll(specialStart + working + specialEnd, midResult); start_pos = result.indexOf(specialStart); end_pos = result.indexOf(specialEnd); } return result; } @SuppressWarnings("unchecked") public static void flashMessage(final String type, final String message) { Map<Object, Object> flashScope = getFlashScope(); List<Object> messages = (List<Object>) flashScope.get(type + "Messages"); if (messages == null) { messages = new ArrayList<Object>(); flashScope.put(type + "Messages", messages); } messages.add(message); } public static void addMessage(final String summary) { if(isFaces()) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(summary)); } } public static void addMessage(final String summary, final String detail) { if(isFaces()) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(summary, detail)); } } public static void addMessage(final FacesMessage.Severity severity, final String summary, final String detail) { if(isFaces()) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(severity, summary, detail)); } } /** * @param url * The URL of a resource in the application in the style used for XSP component URLs, e.g. "/foo.js" * @return * The server-relative URL of the resource inside the application */ public String getResourceURL(final String url) { FacesContext facesContext = FacesContext.getCurrentInstance(); return facesContext.getExternalContext().encodeResourceURL(facesContext.getApplication().getViewHandler().getResourceURL(facesContext, url)); } public String getActionURL(final String url) { FacesContext facesContext = FacesContext.getCurrentInstance(); return facesContext.getExternalContext().encodeActionURL(facesContext.getApplication().getViewHandler().getActionURL(facesContext, url)); } }
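/*
 * Short usage sketch for the @Formula-style string helpers in FrameworkUtils. The expected values
 * in the comments follow directly from the indexOf/substring logic above; the demo class name is
 * invented for this example and assumes it can see (or imports) FrameworkUtils.
 */
class FrameworkUtilsStringDemo {
    public static void main(String[] args) {
        String path = "a/b/c";
        System.out.println(FrameworkUtils.strLeft(path, "/"));      // "a"   - up to the first delimiter
        System.out.println(FrameworkUtils.strRight(path, "/"));     // "b/c" - after the first delimiter
        System.out.println(FrameworkUtils.strLeftBack(path, "/"));  // "a/b" - up to the last delimiter
        System.out.println(FrameworkUtils.strRightBack(path, "/")); // "c"   - after the last delimiter
        System.out.println(FrameworkUtils.pluralize("document"));   // "documents"
        System.out.println(FrameworkUtils.singularize("entries"));  // "entry"
    }
}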
package org.ovirt.engine.core.bll; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyListOf; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.ovirt.engine.core.utils.MockConfigRule.mockConfig; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.invocation.InvocationOnMock; import org.mockito.runners.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import org.ovirt.engine.core.bll.interfaces.BackendInternal; import org.ovirt.engine.core.bll.snapshots.SnapshotsValidator; import org.ovirt.engine.core.common.action.RunVmParams; import org.ovirt.engine.core.common.businessentities.Disk; import org.ovirt.engine.core.common.businessentities.DiskImage; import org.ovirt.engine.core.common.businessentities.IVdsAsyncCommand; import org.ovirt.engine.core.common.businessentities.VM; import org.ovirt.engine.core.common.businessentities.VMStatus; import org.ovirt.engine.core.common.businessentities.VmDevice; import org.ovirt.engine.core.common.businessentities.VmDeviceId; import org.ovirt.engine.core.common.businessentities.VmStatic; import org.ovirt.engine.core.common.businessentities.storage_domains; import org.ovirt.engine.core.common.config.ConfigValues; import org.ovirt.engine.core.common.interfaces.VDSBrokerFrontend; import org.ovirt.engine.core.common.utils.VmDeviceType; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.common.vdscommands.VDSParametersBase; import org.ovirt.engine.core.common.vdscommands.VDSReturnValue; import org.ovirt.engine.core.common.vdscommands.VdsAndVmIDVDSParametersBase; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.NGuid; import org.ovirt.engine.core.compat.Version; import org.ovirt.engine.core.dal.VdcBllMessages; import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBaseMockUtils; import org.ovirt.engine.core.dao.DiskDao; import org.ovirt.engine.core.dao.StorageDomainDAO; import org.ovirt.engine.core.dao.VmDAO; import org.ovirt.engine.core.dao.VmDeviceDAO; import org.ovirt.engine.core.utils.MockConfigRule; import org.ovirt.engine.core.utils.vmproperties.VmPropertiesUtils; @RunWith(MockitoJUnitRunner.class) public class RunVmCommandTest { @Rule public static MockConfigRule mcr = new MockConfigRule( mockConfig(ConfigValues.VdsSelectionAlgorithm, "General", "0"), mockConfig(ConfigValues.PredefinedVMProperties, "3.0", "0"), mockConfig(ConfigValues.UserDefinedVMProperties, "3.0", "0") ); /** * The command under test. 
*/ private RunVmCommand<RunVmParams> command; @Mock private VDSBrokerFrontend vdsBrokerFrontend; @Mock private VmDAO vmDAO; @Spy private final VmRunHandler vmRunHandler = VmRunHandler.getInstance(); @Mock private BackendInternal backend; private static final String ISO_PREFIX = "iso://"; private static final String ACTIVE_ISO_PREFIX = "/rhev/data-center/mnt/some_computer/f6bccab4-e2f5-4e02-bba0-5748a7bc07b6/images/11111111-1111-1111-1111-111111111111"; private static final String INACTIVE_ISO_PREFIX = ""; public void mockBackend() { doReturn(backend).when(command).getBackend(); doReturn(backend).when(vmRunHandler).getBackend(); VDSReturnValue vdsReturnValue = new VDSReturnValue(); vdsReturnValue.setReturnValue(true); when(vdsBrokerFrontend.RunVdsCommand(any(VDSCommandType.class), any(VDSParametersBase.class))).thenReturn(vdsReturnValue); when(backend.getResourceManager()).thenReturn(vdsBrokerFrontend); // Set Valid Iso Prefix setIsoPrefixVDSMethod(ACTIVE_ISO_PREFIX); // Set create Vm. setCreateVmVDSMethod(); } /** * Set create VM to return VM with status Up. */ private void setCreateVmVDSMethod() { VDSReturnValue returnValue = new VDSReturnValue(); returnValue.setReturnValue(VMStatus.Up); when(backend.getResourceManager().RunAsyncVdsCommand(eq(VDSCommandType.CreateVm), any(VdsAndVmIDVDSParametersBase.class), any(IVdsAsyncCommand.class))).thenReturn(returnValue); } private static DiskImage createImage() { final DiskImage diskImage = new DiskImage(); diskImage.setId(Guid.NewGuid()); diskImage.setstorage_ids(new ArrayList<Guid>(Arrays.asList(new Guid()))); return diskImage; } private static VmDevice createDiskVmDevice(final DiskImage diskImage) { final VmDevice vmDevice = new VmDevice(); vmDevice.setIsPlugged(true); vmDevice.setId(new VmDeviceId(diskImage.getId(), Guid.NewGuid())); return vmDevice; } /** * Set the Iso prefix. * * @param isoPrefix * - Valid Iso patch or blank (when the Iso is not active. 
*/ private void setIsoPrefixVDSMethod(final String isoPrefix) { doAnswer(new Answer<String>() { @Override public String answer(InvocationOnMock invocation) throws Throwable { return ImagesHandler.cdPathWindowsToLinux(invocation.getArguments()[0].toString(), isoPrefix); } }).when(command).cdPathWindowsToLinux(anyString()); } @Test public void validateSimpleInitrdAndKernelName() throws Exception { String Initrd = "/boot/initrd.initrd"; String Kernel = "/boot/kernel.image"; VM vm = createVmForTesting(Initrd, Kernel); assertEquals(vm.getinitrd_url(), Initrd); assertEquals(vm.getkernel_url(), Kernel); } @Test public void validateIsoPrefix() throws Exception { String initrd = "initrd"; String kernel = "kernel"; VM vm = createVmForTesting(ISO_PREFIX + initrd, ISO_PREFIX + kernel); assertEquals(vm.getinitrd_url(), ACTIVE_ISO_PREFIX + "/" + initrd); assertEquals(vm.getkernel_url(), ACTIVE_ISO_PREFIX + "/" + kernel); } @Test public void validateIsoPrefixForKernelAndNoPrefixForInitrd() throws Exception { String initrd = "initrd"; String kernel = "kernel"; VM vm = createVmForTesting(initrd, ISO_PREFIX + kernel); assertEquals(vm.getinitrd_url(), initrd); assertEquals(vm.getkernel_url(), ACTIVE_ISO_PREFIX + "/" + kernel); } @Test public void validateIsoPrefixForInitrdAndNoPrefixForKernel() throws Exception { String initrd = "initrd"; String kernel = "kernel"; VM vm = createVmForTesting(ISO_PREFIX + initrd, kernel); assertEquals(vm.getinitrd_url(), ACTIVE_ISO_PREFIX + "/" + initrd); assertEquals(vm.getkernel_url(), kernel); } @Test public void validateIsoPrefixNameForKernelAndNullForInitrd() throws Exception { String kernel = "kernel"; VM vm = createVmForTesting(null, ISO_PREFIX + kernel); assertEquals(vm.getinitrd_url(), null); assertEquals(vm.getkernel_url(), ACTIVE_ISO_PREFIX + "/" + kernel); } @Test public void validateIsoPrefixCaseSensitive() throws Exception { String initrd = "ISO://"; VM vm = createVmForTesting(initrd, null); assertEquals(vm.getinitrd_url(), ""); } @Test public void validateIsoPrefixForOnlyIsoPrefixInKernelAndInitrd() throws Exception { String initrd = ISO_PREFIX; String kernelUrl = ISO_PREFIX; VM vm = createVmForTesting(initrd, kernelUrl); assertEquals(vm.getinitrd_url(), ""); assertEquals(vm.getkernel_url(), ""); } @Test public void checkIsoPrefixForNastyCharacters() throws Exception { String initrd = "@#$!"; String kernelUrl = " "; VM vm = createVmForTesting(initrd, kernelUrl); assertEquals(vm.getinitrd_url(), initrd); assertEquals(vm.getkernel_url(), kernelUrl); } @Test public void validateIsoPrefixNameForInitrdAndNullForKernel() throws Exception { String initrd = "initrd"; VM vm = createVmForTesting(ISO_PREFIX + initrd, null); assertEquals(vm.getinitrd_url(), ACTIVE_ISO_PREFIX + "/" + initrd); assertEquals(vm.getkernel_url(), null); } @Test public void validateIsoPrefixWhenNoActiveIso() throws Exception { // Set Valid Iso Prefix setIsoPrefixVDSMethod(INACTIVE_ISO_PREFIX); String initrd = "initrd"; VM vm = createVmForTesting(ISO_PREFIX + initrd, null); assertEquals(vm.getinitrd_url(), INACTIVE_ISO_PREFIX + "/" + initrd); } @Test public void validateIsoPrefixWithTrippleSlash() throws Exception { String initrd = ISO_PREFIX + "/"; VM vm = createVmForTesting(initrd, null); assertEquals(vm.getinitrd_url(), ACTIVE_ISO_PREFIX + "/"); } @Test public void validateIsoPrefixInTheMiddleOfTheInitrdAndKerenelName() throws Exception { String initrd = "initrd " + ISO_PREFIX; String kernelUrl = "kernelUrl " + ISO_PREFIX; VM vm = createVmForTesting(initrd, kernelUrl); 
assertEquals(vm.getinitrd_url(), initrd); assertEquals(vm.getkernel_url(), kernelUrl); } @Test public void validateInitrdWithSlashOnly() throws Exception { String initrd = "/"; VM vm = createVmForTesting(initrd, null); assertEquals(vm.getinitrd_url(), "/"); } @Test public void validateIsoPrefixWithBackSlash() throws Exception { String initrd = "iso:\\"; VM vm = createVmForTesting(initrd, null); assertEquals(vm.getinitrd_url(), "iso:\\"); } @Test public void validateBootPrefixForInitrdAndKernelImage() throws Exception { String initrd = "/boot"; String kernelImage = "/boot"; VM vm = createVmForTesting(initrd, kernelImage); assertEquals(vm.getinitrd_url(), initrd); assertEquals(vm.getkernel_url(), kernelImage); } @Test public void validateInitrdAndKernelImageWithOneCharacter() throws Exception { String initrd = "i"; String kernelImage = "k"; VM vm = createVmForTesting(initrd, kernelImage); assertEquals(vm.getinitrd_url(), "i"); assertEquals(vm.getkernel_url(), "k"); } private VM createVmForTesting(String initrd, String kernel) { mockVm(command); // Set parameter command.getVm().setinitrd_url(initrd); command.getVm().setkernel_url(kernel); command.createVm(); // Check Vm VM vm = vmDAO.get(command.getParameters().getVmId()); return vm; } /** * Mock a VM. */ private VM mockVm(RunVmCommand<RunVmParams> spyVmCommand) { VM vm = new VM(); vm.setstatus(VMStatus.Down); AuditLogableBaseMockUtils.mockVmDao(spyVmCommand, vmDAO); when(vmDAO.get(command.getParameters().getVmId())).thenReturn(vm); return vm; } @Before public void createCommand() { RunVmParams param = new RunVmParams(Guid.NewGuid()); command = spy(new RunVmCommand<RunVmParams>(param)); mockVmRunHandler(); mockSuccessfulSnapshotValidator(); mockVmPropertiesUtils(); mockBackend(); } protected void mockVmRunHandler() { doReturn(vmRunHandler).when(command).getVmRunHandler(); doReturn(true).when(vmRunHandler).performImageChecksForRunningVm(any(VM.class), anyListOf(String.class), any(RunVmParams.class), anyListOf(Disk.class)); doReturn(false).when(vmRunHandler).isVmInPreview(any(VM.class)); } @Test public void canRunVmFailNodisk() { initDAOMocks(Collections.<Disk> emptyList(), Collections.<VmDevice> emptyList()); final VM vm = new VM(); doReturn(vm).when(command).getVm(); doReturn(new VdsSelector(vm, new Guid(), true, new VdsFreeMemoryChecker(command))).when(command) .getVdsSelector(); assertFalse(command.canRunVm()); assertTrue(command.getReturnValue().getCanDoActionMessages().contains("VM_CANNOT_RUN_FROM_DISK_WITHOUT_DISK")); } @Test public void canRunVmFailVmRunning() { final ArrayList<Disk> disks = new ArrayList<Disk>(); final DiskImage diskImage = createImage(); disks.add(diskImage); final VmDevice vmDevice = createDiskVmDevice(diskImage); initDAOMocks(disks, Collections.singletonList(vmDevice)); final VM vm = new VM(); vm.setstatus(VMStatus.Up); doReturn(vm).when(command).getVm(); doReturn(new VdsSelector(vm, new NGuid(), true, new VdsFreeMemoryChecker(command))).when(command) .getVdsSelector(); assertFalse(command.canRunVm()); assertTrue(command.getReturnValue().getCanDoActionMessages().contains("ACTION_TYPE_FAILED_VM_IS_RUNNING")); } @Test public void canRunVmFailVmDuringSnapshot() { final ArrayList<Disk> disks = new ArrayList<Disk>(); final DiskImage diskImage = createImage(); disks.add(diskImage); final VmDevice vmDevice = createDiskVmDevice(diskImage); initDAOMocks(disks, Collections.singletonList(vmDevice)); final VM vm = new VM(); SnapshotsValidator snapshotsValidator = mock(SnapshotsValidator.class); 
when(snapshotsValidator.vmNotDuringSnapshot(vm.getId())) .thenReturn(new ValidationResult(VdcBllMessages.ACTION_TYPE_FAILED_VM_IS_DURING_SNAPSHOT)); doReturn(snapshotsValidator).when(command).getSnapshotsValidator(); doReturn(vm).when(command).getVm(); assertFalse(command.canRunVm()); assertTrue(command.getReturnValue() .getCanDoActionMessages() .contains(VdcBllMessages.ACTION_TYPE_FAILED_VM_IS_DURING_SNAPSHOT.name())); } private void canRunStatelessVmTest(boolean autoStartUp, boolean isVmStateless, Boolean isStatelessParam, boolean shouldPass) { final ArrayList<Disk> disks = new ArrayList<Disk>(); final DiskImage diskImage = createImage(); disks.add(diskImage); final VmDevice vmDevice = createDiskVmDevice(diskImage); final VdsSelector vdsSelector = mock(VdsSelector.class); when(vdsSelector.canFindVdsToRunOn(anyListOf(String.class), anyBoolean())).thenReturn(true); doReturn(vdsSelector).when(command).getVdsSelector(); VDSReturnValue vdsReturnValue = new VDSReturnValue(); vdsReturnValue.setReturnValue(false); when(vdsBrokerFrontend.RunVdsCommand(eq(VDSCommandType.IsVmDuringInitiating), any(VDSParametersBase.class))).thenReturn(vdsReturnValue); initDAOMocks(disks, Collections.singletonList(vmDevice)); final VM vm = new VM(); // set stateless and HA vm.setis_stateless(isVmStateless); vm.setauto_startup(autoStartUp); doReturn(vm).when(command).getVm(); command.getParameters().setRunAsStateless(isStatelessParam); boolean canRunVm = command.canRunVm(); final List<String> messages = command.getReturnValue().getCanDoActionMessages(); assertEquals(shouldPass, canRunVm); assertEquals(shouldPass, !messages.contains("VM_CANNOT_RUN_STATELESS_HA")); } private VmPropertiesUtils mockVmPropertiesUtils() { // Mocks vm properties utils (mocks a successful validation) VmPropertiesUtils utils = spy(new VmPropertiesUtils()); doReturn(Collections.singletonMap("agent", "true")).when(utils).getPredefinedProperties(any(Version.class), any(VmStatic.class)); doReturn(Collections.singletonMap("buff", "123")).when(utils).getUserDefinedProperties(any(Version.class), any(VmStatic.class)); doReturn(new HashSet<Version>(Arrays.asList(Version.v3_0, Version.v3_1))).when(utils) .getSupportedClusterLevels(); doReturn(Collections.emptyList()).when(utils).validateVMProperties(any(Version.class), any(VmStatic.class)); doReturn(utils).when(command).getVmPropertiesUtils(); return utils; } @Test public void canRunVmFailStatelessWhenVmHA() { canRunStatelessVmTest(true, false, Boolean.TRUE, false); } @Test public void canRunVmPassStatelessWhenVmHAandStatelessFalse() { canRunStatelessVmTest(true, true, Boolean.FALSE, true); } @Test public void canRunVmFailStatelessWhenVmHAwithNullStatelessParam() { canRunStatelessVmTest(true, true, null, false); } @Test public void canRunVmPassStatelessWhenVmHAwithNullStatelessParam() { canRunStatelessVmTest(true, false, null, true); } @Test public void canRunVmPassStatelessWhenVmHAwithNegativeStatelessParam() { canRunStatelessVmTest(true, false, Boolean.FALSE, true); } @Test public void canRunVmPassStatelessWhenVmNotHAwithNegativeStatelessParam() { canRunStatelessVmTest(false, false, Boolean.TRUE, true); } /** * @param disks * @param vmDevices * @param guid */ protected void initDAOMocks(final List<Disk> disks, final List<VmDevice> vmDevices) { final DiskDao diskDao = mock(DiskDao.class); when(diskDao.getAllForVm(Guid.Empty)).thenReturn(disks); doReturn(diskDao).when(command).getDiskDao(); doReturn(diskDao).when(vmRunHandler).getDiskDao(); final StorageDomainDAO storageDomainDAO = 
mock(StorageDomainDAO.class); when(storageDomainDAO.getAllForStoragePool(Guid.Empty)) .thenReturn(new ArrayList<storage_domains>()); doReturn(storageDomainDAO).when(command).getStorageDomainDAO(); doReturn(storageDomainDAO).when(vmRunHandler).getStorageDomainDAO(); final VmDeviceDAO vmDeviceDao = mock(VmDeviceDAO.class); when(vmDeviceDao.getVmDeviceByVmIdTypeAndDevice(Guid.Empty, VmDeviceType.DISK.getName(), VmDeviceType.DISK.getName())).thenReturn(vmDevices); doReturn(vmDeviceDao).when(command).getVmDeviceDao(); doReturn(vmDeviceDao).when(vmRunHandler).getVmDeviceDAO(); } private SnapshotsValidator mockSuccessfulSnapshotValidator() { SnapshotsValidator snapshotsValidator = mock(SnapshotsValidator.class); when(snapshotsValidator.vmNotDuringSnapshot(any(Guid.class))).thenReturn(ValidationResult.VALID); doReturn(snapshotsValidator).when(command).getSnapshotsValidator(); return snapshotsValidator; } }
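/*
 * Minimal, self-contained sketch of the spy/doReturn stubbing pattern used throughout
 * RunVmCommandTest. For a Mockito spy, doReturn(...).when(spy).method() is preferred over
 * when(spy.method()).thenReturn(...) because the latter invokes the real method while stubbing.
 * The Greeter class below is invented for this example; only the Mockito calls are real API.
 */
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

class SpyStubbingSketch {
    static class Greeter {
        String greet() { return "real greeting"; }
    }

    public static void main(String[] args) {
        Greeter greeter = spy(new Greeter());
        // Stub the method without invoking the real greet() implementation.
        doReturn("stubbed greeting").when(greeter).greet();
        System.out.println(greeter.greet()); // prints "stubbed greeting"
    }
}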
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.optimization.fitting; import java.util.ArrayList; import java.util.List; import org.apache.commons.math3.analysis.DifferentiableMultivariateVectorFunction; import org.apache.commons.math3.analysis.MultivariateMatrixFunction; import org.apache.commons.math3.analysis.ParametricUnivariateFunction; import org.apache.commons.math3.analysis.differentiation.DerivativeStructure; import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction; import org.apache.commons.math3.optimization.DifferentiableMultivariateVectorOptimizer; import org.apache.commons.math3.optimization.MultivariateDifferentiableVectorOptimizer; import org.apache.commons.math3.optimization.PointVectorValuePair; /** Fitter for parametric univariate real functions y = f(x). * <br/> * When a univariate real function y = f(x) does depend on some * unknown parameters p<sub>0</sub>, p<sub>1</sub> ... p<sub>n-1</sub>, * this class can be used to find these parameters. It does this * by <em>fitting</em> the curve so it remains very close to a set of * observed points (x<sub>0</sub>, y<sub>0</sub>), (x<sub>1</sub>, * y<sub>1</sub>) ... (x<sub>k-1</sub>, y<sub>k-1</sub>). This fitting * is done by finding the parameters values that minimizes the objective * function &sum;(y<sub>i</sub>-f(x<sub>i</sub>))<sup>2</sup>. This is * really a least squares problem. * * @param <T> Function to use for the fit. * * @deprecated As of 3.1 (to be removed in 4.0). * @since 2.0 */ @Deprecated public class CurveFitter<T extends ParametricUnivariateFunction> { /** Optimizer to use for the fitting. * @deprecated as of 3.1 replaced by {@link #optimizer} */ @Deprecated private final DifferentiableMultivariateVectorOptimizer oldOptimizer; /** Optimizer to use for the fitting. */ private final MultivariateDifferentiableVectorOptimizer optimizer; /** Observed points. */ private final List<WeightedObservedPoint> observations; /** Simple constructor. * @param optimizer optimizer to use for the fitting * @deprecated as of 3.1 replaced by {@link #CurveFitter(MultivariateDifferentiableVectorOptimizer)} */ @Deprecated public CurveFitter(final DifferentiableMultivariateVectorOptimizer optimizer) { this.oldOptimizer = optimizer; this.optimizer = null; observations = new ArrayList<WeightedObservedPoint>(); } /** Simple constructor. * @param optimizer optimizer to use for the fitting * @since 3.1 */ public CurveFitter(final MultivariateDifferentiableVectorOptimizer optimizer) { this.oldOptimizer = null; this.optimizer = optimizer; observations = new ArrayList<WeightedObservedPoint>(); } /** Add an observed (x,y) point to the sample with unit weight. 
* <p>Calling this method is equivalent to call * {@code addObservedPoint(1.0, x, y)}.</p> * @param x abscissa of the point * @param y observed value of the point at x, after fitting we should * have f(x) as close as possible to this value * @see #addObservedPoint(double, double, double) * @see #addObservedPoint(WeightedObservedPoint) * @see #getObservations() */ public void addObservedPoint(double x, double y) { addObservedPoint(1.0, x, y); } /** Add an observed weighted (x,y) point to the sample. * @param weight weight of the observed point in the fit * @param x abscissa of the point * @param y observed value of the point at x, after fitting we should * have f(x) as close as possible to this value * @see #addObservedPoint(double, double) * @see #addObservedPoint(WeightedObservedPoint) * @see #getObservations() */ public void addObservedPoint(double weight, double x, double y) { observations.add(new WeightedObservedPoint(weight, x, y)); } /** Add an observed weighted (x,y) point to the sample. * @param observed observed point to add * @see #addObservedPoint(double, double) * @see #addObservedPoint(double, double, double) * @see #getObservations() */ public void addObservedPoint(WeightedObservedPoint observed) { observations.add(observed); } /** Get the observed points. * @return observed points * @see #addObservedPoint(double, double) * @see #addObservedPoint(double, double, double) * @see #addObservedPoint(WeightedObservedPoint) */ public WeightedObservedPoint[] getObservations() { return observations.toArray(new WeightedObservedPoint[observations.size()]); } /** * Remove all observations. */ public void clearObservations() { observations.clear(); } /** * Fit a curve. * This method compute the coefficients of the curve that best * fit the sample of observed points previously given through calls * to the {@link #addObservedPoint(WeightedObservedPoint) * addObservedPoint} method. * * @param f parametric function to fit. * @param initialGuess first guess of the function parameters. * @return the fitted parameters. * @throws org.apache.commons.math3.exception.DimensionMismatchException * if the start point dimension is wrong. */ public double[] fit(T f, final double[] initialGuess) { return fit(Integer.MAX_VALUE, f, initialGuess); } /** * Fit a curve. * This method compute the coefficients of the curve that best * fit the sample of observed points previously given through calls * to the {@link #addObservedPoint(WeightedObservedPoint) * addObservedPoint} method. * * @param f parametric function to fit. * @param initialGuess first guess of the function parameters. * @param maxEval Maximum number of function evaluations. * @return the fitted parameters. * @throws org.apache.commons.math3.exception.TooManyEvaluationsException * if the number of allowed evaluations is exceeded. * @throws org.apache.commons.math3.exception.DimensionMismatchException * if the start point dimension is wrong. 
* @since 3.0 */ public double[] fit(int maxEval, T f, final double[] initialGuess) { // prepare least squares problem double[] target = new double[observations.size()]; double[] weights = new double[observations.size()]; int i = 0; for (WeightedObservedPoint point : observations) { target[i] = point.getY(); weights[i] = point.getWeight(); ++i; } // perform the fit final PointVectorValuePair optimum; if (optimizer == null) { // to be removed in 4.0 optimum = oldOptimizer.optimize(maxEval, new OldTheoreticalValuesFunction(f), target, weights, initialGuess); } else { optimum = optimizer.optimize(maxEval, new TheoreticalValuesFunction(f), target, weights, initialGuess); } // extract the coefficients return optimum.getPointRef(); } /** Vectorial function computing function theoretical values. */ @Deprecated private class OldTheoreticalValuesFunction implements DifferentiableMultivariateVectorFunction { /** Function to fit. */ private final ParametricUnivariateFunction f; /** Simple constructor. * @param f function to fit. */ public OldTheoreticalValuesFunction(final ParametricUnivariateFunction f) { this.f = f; } /** {@inheritDoc} */ public MultivariateMatrixFunction jacobian() { return new MultivariateMatrixFunction() { public double[][] value(double[] point) { final double[][] jacobian = new double[observations.size()][]; int i = 0; for (WeightedObservedPoint observed : observations) { jacobian[i++] = f.gradient(observed.getX(), point); } return jacobian; } }; } /** {@inheritDoc} */ public double[] value(double[] point) { // compute the residuals final double[] values = new double[observations.size()]; int i = 0; for (WeightedObservedPoint observed : observations) { values[i++] = f.value(observed.getX(), point); } return values; } } /** Vectorial function computing function theoretical values. */ private class TheoreticalValuesFunction implements MultivariateDifferentiableVectorFunction { /** Function to fit. */ private final ParametricUnivariateFunction f; /** Simple constructor. * @param f function to fit. */ public TheoreticalValuesFunction(final ParametricUnivariateFunction f) { this.f = f; } /** {@inheritDoc} */ public double[] value(double[] point) { // compute the residuals final double[] values = new double[observations.size()]; int i = 0; for (WeightedObservedPoint observed : observations) { values[i++] = f.value(observed.getX(), point); } return values; } /** {@inheritDoc} */ public DerivativeStructure[] value(DerivativeStructure[] point) { // extract parameters final double[] parameters = new double[point.length]; for (int k = 0; k < point.length; ++k) { parameters[k] = point[k].getValue(); } // compute the residuals final DerivativeStructure[] values = new DerivativeStructure[observations.size()]; int i = 0; for (WeightedObservedPoint observed : observations) { // build the DerivativeStructure by adding first the value as a constant // and then adding derivatives DerivativeStructure vi = new DerivativeStructure(point.length, 1, f.value(observed.getX(), parameters)); for (int k = 0; k < point.length; ++k) { vi = vi.add(new DerivativeStructure(point.length, 1, k, 0.0)); } values[i++] = vi; } return values; } } }
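/*
 * Hedged usage sketch for the deprecated CurveFitter above: fitting a straight line y = a*x + b to
 * a few observed points. It assumes a commons-math 3.x classpath where
 * org.apache.commons.math3.optimization.general.LevenbergMarquardtOptimizer implements
 * DifferentiableMultivariateVectorOptimizer; the Line function is written for this example and is
 * not part of the library.
 */
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.optimization.fitting.CurveFitter;
import org.apache.commons.math3.optimization.general.LevenbergMarquardtOptimizer;

class CurveFitterSketch {
    /** Parametric line: parameters[0] = slope a, parameters[1] = intercept b. */
    static class Line implements ParametricUnivariateFunction {
        public double value(double x, double... p) { return p[0] * x + p[1]; }
        public double[] gradient(double x, double... p) { return new double[] { x, 1.0 }; }
    }

    public static void main(String[] args) {
        CurveFitter<Line> fitter = new CurveFitter<Line>(new LevenbergMarquardtOptimizer());
        // Observations of roughly y = 2x + 1, each with unit weight.
        fitter.addObservedPoint(0.0, 1.1);
        fitter.addObservedPoint(1.0, 2.9);
        fitter.addObservedPoint(2.0, 5.2);
        fitter.addObservedPoint(3.0, 6.8);
        double[] best = fitter.fit(new Line(), new double[] { 1.0, 0.0 });
        System.out.println("slope=" + best[0] + ", intercept=" + best[1]); // close to 2 and 1
    }
}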
package org.jolokia.converter.object; import java.math.BigDecimal; import java.math.BigInteger; import java.net.MalformedURLException; import java.net.URL; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; import org.jolokia.config.ConfigKey; import org.jolokia.util.DateUtil; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.testng.Assert; import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotNull; import static org.testng.AssertJUnit.assertNull; /* * Copyright 2009-2013 Roland Huss * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author roland * @since Feb 14, 2010 */ public class StringToObjectConverterTest { StringToObjectConverter converter; @BeforeTest public void setup() { converter = new StringToObjectConverter(); } @Test public void simpleConversions() { Object obj = converter.convertFromString(int.class.getCanonicalName(),"10"); assertEquals("Int conversion",10,obj); obj = converter.convertFromString(Integer.class.getCanonicalName(),"10"); assertEquals("Integer conversion",10,obj); obj = converter.convertFromString(Short.class.getCanonicalName(),"10"); assertEquals("Short conversion",(short) 10,obj); obj = converter.convertFromString(short.class.getCanonicalName(),"10"); assertEquals("short conversion",Short.parseShort("10"),obj); obj = converter.convertFromString(Long.class.getCanonicalName(),"10"); assertEquals("long conversion",10L,obj); obj = converter.convertFromString(long.class.getCanonicalName(),"10"); assertEquals("Long conversion",10L,obj); obj = converter.convertFromString(Byte.class.getCanonicalName(),"10"); assertEquals("Byte conversion",(byte) 10,obj); obj = converter.convertFromString(byte.class.getCanonicalName(),"10"); assertEquals("byte conversion",Byte.parseByte("10"),obj); obj = converter.convertFromString(Float.class.getCanonicalName(),"10.5"); assertEquals("Float conversion",10.5f,obj); obj = converter.convertFromString(float.class.getCanonicalName(),"21.3"); assertEquals("float conversion",new Float(21.3f),obj); obj = converter.convertFromString(Double.class.getCanonicalName(),"10.5"); assertEquals("Double conversion",10.5d,obj); obj = converter.convertFromString(double.class.getCanonicalName(),"21.3"); assertEquals("double conversion",21.3d,obj); obj = converter.convertFromString(BigDecimal.class.getCanonicalName(),"83.4e+4"); assertEquals("BigDecimal conversion", new BigDecimal("8.34e+5"), obj); obj = converter.convertFromString(BigInteger.class.getCanonicalName(),"47110815471108154711"); assertEquals("BigInteger conversion", new BigInteger("47110815471108154711"), obj); obj = converter.convertFromString(Boolean.class.getCanonicalName(),"false"); 
assertEquals("Boolean conversion",false,obj); obj = converter.convertFromString(boolean.class.getCanonicalName(),"true"); assertEquals("boolean conversion",true,obj); obj = converter.convertFromString(char.class.getCanonicalName(),"a"); assertEquals("Char conversion",'a',obj); obj = converter.convertFromString("java.lang.String","10"); assertEquals("String conversion","10",obj); } @Test public void jsonConversion() { JSONObject json = new JSONObject(); json.put("name","roland"); json.put("kind","jolokia"); Object object = converter.convertFromString(JSONObject.class.getName(),json.toString()); assertEquals(json,object); JSONArray array = new JSONArray(); array.add("roland"); array.add("jolokia"); object = converter.convertFromString(JSONArray.class.getName(),array.toString()); assertEquals(array,object); try { converter.convertFromString(JSONObject.class.getName(),"{bla:blub{"); fail(); } catch (IllegalArgumentException exp) { } } @Test public void urlConversion(){ URL url = null; try { url = new URL("http://google.com"); } catch (MalformedURLException e) {} Object object = converter.convertFromString(URL.class.getCanonicalName(),"http://google.com"); assertEquals("URL conversion", url, object); } @Test public void enumConversion() { ConfigKey key = (ConfigKey) converter.prepareValue(ConfigKey.class.getName(), "MAX_DEPTH"); assertEquals(key, ConfigKey.MAX_DEPTH); } @Test public void dateConversion() { Date date = (Date) converter.convertFromString(Date.class.getName(),"0"); assertEquals(date.getTime(),0); Date now = new Date(); date = (Date) converter.convertFromString(Date.class.getName(), DateUtil.toISO8601(now)); assertEquals(date.getTime() / 1000,now.getTime() / 1000); } @Test(expectedExceptions = { IllegalArgumentException.class}) public void dateConversionFailed() { converter.prepareValue(Date.class.getName(),"illegal-date-format"); } @Test public void objectNameConversion() throws MalformedObjectNameException { String name = "JOLOKIA:class=Conversion,type=builder,name=jlk"; ObjectName objName = new ObjectName(name); ObjectName testName = (ObjectName)converter.convertFromString(ObjectName.class.getName(), name); assertEquals(objName, testName); } @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = ".*parse.*ObjectName.*") public void objectNameConversionFailed() { converter.convertFromString(ObjectName.class.getName(),"bla:blub:InvalidName"); } @Test public void arrayConversions() { Object obj = converter.convertFromString(new int[0].getClass().getName(),"10,20,30"); int expected[] = new int[] { 10,20,30}; for (int i = 0;i < expected.length;i++) { assertEquals(expected[i],((int[]) obj)[i]); } obj = converter.convertFromString(new Integer[0].getClass().getName(),"10,20,30"); for (int i = 0;i < expected.length;i++) { assertEquals(expected[i],(int) ((Integer[]) obj)[i]); } // Escaped arrays String[] strings = (String[]) converter.convertFromString(new String[0].getClass().getName(),"hallo!,hans!!,wu!!rs!t"); assertEquals(strings.length,2); assertEquals("hallo,hans!",strings[0]); assertEquals("wu!rst",strings[1]); try { obj = converter.convertFromString("[Lbla;","10,20,30"); fail("Unknown object type"); } catch (IllegalArgumentException exp) {} try { obj = converter.convertFromString("[X","10,20,30"); fail("Unknown object type"); } catch (IllegalArgumentException exp) {} } @Test public void checkNull() { Object obj = converter.convertFromString(new int[0].getClass().getName(),"[null]"); assertNull("Null check",obj); } @Test public void 
checkEmptyString() { Object obj = converter.convertFromString("java.lang.String","\"\""); assertEquals("Empty String check",0,((String) obj).length()); try { obj = converter.convertFromString("java.lang.Integer","\"\""); fail("Empty string conversion only for string"); } catch (IllegalArgumentException exp) {} } @Test public void unknownExtractor() { try { Object obj = converter.convertFromString(this.getClass().getName(),"bla"); fail("Unknown extractor"); } catch (IllegalArgumentException exp) {}; } @Test public void prepareValue() { assertNull(converter.prepareValue("java.lang.String", null)); assertEquals(converter.prepareValue("java.lang.Long", 10L), 10L); assertEquals(converter.prepareValue("java.lang.Long", "10"), 10L); Map<String,String> map = new HashMap<String, String>(); map.put("euro","fcn"); assertTrue(converter.prepareValue("java.util.Map", map) == map); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void prepareValueInvalidClass() { converter.prepareValue("blubber.bla.hello",10L); } @Test public void prepareValueListConversion1() { List<Boolean> list = new ArrayList<Boolean>(); list.add(true); list.add(false); boolean[] res = (boolean[]) converter.prepareValue("[Z",list); assertTrue(res[0]); assertFalse(res[1]); Assert.assertEquals(res.length,2); } @Test public void prepareValueListConversion2() { List<Boolean> list = new ArrayList<Boolean>(); list.add(true); list.add(false); list.add(null); Boolean[] res = (Boolean[]) converter.prepareValue("[Ljava.lang.Boolean;",list); assertTrue(res[0]); assertFalse(res[1]); assertNull(res[2]); Assert.assertEquals(res.length,3); } @Test(expectedExceptions = { IllegalArgumentException.class }) public void prepareValueWithException() { List<Integer> list = new ArrayList<Integer>(); list.add(10); list.add(null); converter.prepareValue("[I",list); } public static class Example { private String value; private List<String> list; public Example(String value) { this.value = value; } public Example(List<String> list) { this.list = list; } public String getValue() { return value; } public List<String> getList() { return list; } } public static class PrivateExample { private String value; private PrivateExample(String value) { this.value = value; } public String getValue() { return value; } } public static class MultipleConstructorExample { private String value; private List<String> list; public MultipleConstructorExample(String value, List<String> list) { this.value = value; this.list = list; } public String getValue() { return value; } public List<String> getList() { return list; } } @Test public void prepareValueWithConstructor() { Object o = converter.prepareValue(this.getClass().getCanonicalName() + "$Example", "test"); assertTrue(o instanceof Example); assertEquals("test", ((Example)o).getValue()); } @Test public void prepareValueWithConstructorList() { Object o = converter.prepareValue(this.getClass().getCanonicalName() + "$Example", Arrays.asList("test")); assertTrue(o instanceof Example); assertNull(((Example)o).getList()); assertEquals("[test]", ((Example)o).getValue()); } @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Cannot convert string test to type " + "org.jolokia.converter.object.StringToObjectConverterTest\\$PrivateExample " + "because no converter could be found") public void prepareValueWithPrivateExample() { converter.prepareValue(this.getClass().getCanonicalName() + "$PrivateExample", "test"); } @Test(expectedExceptions = IllegalArgumentException.class, 
expectedExceptionsMessageRegExp = "Cannot convert string test to type " + "org.jolokia.converter.object.StringToObjectConverterTest\\$MultipleConstructorExample " + "because no converter could be found") public void prepareValueWithMultipleConstructors() { converter.prepareValue(this.getClass().getCanonicalName() + "$MultipleConstructorExample", "test"); } @Test public void dateConversionNotByConstructor() throws ParseException { final String dateStr = "2015-11-20T00:00:00+00:00"; try { new Date(dateStr); fail("Should have throw IllegalArgumentException"); } catch (IllegalArgumentException ignore) {} // new Date(dateStr) will throw IllegalArgumentException but our convert does not. // so it does not use Constructor to convert date Object obj = converter.convertFromString(Date.class.getCanonicalName(), dateStr); assertNotNull(obj); assertTrue(obj instanceof Date); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"); Date expectedDate = sdf.parse(dateStr.replaceFirst("\\+(0\\d)\\:(\\d{2})$", "+$1$2")); assertEquals(expectedDate, obj); } }
/* * Copyright 2015 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.debug; import com.google.samples.apps.iosched.Config; import no.java.schedule.R; import com.google.samples.apps.iosched.debug.actions.DisplayUserDataDebugAction; import com.google.samples.apps.iosched.debug.actions.ForceAppDataSyncNowAction; import com.google.samples.apps.iosched.debug.actions.ForceSyncNowAction; import com.google.samples.apps.iosched.debug.actions.ScheduleStarredSessionAlarmsAction; import com.google.samples.apps.iosched.debug.actions.ShowAllDriveFilesDebugAction; import com.google.samples.apps.iosched.debug.actions.ShowSessionNotificationDebugAction; import com.google.samples.apps.iosched.debug.actions.TestScheduleHelperAction; import com.google.samples.apps.iosched.explore.ExploreSessionsActivity; import com.google.samples.apps.iosched.service.SessionAlarmService; import com.google.samples.apps.iosched.settings.ConfMessageCardUtils; import com.google.samples.apps.iosched.settings.SettingsUtils; import com.google.samples.apps.iosched.ui.widget.DrawShadowFrameLayout; import com.google.samples.apps.iosched.util.AccountUtils; import com.google.samples.apps.iosched.util.TimeUtils; import com.google.samples.apps.iosched.util.UIUtils; import com.google.samples.apps.iosched.util.WiFiUtils; import com.google.samples.apps.iosched.welcome.WelcomeActivity; import android.app.Fragment; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; import static com.google.samples.apps.iosched.util.LogUtils.LOGW; import static com.google.samples.apps.iosched.util.LogUtils.makeLogTag; /** * {@link android.app.Activity} displaying debug options so a developer can debug and test. This * functionality is only enabled when {@link com.google.samples.apps.iosched.BuildConfig}.DEBUG * is true. */ public class DebugFragment extends Fragment { private static final String TAG = makeLogTag(DebugFragment.class); /** * Area of screen used to display log log messages. 
*/ private TextView mLogArea; @Override public View onCreateView(LayoutInflater inflater, final ViewGroup container, Bundle savedInstanceState) { super.onCreate(savedInstanceState); ViewGroup rootView = (ViewGroup) inflater.inflate(R.layout.debug_frag, null); mLogArea = (TextView) rootView.findViewById(R.id.logArea); ViewGroup tests = (ViewGroup) rootView.findViewById(R.id.debug_action_list); tests.addView(createTestAction(new ForceSyncNowAction())); tests.addView(createTestAction(new DisplayUserDataDebugAction())); tests.addView(createTestAction(new ShowAllDriveFilesDebugAction())); tests.addView(createTestAction(new ForceAppDataSyncNowAction())); tests.addView(createTestAction(new TestScheduleHelperAction())); tests.addView(createTestAction(new ScheduleStarredSessionAlarmsAction())); tests.addView(createTestAction(new DebugAction() { @Override public void run(final Context context, final Callback callback) { final String sessionId = SessionAlarmService.DEBUG_SESSION_ID; final String sessionTitle = "Debugging with Placeholder Text"; Intent intent = new Intent( SessionAlarmService.ACTION_NOTIFY_SESSION_FEEDBACK, null, context, SessionAlarmService.class); intent.putExtra(SessionAlarmService.EXTRA_SESSION_ID, sessionId); intent.putExtra(SessionAlarmService.EXTRA_SESSION_START, System.currentTimeMillis() - 30 * 60 * 1000); intent.putExtra(SessionAlarmService.EXTRA_SESSION_END, System.currentTimeMillis()); intent.putExtra(SessionAlarmService.EXTRA_SESSION_TITLE, sessionTitle); context.startService(intent); Toast.makeText(context, "Showing DEBUG session feedback notification.", Toast.LENGTH_LONG).show(); } @Override public String getLabel() { return "Show session feedback notification"; } })); tests.addView(createTestAction(new ShowSessionNotificationDebugAction())); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { context.startActivity(new Intent(context, WelcomeActivity.class)); } @Override public String getLabel() { return "Display Welcome Activity"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { SettingsUtils.markTosAccepted(context, false); SettingsUtils.markConductAccepted(context, false); SettingsUtils.setAttendeeAtVenue(context, false); SettingsUtils.markAnsweredLocalOrRemote(context, false); AccountUtils.setActiveAccount(context, null); ConfMessageCardUtils.unsetStateForAllCards(context); } @Override public String getLabel() { return "Reset Welcome Flags"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { Intent intent = new Intent(context, ExploreSessionsActivity.class); intent.putExtra(ExploreSessionsActivity.EXTRA_FILTER_TAG, "TOPIC_ANDROID"); context.startActivity(intent); } @Override public String getLabel() { return "Show Explore Sessions Activity (Android Topic)"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { LOGW(TAG, "Unsetting all Explore I/O message card answers."); ConfMessageCardUtils.markAnsweredConfMessageCardsPrompt(context, null); ConfMessageCardUtils.setConfMessageCardsEnabled(context, null); ConfMessageCardUtils.unsetStateForAllCards(context); } @Override public String getLabel() { return "Unset all Explore I/O-based card answers"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { java.util.Date currentTime = new 
java.util.Date(UIUtils.getCurrentTime(context)); java.util.Date newTime = new java.util.Date(Config.CONFERENCE_START_MILLIS - TimeUtils.HOUR * 3); LOGW(TAG, "Setting time from " + currentTime + " to " + newTime); UIUtils.setCurrentTime(context, newTime.getTime()); } @Override public String getLabel() { return "Set time to 3 hours before Conf"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { java.util.Date currentTime = new java.util.Date(UIUtils.getCurrentTime(context)); java.util.Date newTime = new java.util.Date(Config.CONFERENCE_START_MILLIS - TimeUtils.DAY); LOGW(TAG, "Setting time from " + currentTime + " to " + newTime); UIUtils.setCurrentTime(context, newTime.getTime()); } @Override public String getLabel() { return "Set time to Day Before Conf"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { java.util.Date currentTime = new java.util.Date(UIUtils.getCurrentTime(context)); java.util.Date newTime = new java.util.Date(Config.CONFERENCE_START_MILLIS + TimeUtils.HOUR * 3); LOGW(TAG, "Setting time from " + currentTime + " to " + newTime); UIUtils.setCurrentTime(context, newTime.getTime()); LOGW(TAG, "Unsetting all Explore I/O card answers and settings."); ConfMessageCardUtils.markAnsweredConfMessageCardsPrompt(context, null); ConfMessageCardUtils.setConfMessageCardsEnabled(context, null); SettingsUtils.markDeclinedWifiSetup(context, false); WiFiUtils.uninstallConferenceWiFi(context); } @Override public String getLabel() { return "Set time to 3 hours after Conf start"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { java.util.Date currentTime = new java.util.Date(UIUtils.getCurrentTime(context)); java.util.Date newTime = new java.util.Date(Config.CONFERENCE_DAYS[1][0] + TimeUtils.HOUR * 3); LOGW(TAG, "Setting time from " + currentTime + " to " + newTime); UIUtils.setCurrentTime(context, newTime.getTime()); } @Override public String getLabel() { return "Set time to 3 hours after 2nd day start"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { java.util.Date currentTime = new java.util.Date(UIUtils.getCurrentTime(context)); java.util.Date newTime = new java.util.Date(Config.CONFERENCE_END_MILLIS + TimeUtils.HOUR * 3); LOGW(TAG, "Setting time from " + currentTime + " to " + newTime); UIUtils.setCurrentTime(context, newTime.getTime()); } @Override public String getLabel() { return "Set time to 3 hours after Conf end"; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { ConfMessageCardUtils.markShouldShowConfMessageCard(context, ConfMessageCardUtils.ConfMessageCard.CONFERENCE_CREDENTIALS, true); } @Override public String getLabel() { return "Force 'Conference Credentials' message card."; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { ConfMessageCardUtils.markShouldShowConfMessageCard(context, ConfMessageCardUtils.ConfMessageCard.KEYNOTE_ACCESS, true); } @Override public String getLabel() { return "Force 'Keynote Access' message card."; } })); tests.addView(createTestAction(new DebugAction() { @Override public void run(Context context, Callback callback) { ConfMessageCardUtils.markShouldShowConfMessageCard(context, 
ConfMessageCardUtils.ConfMessageCard.AFTER_HOURS, true); } @Override public String getLabel() { return "Force 'After Hours' message card."; } })); return rootView; } protected View createTestAction(final DebugAction test) { Button testButton = new Button(this.getActivity()); testButton.setText(test.getLabel()); testButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { final long start = System.currentTimeMillis(); mLogArea.setText(""); test.run(view.getContext(), new DebugAction.Callback() { @Override public void done(boolean success, String message) { logTimed((System.currentTimeMillis() - start), (success ? "[OK] " : "[FAIL] ") + message); } }); } }); return testButton; } protected void logTimed(long time, String message) { message = "[" + time + "ms] " + message; Log.d(TAG, message); mLogArea.append(message + "\n"); } private void setContentTopClearance(int clearance) { if (getView() != null) { getView().setPadding(getView().getPaddingLeft(), clearance, getView().getPaddingRight(), getView().getPaddingBottom()); } } @Override public void onResume() { super.onResume(); // configure fragment's top clearance to take our overlaid controls (Action Bar // and spinner box) into account. int actionBarSize = UIUtils.calculateActionBarSize(getActivity()); DrawShadowFrameLayout drawShadowFrameLayout = (DrawShadowFrameLayout) getActivity().findViewById(R.id.main_content); if (drawShadowFrameLayout != null) { drawShadowFrameLayout.setShadowTopOffset(actionBarSize); } setContentTopClearance(actionBarSize + getResources().getDimensionPixelSize(R.dimen.explore_grid_padding)); } }
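/*
 * Sketch only: the class name below is invented for illustration. It follows the
 * DebugAction run()/getLabel() contract used by the anonymous actions in
 * DebugFragment.onCreateView() above, and reports its result through the Callback
 * so the fragment's log area prints a timed [OK]/[FAIL] line. The SettingsUtils
 * call is the same one used by the "Reset Welcome Flags" action, flipped on
 * instead of off.
 */
package com.google.samples.apps.iosched.debug;

import android.content.Context;

import com.google.samples.apps.iosched.settings.SettingsUtils;

public class MarkAttendeeAtVenueDebugAction implements DebugAction {

    @Override
    public void run(Context context, Callback callback) {
        // Mark the attendee as being on site, then report success to the fragment.
        SettingsUtils.setAttendeeAtVenue(context, true);
        callback.done(true, "Marked attendee as at the venue.");
    }

    @Override
    public String getLabel() {
        return "Mark attendee at venue";
    }
}
// Wired up like the other actions in onCreateView():
//   tests.addView(createTestAction(new MarkAttendeeAtVenueDebugAction()));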
package com.nghianh.giaitriviet.providers.radio.visualizer; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.view.MotionEvent; import android.view.SurfaceHolder; import android.view.SurfaceView; public class DrawingPanel extends SurfaceView implements SurfaceHolder.Callback { private SurfaceHolder surfaceHolder; private MainThread _thread; private Canvas canvas; private Paint p = new Paint(); private GrabAudio grabAudio = null; private boolean visible = false; private boolean dataExists = false; private boolean frequency = true; private int displayType = 4; private int resolution = 512; private int fadeAmt = 100; private int cToggle = 0; private int cToggle2 = 0; private int flashVal = 0; private double ax = -100, ay = -100; private int y = 0; private int[] data; private int[] tempData; private int[] lagData = new int[50]; private int[][] locData = new int[50][2]; private int waveAmt = 1; private int[][] waveData = new int[waveAmt][resolution]; private int[] boxArray = {0, 0, 0, 0, 0, 0, 0, 0}; private double tempAvg = 0; private int audioSessionID = 0; public DrawingPanel(Context context, int vType, int cMode, int cMode2, boolean freq, int audioSessionID) { super(context); frequency = freq; displayType = vType; cToggle = cMode; cToggle2 = cMode2; getHolder().addCallback(this); surfaceHolder = getHolder(); this.audioSessionID = audioSessionID; } public void setAudioSessionID(int id) { audioSessionID = id; surfaceDestroyed(surfaceHolder); surfaceCreated(surfaceHolder); } public Canvas getCanvas() { return canvas; } @Override public void onDraw(Canvas c) { // Background, initial alpha value (fadeAmt) determines fading of previous render, // cToggle determines color scheme // cToggle 2 determines color of wave, if 1, means wave is red and background is changed // to a cream color to accentuate the wave //try drawARGB(a,r,g,b); to fill screen if (cToggle == 0) { //p.setColor(Color.argb(fadeAmt, 0, 4, 20)); c.drawARGB(fadeAmt, 0, 4, 20); } if (cToggle == 0 && cToggle2 == 1) { //p.setColor(Color.argb(fadeAmt, 20, 4, 0)); c.drawARGB(fadeAmt, 20, 4, 0); } if (cToggle == 1) { //p.setColor(Color.argb(255, 230, 255, 255)); c.drawARGB(255, 230, 255, 255); } if (cToggle == 1 && cToggle2 == 1) { //p.setColor(Color.argb(255, 255, 231, 200)); c.drawARGB(255, 255, 231, 200); } //c.drawRect(0, 0, c.getWidth(), c.getHeight(), p); // Normal display of frequencies if (displayType == 0) { fadeAmt = 200; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } double offset = 1 + ((double) i / (double) (resolution / 4)); c.drawRect( ((c.getWidth() / 2) + (int) (2 * data[i] * offset)), c.getHeight() - (int) (i * c.getHeight() / resolution), ((c.getWidth() / 2) - (int) (2 * data[i] * offset)), c.getHeight() - (int) (i * c.getHeight() / resolution + c .getHeight() / resolution), p); } } } // bubble display of frequencies if (displayType == 1) { fadeAmt = 200; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } double offset = 1 + ((double) i / (double) 50); c.drawCircle(c.getWidth() / 2, c.getHeight() - (int) (i * c.getHeight() / resolution), (int) (offset * data[i]), p); } } } // pixely raindropy effect if (displayType == 2) { 
fadeAmt = 50; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } double offset = 1 + ((double) i / (double) (resolution / 32)); c.drawRect( ((c.getWidth() / 2) + (int) (2 * data[i] * offset + offset)), c.getHeight() - (int) (i * c.getHeight() / resolution), ((c.getWidth() / 2) + (int) (2 * data[i] * offset + offset) - 10), c.getHeight() - (i * c.getHeight() / resolution + c.getHeight() / resolution + 1), p); } } } // Rising if (displayType == 3) { fadeAmt = 100; if (visible) { for (int i = 0; i < resolution; i++) { double offset = 1 + ((double) i / (double) (resolution) / 5); if (cToggle2 == 0) { p.setColor(Color.argb((int) Math.abs(offset * 5 * data[i]), (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb((int) Math.abs(offset * 5 * data[i]), 255, (i / 2), 0)); } c.drawRect(0, c.getHeight() - i * c.getHeight() / resolution, c.getWidth(), c.getHeight() - (i + 1) * c.getHeight() / resolution, p); } } } // old school display of frequencies if (displayType == 4) { fadeAmt = 255; double width = c.getHeight() / 8; if (visible) { for (int n = 0; n < 8; n++) { int h = 0; for (int i = n * (resolution / 8); i < ((n * resolution / 8) + resolution / 8); i++) { if (Math.abs(data[i]) > h) { h = Math.abs(data[i]); } } if (n == 0) { h = h / 4; } if (cToggle2 == 0) { p.setColor(Color.argb(255, 0, 255, 255 - 32 * n)); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, 255 - 32 * n, 0)); } h = h * 12; if (h > c.getWidth() - 10) { h = c.getWidth() - 10; } c.drawRect(c.getWidth() - h, c.getHeight() - (int) (n * (width) + width - 1), (c.getWidth()), c.getHeight() - (int) ((n * width)), p); //White blocks on top if (boxArray[n] < h) { boxArray[n] = h; } int m = boxArray[n]; c.drawRect((c.getWidth() - 10) - m, c.getHeight() - (int) (n * (width) + width - 1), c.getWidth() - m, c.getHeight() - (int) ((n * width)), p); if (boxArray[n] > 0) { boxArray[n] -= 6; } } } tempData = data; } //Wavelength view of sound if (displayType == 5) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } c.drawRect( ((c.getWidth() / 2) + 2 * data[i]), c.getHeight() - i * c.getHeight() / resolution, ((c.getWidth() / 2) - 2 * data[i]), c.getHeight() - (i * c.getHeight() / resolution + c .getHeight() / resolution), p); } } } //Wavelength Knot if (displayType == 6) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } int i2 = 0; if (i < 511) { i2 = i + 1; double xCo = Math.cos(i * (1.0 / 4.0)); double yCo = Math.sin(i * (1.0 / 4.0)); double xCo2 = Math.cos(i2 * (1.0 / 4.0)); double yCo2 = Math.sin(i2 * (1.0 / 4.0)); c.drawLine((int) ((c.getWidth() / 2) + (2 * xCo * data[i])), (int) ((c.getHeight() / 2) + (2 * yCo * data[i])), (int) ((c.getWidth() / 2) + (2 * xCo2 * data[i2])), (int) ((c.getHeight() / 2) + (2 * yCo2 * data[i2])), p); } } } } //Starburst if (displayType == 7) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } int i2 = 0; if (i < 255) { i2 = i + 1; } 
double xCo = Math.cos(i * (360.0 / 256.0)); double yCo = Math.sin(i * (360.0 / 256.0)); double xCo2 = Math.cos(i * (360.0 / 256.0)); double yCo2 = Math.sin(i * (360.0 / 256.0)); c.drawLine((int) ((c.getWidth() / 2) + (2 * xCo * data[i])), (int) ((c.getHeight() / 2) + (2 * yCo * data[i])), (int) ((c.getWidth() / 2) + (2 * xCo2 * data[i2])), (int) ((c.getHeight() / 2) + (2 * yCo2 * data[i2])), p); //flashVal = Math.abs(data[i]); } } } //Flower if (displayType == 8) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } int i2 = 0; if (i < 511) { i2 = i + 1; double pos = i * (360.0 / 512.0); double pos2 = i2 * (360.0 / 512.0); double xCo = Math.cos(Math.toRadians(pos)); double yCo = Math.sin(Math.toRadians(pos)); double xCo2 = Math.cos(Math.toRadians(pos2)); double yCo2 = Math.sin(Math.toRadians(pos2)); c.drawLine((int) ((c.getWidth() / 2) + (4 * xCo * Math.abs(data[i]))), (int) ((c.getHeight() / 2) + (4 * yCo * Math.abs(data[i]))), (int) ((c.getWidth() / 2) + (4 * xCo2 * Math.abs(data[i2]))), (int) ((c.getHeight() / 2) + (4 * yCo2 * Math.abs(data[i2]))), p); } } } } //Ring if (displayType == 9) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution + 2; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (i / 2), 0)); } int i2 = 0; if (i < 509) { i2 = i + 1; } double pos = (i * (360.0 / (resolution + 2))) + 90; double pos2 = (i2 * (360.0 / (resolution + 2))) + 90; double xCo = Math.cos(Math.toRadians(pos)); double yCo = Math.sin(Math.toRadians(pos)); double xCo2 = Math.cos(Math.toRadians(pos2)); double yCo2 = Math.sin(Math.toRadians(pos2)); if (i < resolution) { c.drawLine((int) ((c.getWidth() / 2) + (xCo * (150 + data[i]))), (int) ((c.getHeight() / 2) + (yCo * (150 + data[i]))), (int) ((c.getWidth() / 2) + (xCo2 * (150 + data[i2]))), (int) ((c.getHeight() / 2) + (yCo2 * (150 + data[i2]))), p); } } } } //Plaid if (displayType == 10) { fadeAmt = 255; if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } //System.out.println(lagData[0] != lagData[50]); for (int i = 0; i < 50; i++) { double m = 255 / 50; if (cToggle2 == 0) { p.setColor(Color.argb((2 * Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb((2 * Math.abs(lagData[i])), 255, (int) (i * m), 0)); } double n = i * (((c.getHeight()) / 2) / (double) 50); c.drawLine(0, (int) (((c.getHeight()) / 2) + 10 + n), c.getWidth(), (int) (((c.getHeight()) / 2) + 10 + n), p); c.drawLine(0, (int) (((c.getHeight()) / 2) - 10 - n), c.getWidth(), (int) (((c.getHeight()) / 2) - 10 - n), p); c.drawLine((int) (((c.getWidth()) / 2) + 10 + n), 0, (int) (((c.getWidth()) / 2) + 10 + n), c.getHeight(), p); c.drawLine((int) (((c.getWidth()) / 2) - 10 - n), 0, (int) (((c.getWidth()) / 2) - 10 - n), c.getHeight(), p); } } } //Radio if (displayType == 11) { p.setStyle(Paint.Style.STROKE); p.setStrokeWidth((int) (((c.getHeight() * 1.5)) / (double) 50) - 5); fadeAmt = 150; if (visible) { int alphaVal = 0; int avg = 0; for (int i = 0; i < resolution; i++) { avg += Math.abs(data[i]); } avg /= (resolution * 2); if (tempAvg < avg) { tempAvg = avg; } else if (tempAvg > 0) { tempAvg -= 0.3; } int[] tempLagData = new 
int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } for (int i = 0; i < 50; i++) { double m = 255 / 50; if (tempAvg > i) { if (cToggle2 == 0) { p.setColor(Color.argb(5 * (Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb(5 * (Math.abs(lagData[i])), 255, (int) (i * m), 0)); } } else { p.setColor(Color.argb(0, 0, 0, 0)); } double n = i * (((c.getHeight() * 1.5)) / (double) 50); c.drawCircle(c.getWidth(), c.getHeight(), (float) n, p); } } p.setStyle(Paint.Style.FILL); } //Fireworks if (displayType == 12) { p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(10); fadeAmt = 200; if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } int[][] tempLocData = new int[50][2]; for (int i = 0; i < 50; i++) { tempLocData[i][0] = locData[i][0]; tempLocData[i][1] = locData[i][1]; } if (Math.abs(data[0]) > 20 + Math.abs(lagData[1]) && Math.abs(data[0]) > 20 + Math.abs(lagData[2])) { locData[0][0] = (int) (Math.random() * c.getWidth()); locData[0][1] = (int) (Math.random() * c.getHeight()); } else { locData[0][0] = -10000; locData[0][1] = -10000; } for (int i = 1; i < 50; i++) { locData[i][0] = tempLocData[i - 1][0]; locData[i][1] = tempLocData[i - 1][1]; } for (int i = 0; i < 50; i++) { double m = 255 / 50; int alphaValue = (4 * (Math.abs(lagData[i]))) - (i * 5); if (alphaValue < 0) { alphaValue = 0; } if (cToggle2 == 0) { p.setColor(Color.argb(alphaValue, (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb(alphaValue, 255, (int) (i * m), 0)); } double n = i * (((c.getHeight()) / 2) / (double) 50); c.drawCircle(locData[i][0], locData[i][1], (float) n, p); } } p.setStyle(Paint.Style.FILL); } //Tunnel if (displayType == 13) { fadeAmt = 255; p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(3); if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } //System.out.println(lagData[0] != lagData[50]); for (int i = 0; i < 50; i++) { double m = 255 / 50; if (cToggle2 == 0) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), 255, (int) (i * m), 0)); } double n = 1 + ((50 - i) / 50.0) * ((50 - i) * (((c.getHeight()) / 2) / (double) 50)); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) + n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) + n), p); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) - n), p); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) + n + 1), p); c.drawLine((int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) + n), p); } } p.setStyle(Paint.Style.FILL); } //Fuzz if (displayType == 14) { fadeAmt = 255; if (visible) { for (int i = 0; i < resolution; i++) { if (cToggle2 == 0) { p.setColor(Color.argb(255 - (i / 2), (i / 2), 255, 255 - (i / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255 - (i / 2), 255, 
(i / 2), 0)); } int i2 = 0; if (i < 511) { i2 = i + 1; } double pos = (i * (360.0 / 512.0)) + 90; double pos2 = (i2 * (360.0 / 512.0)) + 90; double xCo = Math.cos(Math.toRadians(pos)); double yCo = Math.sin(Math.toRadians(pos)); double xCo2 = Math.cos(Math.toRadians(pos2)); double yCo2 = Math.sin(Math.toRadians(pos2)); int val1 = (c.getHeight() / 4) + data[i];// * (1 + i/15); int val2 = (c.getHeight() / 4) + data[i2];// * (1 + i/15); if (i == 0) { c.drawLine((int) ((c.getWidth() / 2) + (xCo * val1)), (int) ((c.getHeight() / 2) - (c.getHeight() / 4) + (yCo * val1)), (int) ((c.getWidth() / 2) - (xCo * val1)), (int) ((c.getHeight() / 2) + (c.getHeight() / 4) - (yCo * val1)), p); } c.drawLine((int) ((c.getWidth() / 2) + (xCo * val1)), (int) ((c.getHeight() / 2) - (c.getHeight() / 4) + (yCo * val1)), (int) ((c.getWidth() / 2) + (xCo2 * val2)), (int) ((c.getHeight() / 2) - (c.getHeight() / 4) + (yCo2 * val2)), p); c.drawLine((int) ((c.getWidth() / 2) - (xCo * val1)), (int) ((c.getHeight() / 2) + (c.getHeight() / 4) - (yCo * val1)), (int) ((c.getWidth() / 2) - (xCo2 * val2)), (int) ((c.getHeight() / 2) + (c.getHeight() / 4) - (yCo2 * val2)), p); } } } // Spiral if (displayType == 15) { fadeAmt = 255; p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(3); if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } //System.out.println(lagData[0] != lagData[50]); for (int i = 0; i < 50; i++) { double m = 255 / 50; if (cToggle2 == 0) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), 255, (int) (i * m), 0)); } double n = 1 + (i / 50.0) * (i * (((c.getHeight()) * 0.6) / (double) 50)); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) + n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) + n), p); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) - n), p); c.drawLine((int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) + n), (int) (((c.getHeight()) / 2) + n + 1), p); c.drawLine((int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) - n), (int) (((c.getWidth()) / 2) - n), (int) (((c.getHeight()) / 2) + n), p); c.rotate((int) (180.0 / 50.0), c.getWidth() / 2, c.getHeight() / 2); } } p.setStyle(Paint.Style.FILL); } // Ascension if (displayType == 16) { fadeAmt = 255; p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(4); if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } //System.out.println(lagData[0] != lagData[50]); for (int i = 0; i < 50; i++) { double m = 255 / 50; if (cToggle2 == 0) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), 255, (int) (i * m), 0)); } //POINT WHERE ITS COOL double n = 1 + (i / 50.0) * (i * (c.getHeight() / (double) 50)); c.drawLine(0, (int) (c.getHeight() - n), c.getWidth(), (int) (c.getHeight() - n), p); } } p.setStyle(Paint.Style.FILL); } // Line if (displayType == 17) { fadeAmt = 255; p.setStyle(Paint.Style.STROKE); p.setStrokeWidth((c.getHeight() / 50)); if (visible) { int[] 
tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } for (int i = 0; i < 50; i++) { double m = 255 / 50; if (cToggle2 == 0) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb((4 * Math.abs(lagData[i])), 255, (int) (i * m), 0)); } double n = (i * (c.getHeight() / (double) 50)); c.drawLine(0, (int) (c.getHeight() - n), c.getWidth(), (int) (c.getHeight() - n), p); } } p.setStyle(Paint.Style.FILL); } // Big Ess if (displayType == 18) { fadeAmt = 150; p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(3); if (visible) { int[] tempLagData = new int[50]; for (int i = 0; i < 50; i++) { tempLagData[i] = lagData[i]; } lagData[0] = data[0]; if (Math.abs(data[0]) > 15 + Math.abs(lagData[1]) && Math.abs(data[0]) > 15 + Math.abs(lagData[2])) { lagData[0] = 0; } for (int i = 1; i < 50; i++) { lagData[i] = tempLagData[i - 1]; } for (int i = 0; i < 50; i++) { double m = 255 / 50; int alphaValue = (4 * (Math.abs(lagData[i]))); if (alphaValue < 0) { alphaValue = 0; } if (cToggle2 == 0) { p.setColor(Color.argb(alphaValue, (int) (i * m), 255, 255 - (int) (i * m))); } if (cToggle2 == 1) { p.setColor(Color.argb(alphaValue, 255, (int) (i * m), 0)); } int r = (int) (c.getWidth() / 10.0); int xVal = r + r * 2 * ((i) / 10); int yVal = r + r * 2 * ((i) % 10); c.drawCircle(xVal, yVal, r - 5, p); } } p.setStyle(Paint.Style.FILL); } //Waves coming in if (displayType == 19) { p.setStyle(Paint.Style.STROKE); p.setStrokeWidth(2); fadeAmt = 255; if (visible) { int[][] tempWaveData = new int[waveAmt][resolution]; for (int i = 0; i < waveAmt; i++) { tempWaveData[i] = waveData[i]; } waveData[0] = data; for (int i = 1; i < waveAmt; i++) { waveData[i] = tempWaveData[i - 1]; } for (int j = 0; j < resolution; j++) { if (cToggle2 == 0) { p.setColor(Color.argb(255, (j / 2), 255, 255 - (j / 2))); } if (cToggle2 == 1) { p.setColor(Color.argb(255, 255, (j / 2), 0)); } int j2 = 0; if (j < 511) { j2 = j + 1; } float yCo = (float) (j * ((double) c.getHeight() / (double) resolution)); float xCo = (c.getWidth() / 2) + waveData[0][j]; float yCo2 = (float) ((j + 1) * ((double) c.getHeight() / (double) resolution)); float xCo2 = (c.getWidth() / 2) + waveData[0][j2]; c.drawLine(xCo, yCo, xCo2, yCo2, p); } } p.setStyle(Paint.Style.FILL); } //Staggered Circles, looks like a 3d cone from the side, radius controlled by frequency values } public void getData() { if (visible && grabAudio != null) { while (!dataExists) { try { data = grabAudio.getFormattedData(1, 1); tempData = data; dataExists = true; } catch (NullPointerException e) { } } // end if } // end while dataExists = false; } public void update() { } public boolean onTouchEvent(MotionEvent event) { if (event.getAction() == MotionEvent.ACTION_DOWN) { ax = event.getX(); ay = event.getY(); } if (event.getAction() == MotionEvent.ACTION_MOVE) { ax = event.getX(); ay = event.getY(); } if (event.getAction() == MotionEvent.ACTION_UP) { } // update(); return true; } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { } @Override public void surfaceCreated(SurfaceHolder holder) { // update(); visible = true; if (grabAudio == null) { if (frequency) { grabAudio = new GrabAudio(1, resolution, audioSessionID); } else { grabAudio = new GrabAudio(0, resolution, audioSessionID); } } grabAudio.start(); getData(); // setWillNotDraw(false); //Allows us to use 
invalidate() to call onDraw() _thread = new MainThread(getHolder(), this); // Create the render thread, mark it as running and start it so it keeps calling onDraw() _thread.setRunning(true); _thread.start(); } @Override public void surfaceDestroyed(SurfaceHolder holder) { if (grabAudio != null) { grabAudio.stop(); grabAudio.release(); grabAudio = null; } try { _thread.setRunning(false); // Tell the render thread to stop _thread.join(); // and wait for it to finish } catch (InterruptedException e) { } } }
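/*
 * MainThread is referenced by surfaceCreated()/surfaceDestroyed() above but is not
 * part of this file. The sketch below is an assumption about its shape, derived
 * only from the calls DrawingPanel makes on it: a constructor taking the
 * SurfaceHolder and the panel, setRunning(boolean), start() and join(). It is the
 * conventional SurfaceView render loop, not the project's actual implementation.
 */
package com.nghianh.giaitriviet.providers.radio.visualizer;

import android.graphics.Canvas;
import android.view.SurfaceHolder;

class MainThread extends Thread {
    private final SurfaceHolder surfaceHolder;
    private final DrawingPanel panel;
    private volatile boolean running;

    MainThread(SurfaceHolder surfaceHolder, DrawingPanel panel) {
        this.surfaceHolder = surfaceHolder;
        this.panel = panel;
    }

    void setRunning(boolean running) {
        this.running = running;
    }

    @Override
    public void run() {
        while (running) {
            // Pull the next block of waveform/FFT data, then render one frame.
            panel.getData();
            Canvas canvas = null;
            try {
                canvas = surfaceHolder.lockCanvas();
                if (canvas != null) {
                    synchronized (surfaceHolder) {
                        panel.onDraw(canvas);
                    }
                }
            } finally {
                if (canvas != null) {
                    surfaceHolder.unlockCanvasAndPost(canvas);
                }
            }
        }
    }
}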
/* Copyright (c) 2008 Health Market Science, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.healthmarketscience.rmiio; import java.io.BufferedInputStream; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InvalidObjectException; import java.io.NotSerializableException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.rmi.RemoteException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * <p> * RemoteInputStream implementation which mimics the RemoteInputStream * functionality while not actually causing any additional RMI invocations. * This class is <i>not recommended for general use</i>, but may be useful (or * even required) in certain scenarios. It basically works by writing the * stream data directly into the ObjectOutputStream during serialization. * There are a variety of implications to this approach, so please read the * pros and cons list carefully before deciding to use this class. * </p> * <ul> * <li><b>Pros:</b> * <ul> * <li>No extra RMI invocations are needed, so this implementation will not * have problems with firewalls. * <li>Since this implementation is not an RMI server, no extra RMI related * objects are instantiated (servers, stubs, etc.) and no export is * needed. * </ul> * <li><b>Cons:</b> * <ul> * <li>Send operations cannot be retried automatically. Once the underlying * stream has begun serialization, it can no longer be reserialized. * And, since potentially lots of data may be sent in one invocation, * the chance of network failures is increased. <i>All in all, this * implementation is much more fragile in the face of network * failures</i>. Note, however, that the application layer may be able * to manually handle retries if the underlying stream is "restartable", * such as a stream based on a File. * <li>If the RPC implementation keeps the entire invocation in memory, you * will have memory consumption problems again. This should not be a * problem with vanilla RMI, which should write the data directly to an * underlying socket. * <li>The server side process cannot start processing the data until the * entire stream is sent (whereas with the other implementations, the * data can be processed as it is received). * <li>The stream data is temporarily stored on the server's local * filesystem. This can have any number of implications including * slower performance, excess disk consumption, and/or exposure of * sensitive data if temp file attributes are incorrect. * <li>This implementation is RMI specific, so it cannot be used with any * non-RMI compatible RPC frameworks (e.g. CORBA). * </ul> * </ul> * <p> * Finally, the good news is that since this implementation is a * RemoteInputStream, <i>the client-side decision to use this class will not * impact the server</i>. 
If the need arises in the future, client code which * uses this class may switch over to using one of the more robust * RemoteInputStream implementations without any changes to the server. * * @author James Ahlborn */ public class DirectRemoteInputStream implements RemoteInputStream, Closeable, Serializable { private static final Log LOG = LogFactory.getLog(DirectRemoteInputStream.class); private static final long serialVersionUID = 20080125L; /** status of the consumption of the underlying stream */ private enum ConsumptionState { /** the underlying stream has not been consumed yet */ NONE, /** the underlying stream is being consumed locally */ LOCAL, /** the underlying stream is being consumed by serialization */ SERIAL; } /** chunk code which indicates that the next chunk of data is the default length */ private static final int DEFAULT_CHUNK_CODE = 0; /** chunk code which indicates that the next chunk of data is a custom length (the next 4 bytes will include the integer value of that length) */ private static final int CUSTOM_CHUNK_CODE = 1; /** chunk code which indicates the end of the embedded stream data. */ private static final int EOF_CODE = 2; /** stream containing the actual data. when this class is instantiated directly, this will be any stream. when this class is deserialized, this will be a temporary file on the local filesystem. */ private transient InputStream _in; /** optional monitor for the initial serialization of the underlying stream */ private transient RemoteStreamMonitor<RemoteInputStreamServer> _monitor; /** indicates how this object is being consumed. it can be consumed locally or for serialization, but not both. */ private transient ConsumptionState _consumptionState; /** indicates whether or not the underlying stream has been completely consumed */ private transient boolean _gotEOF; /** local file which is caching the streamed data, only used when this object is deserialized */ private transient File _tmpFile; /** whether or not the bytes should be compressed when serialized */ private final boolean _compress; public DirectRemoteInputStream(InputStream in) { this(in, true, RemoteInputStreamServer.DUMMY_MONITOR); } public DirectRemoteInputStream(InputStream in, boolean compress) { this(in, compress, RemoteInputStreamServer.DUMMY_MONITOR); } public DirectRemoteInputStream( InputStream in, boolean compress, RemoteStreamMonitor<RemoteInputStreamServer> monitor) { if(in == null) { throw new IllegalArgumentException("InputStream cannot be null"); } _in = in; _compress = compress; _monitor = monitor; _consumptionState = ConsumptionState.NONE; } /** * Mark this object as being consumed locally. This happens whenever the * underlying stream starts being consumed via the regular read/skip * methods. 
*/ private void markLocalConsumption() { if(_consumptionState == ConsumptionState.SERIAL) { throw new IllegalStateException( "locally consuming stream which was already serialized"); } _consumptionState = ConsumptionState.LOCAL; } @Override public boolean usingGZIPCompression() throws IOException, RemoteException { return _compress; } @Override public int available() throws IOException, RemoteException { markLocalConsumption(); return _in.available(); } @Override public void close(boolean readSuccess) throws IOException, RemoteException { close(); } @Override public byte[] readPacket(int packetId) throws IOException, RemoteException { // note, this code should always be used locally, so the incoming packetId // can be safely ignored if(_gotEOF) { return null; } markLocalConsumption(); byte[] packet = PacketInputStream.readPacket( _in, new byte[RemoteInputStreamServer.DEFAULT_CHUNK_SIZE]); _gotEOF = (packet == null); return packet; } @Override public long skip(long n, int skipId) throws IOException, RemoteException { // note, this code should always be used locally, so the incoming skipId // can be safely ignored markLocalConsumption(); return _in.skip(n); } @Override public void close() throws IOException { if(_consumptionState == ConsumptionState.NONE) { _consumptionState = ConsumptionState.LOCAL; } try { if(_in != null) { _in.close(); } } finally { _in = null; _gotEOF = true; // attempt to delete temp file (if any) if(_tmpFile != null) { _tmpFile.delete(); _tmpFile = null; } } } /** * Serializes this object and all of the underlying stream's data directly * to the given ObjectOutputStream. * @serialData the compression status of the stream, followed by the default * chunk size for the serialized stream data (int), followed by * chunks of the underlying stream. each chunk has a chunk code * which indicates how to handle it's length (either default, * explicit as int, or EOF), and then the specified number of * bytes if not EOF. */ private void writeObject(ObjectOutputStream out) throws IOException { switch(_consumptionState) { case NONE: // this is the required state break; case LOCAL: case SERIAL: throw new NotSerializableException( getClass().getName() + " (underlying stream has already been consumed, type: " + _consumptionState + ")"); default: throw new RuntimeException("unknown state " + _consumptionState); } out.defaultWriteObject(); // once we start consuming the inputstream, we can't rewrite it _consumptionState = ConsumptionState.SERIAL; final int defaultChunkSize = RemoteInputStreamServer.DEFAULT_CHUNK_SIZE; // note, we create RemoteInputStreamServer instances, but we do not // actually export them. RemoteInputStreamServer server = null; try { if(_compress && (_tmpFile == null)) { // this is the first time the data is being read, and we need to // compress it as we read it. server = new GZIPRemoteInputStream(_in, _monitor, defaultChunkSize); } else { // we are re-serializing a previously serialized stream, so the data // is already compressed (if compression was desired) server = new SimpleRemoteInputStream(_in, _monitor, defaultChunkSize); } // record the default chunk size out.writeInt(defaultChunkSize); int packetId = RemoteStreamServer.INITIAL_VALID_SEQUENCE_ID; while(true) { byte[] packet = server.readPacket(packetId++); if(packet != null) { if(packet.length > 0) { // we have a packet with data, write it to the output stream. if // the packet is a different length, record the length. 
if(packet.length == defaultChunkSize) { out.write(DEFAULT_CHUNK_CODE); } else { out.write(CUSTOM_CHUNK_CODE); out.writeInt(packet.length); } out.write(packet); } } else { // reached end of stream, indicate this out.write(EOF_CODE); break; } } // local stream is exhausted _gotEOF = true; // indicate successful read try { server.close(true); } catch(IOException e) { // log, but ignore failures here if(LOG.isDebugEnabled()) { LOG.debug("Failed closing server", e); } } } finally { RmiioUtil.closeQuietly(server); RmiioUtil.closeQuietly(this); } } /** * Reads the state of this object and all of the underlying stream's data * directly from the given ObjectInputStream. The stream data is stored in * a temporary file in the default java temp directory with the name * {@code "stream_<num>.dat"}. */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); // read the default chunk size from the incoming file final int defaultChunkSize = in.readInt(); checkChunkSize(defaultChunkSize); // setup a temp file for the incoming data (make sure it gets cleaned up // somehow) _tmpFile = File.createTempFile("stream_", ".dat"); _tmpFile.deleteOnExit(); FileOutputStream out = new FileOutputStream(_tmpFile); try { // limit buffer size in case of malicious input byte[] transferBuf = new byte[ Math.min(defaultChunkSize, RemoteInputStreamServer.DEFAULT_CHUNK_SIZE)]; while(true) { // read in another chunk int chunkCode = in.read(); if(chunkCode == EOF_CODE) { // all done break; } int readLen = defaultChunkSize; if(chunkCode != DEFAULT_CHUNK_CODE) { readLen = in.readInt(); checkChunkSize(readLen); } // copy chunk into temp file copy(in, out, transferBuf, readLen); } // attempt to close the temp file. if successful, we're good to go out.close(); // sweet, setup final state _monitor = RemoteInputStreamServer.DUMMY_MONITOR; _in = new BufferedInputStream(new FileInputStream(_tmpFile)); // the underlying stream is now in it's initial state _consumptionState = ConsumptionState.NONE; _gotEOF = false; } finally { RmiioUtil.closeQuietly(out); } } /** * Throws an InvalidObjectException if the given chunkSize is invalid. */ private static void checkChunkSize(int chunkSize) throws IOException { if(chunkSize <= 0) { throw new InvalidObjectException("invalid chunk size " + chunkSize); } } /** * Copies the given number of bytes from the given InputStream to the given * OutputStream using the given buffer for transfer. The given InputStream * is expected to have at least this many bytes left to read, otherwise an * InvalidObjectException will be thrown. */ private static void copy(InputStream in, OutputStream out, byte[] buffer, int length) throws IOException { while(length > 0) { int readLen = in.read(buffer, 0, Math.min(buffer.length, length)); if(readLen < 0) { throw new InvalidObjectException("input stream data truncated"); } out.write(buffer, 0, readLen); length -= readLen; } } }
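/*
 * Client-side usage sketch for DirectRemoteInputStream above. The FileTransfer
 * remote interface and its sendFile() method are hypothetical, invented for
 * illustration. As the class javadoc notes, no export is needed: the stream bytes
 * are embedded into the RMI call during serialization, and a send cannot be
 * retried once the underlying stream has been consumed, so a file-backed source
 * would be re-wrapped for each attempt.
 */
package com.healthmarketscience.rmiio;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class DirectRemoteInputStreamSketch {

    /** Hypothetical RMI service interface accepting a RemoteInputStream. */
    public interface FileTransfer extends java.rmi.Remote {
        void sendFile(RemoteInputStream data) throws IOException;
    }

    public static void send(FileTransfer service, File file) throws IOException {
        FileInputStream in = new FileInputStream(file);
        try {
            // The wrapper is serialized as part of the RMI call; its writeObject()
            // streams (and optionally compresses) the file data into the call.
            service.sendFile(new DirectRemoteInputStream(in, true));
        } finally {
            in.close();
        }
    }
}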
/* ======================================================= Copyright 2020 - ePortfolium - Licensed under the Educational Community License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.osedu.org/licenses/ECL-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================================================= */ package eportfolium.com.karuta.model.bean; // Generated 13 juin 2019 19:14:13 by Hibernate Tools 5.2.10.Final import java.io.Serializable; import java.util.Date; import java.util.UUID; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EntityListeners; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import org.hibernate.annotations.GenericGenerator; import org.hibernate.search.annotations.Indexed; /** ResourceTable generated by hbm2java */ @Indexed @Entity @EntityListeners(AuditListener.class) @Table(name = "resource_table") public class ResourceTable implements Serializable { private static final long serialVersionUID = 3994535580465308742L; private UUID id; private String xsiType; private String content; private Credential credential; private Long modifUserId; private Date modifDate; private Node node; private Node resNode; private Node contextNode; public ResourceTable() { } public ResourceTable(ResourceTable resource) { this.xsiType = resource.getXsiType() != null ? new String(resource.getXsiType()) : null; this.content = resource.getContent() != null ? new String(resource.getContent()) : null; this.credential = resource.getCredential() != null ? new Credential(resource.getCredential().getId()) : null; this.modifUserId = resource.getModifUserId() != null ? Long.valueOf(resource.getModifUserId()) : null; this.modifDate = resource.getModifDate() != null ? 
new Date(resource.getModifDate().getTime()) : null; } public ResourceTable(UUID id) { this.id = id; } public ResourceTable(UUID nodeUuid, Long modifUserId) { this.id = nodeUuid; this.modifUserId = modifUserId; } public ResourceTable(UUID nodeUuid, String xsiType, String content, Credential credential, Long modifUserId, Date modifDate) { this.id = nodeUuid; this.xsiType = xsiType; this.content = content; this.credential = credential; this.modifUserId = modifUserId; this.modifDate = modifDate; } @Id @GenericGenerator(name = "uuid2", strategy = "uuid2") @GeneratedValue(generator = "uuid2") @Column(name = "node_uuid", unique = true, nullable = false, length = 16) public UUID getId() { return this.id; } public void setId(UUID id) { this.id = id; } @Column(name = "xsi_type", length = 50) public String getXsiType() { return this.xsiType; } public void setXsiType(String xsiType) { this.xsiType = xsiType; } @Lob @Column(name = "content") public String getContent() { return this.content; } public void setContent(String content) { this.content = content; } @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "user_id") public Credential getCredential() { return this.credential; } public void setCredential(Credential credential) { this.credential = credential; } @Column(name = "modif_user_id", nullable = false) public Long getModifUserId() { return this.modifUserId; } public void setModifUserId(Long modifUserId) { this.modifUserId = modifUserId; } @Temporal(TemporalType.TIMESTAMP) @Column(name = "modif_date", length = 19) public Date getModifDate() { return this.modifDate; } public void setModifDate(Date modifDate) { this.modifDate = modifDate; } @OneToOne(fetch = FetchType.LAZY, mappedBy = "resource", cascade = CascadeType.ALL) public Node getNode() { return node; } public void setNode(Node node) { this.node = node; } @OneToOne(fetch = FetchType.LAZY, mappedBy = "resResource", cascade = CascadeType.ALL) public Node getResNode() { return resNode; } public void setResNode(Node resNode) { this.resNode = resNode; } @OneToOne(fetch = FetchType.LAZY, mappedBy = "contextResource", cascade = CascadeType.ALL) public Node getContextNode() { return contextNode; } public void setContextNode(Node contextNode) { this.contextNode = contextNode; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((getId() == null) ? 0 : getId().hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ResourceTable other = (ResourceTable) obj; if (getId() == null) { if (other.getId() != null) return false; } else if (!getId().equals(other.getId())) return false; return true; } }
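/*
 * Minimal JPA usage sketch for the ResourceTable entity above, using only the
 * standard javax.persistence API. The persistence unit name "karuta-pu" and the
 * field values are placeholders for illustration; the UUID primary key is
 * assigned by the "uuid2" generator declared on getId() when persist() is called.
 */
package eportfolium.com.karuta.model.bean;

import java.util.Date;
import java.util.UUID;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

public class ResourceTableSketch {
    public static void main(String[] args) {
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("karuta-pu");
        EntityManager em = emf.createEntityManager();
        try {
            em.getTransaction().begin();

            ResourceTable resource = new ResourceTable();
            resource.setXsiType("nodeRes");        // placeholder value
            resource.setContent("<content/>");     // placeholder value
            resource.setModifUserId(1L);           // modif_user_id is NOT NULL
            resource.setModifDate(new Date());
            em.persist(resource);                  // id assigned by the uuid2 generator

            em.getTransaction().commit();

            UUID id = resource.getId();
            ResourceTable reloaded = em.find(ResourceTable.class, id);
            System.out.println(reloaded.getXsiType());
        } finally {
            em.close();
            emf.close();
        }
    }
}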
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.nd4j.tensorflow.conversion; import org.nd4j.shade.protobuf.InvalidProtocolBufferException; import org.bytedeco.javacpp.*; import org.bytedeco.javacpp.indexer.*; import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.concurrency.AffinityManager; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.compression.CompressedDataBuffer; import org.nd4j.linalg.compression.CompressionDescriptor; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.util.ArrayUtil; import org.nd4j.tensorflow.conversion.graphrunner.SavedModelConfig; import org.tensorflow.framework.MetaGraphDef; import org.tensorflow.framework.SignatureDef; import org.tensorflow.framework.TensorInfo; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Map; import org.bytedeco.tensorflow.*; import static org.bytedeco.tensorflow.global.tensorflow.*; /** * Interop between nd4j {@link INDArray} * and {@link TF_Tensor} * * @author Adam Gibson */ public class TensorflowConversion { //used for passing to tensorflow: this dummy de allocator //allows us to use nd4j buffers for memory management //rather than having them managed by tensorflow private static Deallocator_Pointer_long_Pointer calling; private static TensorflowConversion INSTANCE; /** * Get a singleton instance * @return */ public static TensorflowConversion getInstance() { if(INSTANCE == null) INSTANCE = new TensorflowConversion(); return INSTANCE; } private TensorflowConversion() { if(calling == null) calling = DummyDeAllocator.getInstance(); } /** * Convert an {@link INDArray} * to a {@link TF_Tensor} * with zero copy. 
* Uses a direct pointer to the underlying ndarray's * data * @param ndArray the ndarray to use * @return the equivalent {@link TF_Tensor} */ public TF_Tensor tensorFromNDArray(INDArray ndArray) { if(ndArray == null) { throw new IllegalArgumentException("NDArray must not be null!"); } //we infer data type from the ndarray.databuffer() //for now we throw an exception if(ndArray.data() == null) { throw new IllegalArgumentException("Unable to infer data type from null databuffer"); } if(ndArray.isView() || ndArray.ordering() != 'c') { ndArray = ndArray.dup('c'); } long[] ndShape = ndArray.shape(); long[] tfShape = new long[ndShape.length]; System.arraycopy(ndShape, 0, tfShape, 0, ndShape.length); int type; DataBuffer data = ndArray.data(); DataType dataType = data.dataType(); switch (dataType) { case DOUBLE: type = DT_DOUBLE; break; case FLOAT: type = DT_FLOAT; break; case INT: type = DT_INT32; break; case HALF: type = DT_HALF; break; case COMPRESSED: CompressedDataBuffer compressedData = (CompressedDataBuffer)data; CompressionDescriptor desc = compressedData.getCompressionDescriptor(); String algo = desc.getCompressionAlgorithm(); switch (algo) { case "FLOAT16": type = DT_HALF; break; case "INT8": type = DT_INT8; break; case "UINT8": type = DT_UINT8; break; case "INT16": type = DT_INT16; break; case "UINT16": type = DT_UINT16; break; default: throw new IllegalArgumentException("Unsupported compression algorithm: " + algo); } break; case LONG: type = DT_INT64; break; case UTF8: type = DT_STRING; break; default: throw new IllegalArgumentException("Unsupported data type: " + dataType); } try { Nd4j.getAffinityManager().ensureLocation(ndArray, AffinityManager.Location.HOST); } catch (Exception e) { // ND4J won't let us access compressed data in GPU memory, so we'll let TensorFlow do the conversion instead ndArray.getDouble(0); // forces decompression and data copy to host data = ndArray.data(); dataType = data.dataType(); switch (dataType) { case DOUBLE: type = DT_DOUBLE; break; case FLOAT: type = DT_FLOAT; break; case INT: type = DT_INT32; break; case LONG: type = DT_INT64; break; case UTF8: type = DT_STRING; break; default: throw new IllegalArgumentException("Unsupported data type: " + dataType); } } LongPointer longPointer = new LongPointer(tfShape); TF_Tensor tf_tensor = null; if (type == DT_STRING) { long size = 0; long length = ndArray.length(); BytePointer[] strings = new BytePointer[(int)length]; for (int i = 0; i < length; i++) { strings[i] = new BytePointer(ndArray.getString(i)); size += TF_StringEncodedSize(strings[i].capacity()); } tf_tensor = TF_AllocateTensor( type, longPointer, tfShape.length, 8 * length + size); long offset = 0; BytePointer tf_data = new BytePointer(TF_TensorData(tf_tensor)).capacity(TF_TensorByteSize(tf_tensor)); TF_Status status = TF_NewStatus(); for (int i = 0; i < length; i++) { tf_data.position(8 * i).putLong(offset); offset += TF_StringEncode(strings[i], strings[i].capacity() - 1, tf_data.position(8 * length + offset), tf_data.capacity() - tf_data.position(), status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to convert tensor " + TF_Message(status).getString()); } } TF_DeleteStatus(status); } else { tf_tensor = TF_NewTensor( type, longPointer, tfShape.length, data.pointer(), data.length() * data.getElementSize(), calling,null); } return tf_tensor; } /** * Convert a {@link TF_Tensor} * to an {@link INDArray} * using zero copy. * It will use the underlying * pointer within nd4j.
* @param tensor the tensor to use * @return */ public INDArray ndArrayFromTensor(TF_Tensor tensor) { int rank = TF_NumDims(tensor); int[] ndShape; if (rank == 0) { // scalar ndShape = new int[] { 1 }; } else { ndShape = new int[rank]; for (int i = 0; i < ndShape.length; i++) { ndShape[i] = (int) TF_Dim(tensor,i); } } int tfType = TF_TensorType(tensor); DataType nd4jType = typeFor(tfType); int length = ArrayUtil.prod(ndShape); INDArray array; if (nd4jType == DataType.UTF8) { String[] strings = new String[length]; BytePointer data = new BytePointer(TF_TensorData(tensor)).capacity(TF_TensorByteSize(tensor)); BytePointer str = new BytePointer((Pointer)null); SizeTPointer size = new SizeTPointer(1); TF_Status status = TF_NewStatus(); for (int i = 0; i < length; i++) { long offset = data.position(8 * i).getLong(); TF_StringDecode(data.position(8 * length + offset), data.capacity() - data.position(), str, size, status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to convert tensor " + TF_Message(status).getString()); } strings[i] = str.position(0).capacity(size.get()).getString(); } TF_DeleteStatus(status); array = Nd4j.create(strings); } else { Pointer pointer = TF_TensorData(tensor).capacity(length); Indexer indexer = indexerForType(nd4jType,pointer); DataBuffer d = Nd4j.createBuffer(indexer.pointer(),nd4jType,length,indexer); array = Nd4j.create(d,ndShape); } // we don't need this in this case. Device memory will be updated right in the constructor //Nd4j.getAffinityManager().tagLocation(array, AffinityManager.Location.HOST); return array; } private Indexer indexerForType(DataType type,Pointer pointer) { switch(type) { case DOUBLE: return DoubleIndexer.create(new DoublePointer(pointer)); case FLOAT: return FloatIndexer.create(new FloatPointer(pointer)); case INT: return IntIndexer.create(new IntPointer(pointer)); case LONG: return LongIndexer.create(new LongPointer(pointer)); default: throw new IllegalArgumentException("Illegal type " + type); } } private DataType typeFor(int tensorflowType) { switch(tensorflowType) { case DT_DOUBLE: return DataType.DOUBLE; case DT_FLOAT: return DataType.FLOAT; case DT_INT32: return DataType.LONG; case DT_INT64: return DataType.LONG; case DT_STRING: return DataType.UTF8; default: throw new IllegalArgumentException("Illegal type " + tensorflowType); } } /** * Get an initialized {@link TF_Graph} * based on the passed in file * (the file must be a binary protobuf/pb file) * The graph will be modified to be associated * with the device associated with this current thread. * * Depending on the active {@link Nd4j#getBackend()} * the device will either be the gpu pinned to the current thread * or the cpu * @param filePath the path to the file to read * @return the initialized graph * @throws IOException */ public TF_Graph loadGraph(String filePath, TF_Status status) throws IOException { byte[] bytes = Files.readAllBytes(Paths.get(filePath)); return loadGraph(bytes, status); } /** * Infers the device for the given thread * based on the {@link Nd4j#getAffinityManager()} * Usually, this will either be a gpu or cpu * reserved for the current device. * You can think of the "current thread" * as a worker. 
This is mainly useful with multiple gpus * @return the tensorflow device name for the current thread */ public static String defaultDeviceForThread() { Integer deviceForThread = Nd4j.getAffinityManager().getDeviceForCurrentThread(); String deviceName = null; //gpu if(Nd4j.getBackend().getClass().getName().contains("JCublasBackend")) { deviceName = "/device:gpu:" + deviceForThread; } else { deviceName = "/device:cpu:" + deviceForThread; } return deviceName; } /** * Get an initialized {@link TF_Graph} * based on the passed in byte array content * (the content must be a binary protobuf/pb file) * The graph will be modified to be associated * with the device associated with this current thread. * * Depending on the active {@link Nd4j#getBackend()} * the device will either be the gpu pinned to the current thread * or the cpu * @param content the binary protobuf/pb content of the graph to read * @return the initialized graph */ public TF_Graph loadGraph(byte[] content, TF_Status status) { byte[] toLoad = content; TF_Buffer graph_def = TF_NewBufferFromString(new BytePointer(toLoad), content.length); TF_Graph graphC = TF_NewGraph(); TF_ImportGraphDefOptions opts = TF_NewImportGraphDefOptions(); TF_GraphImportGraphDef(graphC, graph_def, opts, status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to import graph " + TF_Message(status).getString()); } TF_DeleteImportGraphDefOptions(opts); return graphC; } /** * Load a session based on the saved model * @param savedModelConfig the configuration for the saved model * @param options the session options to use * @param runOptions the run configuration to use * @param graph the tf graph to use * @param inputsMap the input map * @param outputsMap the output names * @param status the status object to use for verifying the results * @return the loaded session */ public TF_Session loadSavedModel(SavedModelConfig savedModelConfig, TF_SessionOptions options, TF_Buffer runOptions, TF_Graph graph, Map<String, String> inputsMap, Map<String, String> outputsMap, TF_Status status) { TF_Buffer metaGraph = TF_Buffer.newBuffer(); TF_Session session = TF_LoadSessionFromSavedModel(options, runOptions, new BytePointer(savedModelConfig.getSavedModelPath()), new BytePointer(savedModelConfig.getModelTag()), 1, graph, metaGraph, status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to import model " + TF_Message(status).getString()); } MetaGraphDef metaGraphDef; try { metaGraphDef = MetaGraphDef.parseFrom(metaGraph.data().capacity(metaGraph.length()).asByteBuffer()); } catch (InvalidProtocolBufferException ex) { throw new IllegalStateException("ERROR: Unable to import model " + ex); } Map<String, SignatureDef> signatureDefMap = metaGraphDef.getSignatureDefMap(); SignatureDef signatureDef = signatureDefMap.get(savedModelConfig.getSignatureKey()); Map<String, TensorInfo> inputs = signatureDef.getInputsMap(); for (Map.Entry<String, TensorInfo> e : inputs.entrySet()) { inputsMap.put(e.getKey(), e.getValue().getName()); } Map<String, TensorInfo> outputs = signatureDef.getOutputsMap(); for (Map.Entry<String, TensorInfo> e : outputs.entrySet()) { outputsMap.put(e.getKey(), e.getValue().getName()); } return session; } }
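// --- Hypothetical usage sketch (not part of the original sources). Round-trips a small
// float matrix through tensorFromNDArray(...) and ndArrayFromTensor(...) from the class
// above. Assumes an nd4j backend plus the bundled TensorFlow natives are on the
// classpath; the shape and values are arbitrary.
import org.bytedeco.tensorflow.TF_Tensor;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.tensorflow.conversion.TensorflowConversion;

class TensorflowConversionSketch {
    public static void main(String[] args) {
        TensorflowConversion conversion = TensorflowConversion.getInstance();

        // 2x3 float matrix in 'c' order, which tensorFromNDArray expects (views and
        // non-'c' arrays are duplicated internally).
        INDArray input = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6}, new int[]{2, 3}, 'c');

        // Wrap the nd4j buffer in a TF_Tensor without copying the data.
        TF_Tensor tensor = conversion.tensorFromNDArray(input);

        // And back again; the round-tripped array should equal the input.
        INDArray roundTripped = conversion.ndArrayFromTensor(tensor);
        System.out.println(input.equals(roundTripped));
    }
}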
/** * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.events.aggr.session; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import javax.persistence.Query; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.ParameterExpression; import javax.persistence.criteria.Root; import org.jasig.portal.events.LoginEvent; import org.jasig.portal.events.PortalEvent; import org.jasig.portal.events.aggr.groups.AggregatedGroupLookupDao; import org.jasig.portal.events.aggr.groups.AggregatedGroupMapping; import org.jasig.portal.groups.ICompositeGroupService; import org.jasig.portal.groups.IEntityGroup; import org.jasig.portal.groups.IGroupMember; import org.jasig.portal.jpa.BaseAggrEventsJpaDao; import org.jasig.portal.jpa.cache.EntityManagerCache; import org.jasig.portal.security.IPerson; import org.jasig.portal.utils.cache.CacheKey; import org.joda.time.DateTime; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Repository; import com.google.common.base.Function; /** * @author Eric Dalquist * @version $Revision$ */ @Repository("eventSessionDao") public class JpaEventSessionDao extends BaseAggrEventsJpaDao implements EventSessionDao { private final static String EVENT_SESSION_CACHE_SOURCE = JpaEventSessionDao.class.getName() + "_EVENT_SESSION"; private int maxPurgeBatchSize; private String deleteByEventSessionIdQuery; private CriteriaQuery<EventSessionImpl> findExpiredEventSessionsQuery; private CriteriaQuery<Long> countExpiredEventSessionsQuery; private ParameterExpression<String> eventSessionIdParameter; private ParameterExpression<DateTime> dateTimeParameter; private AggregatedGroupLookupDao aggregatedGroupLookupDao; private ICompositeGroupService compositeGroupService; private EntityManagerCache entityManagerCache; @Autowired @Value("${org.jasig.portal.events.aggr.session.JpaEventSessionDao.maxPurgeBatchSize:100000}") public void setMaxPurgeBatchSize(int maxPurgeBatchSize) { this.maxPurgeBatchSize = maxPurgeBatchSize; } @Autowired public void setEntityManagerCache(EntityManagerCache entityManagerCache) { this.entityManagerCache = entityManagerCache; } @Autowired public void setCompositeGroupService(ICompositeGroupService compositeGroupService) { this.compositeGroupService = compositeGroupService; } @Autowired public void setAggregatedGroupLookupDao(AggregatedGroupLookupDao aggregatedGroupLookupDao) { this.aggregatedGroupLookupDao = aggregatedGroupLookupDao; } @Override public void afterPropertiesSet() throws Exception { this.eventSessionIdParameter = 
this.createParameterExpression(String.class, "eventSessionId"); this.dateTimeParameter = this.createParameterExpression(DateTime.class, "dateTime"); this.findExpiredEventSessionsQuery = this.createCriteriaQuery(new Function<CriteriaBuilder, CriteriaQuery<EventSessionImpl>>() { @Override public CriteriaQuery<EventSessionImpl> apply(CriteriaBuilder cb) { final CriteriaQuery<EventSessionImpl> criteriaQuery = cb.createQuery(EventSessionImpl.class); final Root<EventSessionImpl> root = criteriaQuery.from(EventSessionImpl.class); criteriaQuery.select(root); criteriaQuery.where( cb.lessThanOrEqualTo(root.get(EventSessionImpl_.lastAccessed), dateTimeParameter) ); return criteriaQuery; } }); this.countExpiredEventSessionsQuery = this.createCriteriaQuery(new Function<CriteriaBuilder, CriteriaQuery<Long>>() { @Override public CriteriaQuery<Long> apply(CriteriaBuilder cb) { final CriteriaQuery<Long> criteriaQuery = cb.createQuery(Long.class); final Root<EventSessionImpl> root = criteriaQuery.from(EventSessionImpl.class); criteriaQuery.select(cb.count(root)); criteriaQuery.where( cb.lessThanOrEqualTo(root.get(EventSessionImpl_.lastAccessed), dateTimeParameter) ); return criteriaQuery; } }); this.deleteByEventSessionIdQuery = "DELETE FROM " + EventSessionImpl.class.getName() + " e " + "WHERE e." + EventSessionImpl_.eventSessionId.getName() + " = :" + this.eventSessionIdParameter.getName(); } @AggrEventsTransactional @Override public void storeEventSession(EventSession eventSession) { this.getEntityManager().persist(eventSession); } @AggrEventsTransactional @Override public EventSession getEventSession(PortalEvent event) { final String eventSessionId = event.getEventSessionId(); final CacheKey key = CacheKey.build(EVENT_SESSION_CACHE_SOURCE, eventSessionId); EventSessionImpl eventSession = this.entityManagerCache.get(PERSISTENCE_UNIT_NAME, key); if (eventSession != null) { return eventSession; } final NaturalIdQuery<EventSessionImpl> naturalIdQuery = this.createNaturalIdQuery(EventSessionImpl.class); naturalIdQuery.using(EventSessionImpl_.eventSessionId, eventSessionId); eventSession = naturalIdQuery.load(); if (eventSession == null) { //No event session, somehow we missed the login event. 
Look at the groups the user is currently a member of final Set<AggregatedGroupMapping> groupMappings = this.getGroupsForEvent(event); final DateTime eventDate = event.getTimestampAsDate(); eventSession = new EventSessionImpl(eventSessionId, eventDate, groupMappings); this.getEntityManager().persist(eventSession); this.entityManagerCache.put(PERSISTENCE_UNIT_NAME, key, eventSession); } return eventSession; } @AggrEventsTransactional @Override public void deleteEventSession(String eventSessionId) { final Query query = this.getEntityManager().createQuery(this.deleteByEventSessionIdQuery); query.setParameter(this.eventSessionIdParameter.getName(), eventSessionId); query.executeUpdate(); } private void purgeEventList(int batchSize, DateTime lastAggregatedEventDate) { final TypedQuery<EventSessionImpl> query = this.createQuery(this.findExpiredEventSessionsQuery); query.setParameter(this.dateTimeParameter, lastAggregatedEventDate); query.setMaxResults(batchSize); final List<EventSessionImpl> resultList = query.getResultList(); for (final EventSessionImpl eventSession : resultList) { this.getEntityManager().remove(eventSession); } } @AggrEventsTransactional @Override public int purgeEventSessionsBefore(DateTime lastAggregatedEventDate) { final TypedQuery<Long> countQuery = this.createQuery(this.countExpiredEventSessionsQuery); countQuery.setParameter(this.dateTimeParameter, lastAggregatedEventDate); final int totalRows = countQuery.getSingleResult().intValue(); if (totalRows > 0) { final int numberBatches = totalRows / maxPurgeBatchSize; for (int i = 0; i <= numberBatches; i++) { purgeEventList(maxPurgeBatchSize, lastAggregatedEventDate); } } return totalRows; } /** * Get groups for the event */ protected Set<AggregatedGroupMapping> getGroupsForEvent(PortalEvent event) { final Set<AggregatedGroupMapping> groupMappings = new LinkedHashSet<AggregatedGroupMapping>(); if (event instanceof LoginEvent) { for (final String groupKey : ((LoginEvent) event).getGroups()) { final AggregatedGroupMapping groupMapping = this.aggregatedGroupLookupDao.getGroupMapping(groupKey); if (groupMapping != null) { groupMappings.add(groupMapping); } } } else { final String userName = event.getUserName(); final IGroupMember groupMember = this.compositeGroupService.getGroupMember(userName, IPerson.class); for (@SuppressWarnings("unchecked") final Iterator<IEntityGroup> containingGroups = this.compositeGroupService.findParentGroups(groupMember); containingGroups.hasNext(); ) { final IEntityGroup group = containingGroups.next(); final AggregatedGroupMapping groupMapping = this.aggregatedGroupLookupDao.getGroupMapping(group.getServiceName().toString(), group.getName()); groupMappings.add(groupMapping); } } return groupMappings; } }
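// --- Hypothetical usage sketch (not part of the original sources). Shows how a caller
// holding the Spring-managed "eventSessionDao" bean might use the DAO above; the wiring
// and the purge cut-off are assumptions, and only methods defined in this file are
// invoked.
import org.jasig.portal.events.PortalEvent;
import org.jasig.portal.events.aggr.session.EventSession;
import org.jasig.portal.events.aggr.session.EventSessionDao;
import org.joda.time.DateTime;

class EventSessionDaoUsageSketch {
    private final EventSessionDao eventSessionDao;

    EventSessionDaoUsageSketch(EventSessionDao eventSessionDao) {
        this.eventSessionDao = eventSessionDao; // normally injected by Spring
    }

    EventSession resolveSession(PortalEvent event) {
        // Checks the entity-manager cache, then the natural-id query, and finally
        // creates a session from the user's current group memberships.
        return eventSessionDao.getEventSession(event);
    }

    int purge(DateTime lastAggregatedEventDate) {
        // Deletes expired sessions in batches of maxPurgeBatchSize and returns the
        // number of rows that matched the cut-off.
        return eventSessionDao.purgeEventSessionsBefore(lastAggregatedEventDate);
    }
}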
package com.gentics.mesh.core.role; import static com.gentics.mesh.assertj.MeshAssertions.assertThat; import static com.gentics.mesh.core.data.perm.InternalPermission.CREATE_PERM; import static com.gentics.mesh.core.data.perm.InternalPermission.DELETE_PERM; import static com.gentics.mesh.core.rest.MeshEvent.GROUP_UPDATED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_PERMISSIONS_CHANGED; import static com.gentics.mesh.core.rest.MeshEvent.ROLE_UPDATED; import static com.gentics.mesh.core.rest.MeshEvent.USER_UPDATED; import static com.gentics.mesh.core.rest.common.Permission.CREATE; import static com.gentics.mesh.core.rest.common.Permission.DELETE; import static com.gentics.mesh.core.rest.common.Permission.READ; import static com.gentics.mesh.core.rest.common.Permission.READ_PUBLISHED; import static com.gentics.mesh.core.rest.common.Permission.UPDATE; import static com.gentics.mesh.test.ClientHelper.call; import static com.gentics.mesh.test.ElasticsearchTestMode.TRACKING; import static com.gentics.mesh.test.TestDataProvider.PROJECT_NAME; import static com.gentics.mesh.test.TestSize.FULL; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import com.gentics.mesh.ElementType; import com.gentics.mesh.core.data.dao.GroupDao; import com.gentics.mesh.core.data.dao.RoleDao; import com.gentics.mesh.core.data.dao.UserDao; import com.gentics.mesh.core.data.group.HibGroup; import com.gentics.mesh.core.data.node.HibNode; import com.gentics.mesh.core.data.perm.InternalPermission; import com.gentics.mesh.core.data.role.HibRole; import com.gentics.mesh.core.data.schema.HibMicroschema; import com.gentics.mesh.core.data.user.HibUser; import com.gentics.mesh.core.db.Tx; import com.gentics.mesh.core.rest.common.GenericMessageResponse; import com.gentics.mesh.core.rest.common.Permission; import com.gentics.mesh.core.rest.event.role.PermissionChangedEventModelImpl; import com.gentics.mesh.core.rest.node.NodeListResponse; import com.gentics.mesh.core.rest.node.NodeResponse; import com.gentics.mesh.core.rest.project.ProjectResponse; import com.gentics.mesh.core.rest.role.RolePermissionRequest; import com.gentics.mesh.core.rest.role.RolePermissionResponse; import com.gentics.mesh.core.rest.role.RoleReference; import com.gentics.mesh.core.rest.tag.TagFamilyResponse; import com.gentics.mesh.test.MeshTestSetting; import com.gentics.mesh.test.context.AbstractMeshTest; @MeshTestSetting(elasticsearch = TRACKING, testSize = FULL, startServer = true) public class RoleEndpointPermissionsTest extends AbstractMeshTest { @Test public void testRevokeAllPermissionFromProject() { final String roleName = tx(() -> role().getName()); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); tx.success(); } // All elements in the project should be affected. 
// +2 for the project and the branch int totalEvents = getNodeCount() + tagFamilies().size() + tags().size() + 2; expect(ROLE_PERMISSIONS_CHANGED).match(totalEvents, PermissionChangedEventModelImpl.class, event -> { RoleReference roleRef = event.getRole(); assertEquals("The uuid of the role did not match for the event.", roleUuid(), roleRef.getUuid()); assertEquals("The name of the role did not match for the event.", roleName, roleRef.getName()); }).total(totalEvents); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(true); request.getPermissions().setOthers(false); GenericMessageResponse message = call(() -> client().updateRolePermissions(roleUuid(), "projects/" + projectUuid(), request)); awaitEvents(); waitForSearchIdleEvent(); long nodecontainerCount = tx(() -> getAllContents().count()); // +1 for Project (Branch is not indexed) long updateEvents = nodecontainerCount + tagFamilies().size() + tags().size() + 1; assertThat(trackingSearchProvider()).hasEvents(0, updateEvents, 0, 0, 0); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertThat(message).matches("role_updated_permission", role().getName()); assertFalse(roleDao.hasPermission(role(), InternalPermission.READ_PERM, tagFamily("colors"))); } } @Test public void testRevokeAllPermissionFromProjectByName() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); tx.success(); } try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(true); request.getPermissions().setOthers(false); GenericMessageResponse message = call(() -> client().updateRolePermissions(role().getUuid(), "projects/" + PROJECT_NAME, request)); assertThat(message).matches("role_updated_permission", role().getName()); assertFalse(roleDao.hasPermission(role(), InternalPermission.READ_PERM, tagFamily("colors"))); } } @Test public void testAddPermissionToProjectTagFamily() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); tx.success(); } RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().add(READ); request.getPermissions().add(UPDATE); request.getPermissions().add(CREATE); request.getPermissions().setOthers(false); String tagFamilyUuid = tx(() -> tagFamily("colors").getUuid()); String roleName = tx(() -> role().getName()); GenericMessageResponse message = call(() -> client().updateRolePermissions(roleUuid(), "projects/" + projectUuid() + "/tagFamilies/" + tagFamilyUuid, request)); assertThat(message).matches("role_updated_permission", roleName); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertFalse(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); } } @Test public void testAddPermissionToMicroschema() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); HibMicroschema vcard = microschemaContainer("vcard"); // Revoke all permissions to vcard microschema roleDao.revokePermissions(role(), vcard, InternalPermission.values()); tx.success(); } 
HibMicroschema vcard; try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Validate revocation vcard = microschemaContainer("vcard"); assertFalse(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, vcard)); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().add(READ); request.getPermissions().add(UPDATE); request.getPermissions().add(CREATE); GenericMessageResponse message = call(() -> client().updateRolePermissions(role().getUuid(), "microschemas/" + vcard.getUuid(), request)); assertThat(message).matches("role_updated_permission", role().getName()); } try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertFalse(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, vcard)); assertTrue(roleDao.hasPermission(role(), InternalPermission.UPDATE_PERM, vcard)); assertTrue(roleDao.hasPermission(role(), InternalPermission.CREATE_PERM, vcard)); assertTrue(roleDao.hasPermission(role(), InternalPermission.READ_PERM, vcard)); } } @Test public void testSetOnlyCreatePerm() { String pathToElement = "groups"; RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(true); request.getPermissions().add(CREATE); request.getPermissions().setOthers(false); tx(tx -> { RoleDao roleDao = tx.roleDao(); assertTrue("The role should have read permission on the group.", roleDao.hasPermission(role(), InternalPermission.READ_PERM, group())); }); GenericMessageResponse message = call(() -> client().updateRolePermissions(roleUuid(), pathToElement, request)); assertThat(message).matches("role_updated_permission", tx(() -> role().getName())); tx(tx -> { RoleDao roleDao = tx.roleDao(); assertFalse("The role should no longer have read permission on the group.", roleDao.hasPermission(role(), InternalPermission.READ_PERM, group())); }); } @Test public void testAddPermissionsOnGroup() { String pathToElement = "groups"; String roleName = tx(() -> role().getName()); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(true); request.getPermissions().add(READ); request.getPermissions().add(UPDATE); request.getPermissions().add(CREATE); request.getPermissions().setOthers(false); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertTrue("The role should have delete permission on the group.", roleDao.hasPermission(role(), DELETE_PERM, group())); } expect(ROLE_PERMISSIONS_CHANGED).match(9, PermissionChangedEventModelImpl.class, event -> { RoleReference roleRef = event.getRole(); assertEquals("The role name in the event did not match.", roleName, roleRef.getName()); assertEquals("The role uuid in the event did not match.", roleUuid(), roleRef.getUuid()); ElementType type = event.getType(); switch (type) { case ROLE: assertThat(event.getName()).as("The listed roles should have been affected.").containsPattern("anonymous|joe1_role"); break; case USER: assertThat(event.getName()).as("All users in the groups should be affected due to recursive true.") .containsPattern("joe1|anonymous|guest|admin"); break; case GROUP: assertThat(event.getName()).as("All groups should be affected.") .containsPattern("anonymous|joe1_group|extra_group|guests|admin"); break; default: fail("Unexpected event for type {" + type + "}"); } }).total(9); expect(ROLE_UPDATED).none(); expect(USER_UPDATED).none(); expect(GROUP_UPDATED).none(); GenericMessageResponse message = call(() -> client().updateRolePermissions(roleUuid(), pathToElement, request)); assertThat(message).matches("role_updated_permission", roleName); 
awaitEvents(); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertFalse("The role should no longer have delete permission on the group.", roleDao.hasPermission(role(), DELETE_PERM, group())); } } @Test public void testGrantPermToProjectByName() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); } String pathToElement = PROJECT_NAME + "/tagFamilies/" + tx(() -> tagFamily("colors").getUuid()); RolePermissionResponse response = call(() -> client().readRolePermissions(roleUuid(), pathToElement)); assertThat(response).hasPerm(Permission.basicPermissions()); response = call(() -> client().readRolePermissions(roleUuid(), "/" + PROJECT_NAME)); assertThat(response).hasPerm(Permission.basicPermissions()); tx(tx -> { RoleDao roleDao = tx.roleDao(); roleDao.revokePermissions(role(), project(), DELETE_PERM); }); response = call(() -> client().readRolePermissions(roleUuid(), "/" + PROJECT_NAME)); assertThat(response).hasNoPerm(DELETE); ProjectResponse projectResponse = call(() -> client().findProjectByUuid(projectUuid())); assertFalse(projectResponse.getPermissions().hasPerm(DELETE)); } @Test public void testReadPermissionsOnProjectTagFamily() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); tx.success(); } String pathToElement = tx(() -> "projects/" + project().getUuid() + "/tagFamilies/" + tagFamily("colors").getUuid()); RolePermissionResponse response = call(() -> client().readRolePermissions(roleUuid(), pathToElement)); assertNotNull(response); assertThat(response).hasPerm(Permission.basicPermissions()); } @Test public void testApplyPermissionsOnTag() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); roleDao.revokePermissions(role(), tag("red"), DELETE_PERM); tx.success(); } String pathToElement = tx( () -> "projects/" + project().getUuid() + "/tagFamilies/" + tagFamily("colors").getUuid() + "/tags/" + tag("red").getUuid()); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().setDelete(true); call(() -> client().updateRolePermissions(roleUuid(), pathToElement, request)); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertTrue(roleDao.hasPermission(role(), DELETE_PERM, tag("red"))); } } @Test public void testApplyPermissionsOnTags() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); assertTrue(roleDao.hasPermission(role(), InternalPermission.DELETE_PERM, tagFamily("colors"))); roleDao.revokePermissions(role(), tag("red"), DELETE_PERM); tx.success(); } // TODO - This action will currently only affect the tag family. 
We need to decide how we want to change this behaviour: // https://github.com/gentics/mesh/issues/154 String pathToElement = tx(() -> "projects/" + project().getUuid() + "/tagFamilies/" + tagFamily("colors").getUuid() + "/tags"); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().setDelete(true); call(() -> client().updateRolePermissions(roleUuid(), pathToElement, request)); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertFalse("The perm of the tag should not change since the action currently only affects the tag family itself", roleDao.hasPermission(role(), DELETE_PERM, tag("red"))); assertTrue("The tag family perm did not change", roleDao.hasPermission(role(), DELETE_PERM, tagFamily("colors"))); } } @Test public void testApplyCreatePermissionsOnTagFamily() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); // Add permission on own role roleDao.grantPermissions(role(), role(), InternalPermission.UPDATE_PERM); roleDao.revokePermissions(role(), tagFamily("colors"), CREATE_PERM); roleDao.revokePermissions(role(), tag("red"), CREATE_PERM); assertFalse(roleDao.hasPermission(role(), InternalPermission.CREATE_PERM, tagFamily("colors"))); tx.success(); } String tagFamilyUuid = tx(() -> tagFamily("colors").getUuid()); TagFamilyResponse tagFamilyResponse = call(() -> client().findTagFamilyByUuid(PROJECT_NAME, tagFamilyUuid)); assertFalse(tagFamilyResponse.getPermissions().hasPerm(CREATE)); String pathToElement = tx(() -> "projects/" + project().getUuid() + "/tagFamilies/" + tagFamilyUuid); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().setOthers(true); call(() -> client().updateRolePermissions(roleUuid(), pathToElement, request)); try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertFalse("The perm of the tag should not change since the action currently only affects the tag family itself", roleDao.hasPermission(role(), CREATE_PERM, tag("red"))); assertTrue("The tag family perm did not change", roleDao.hasPermission(role(), CREATE_PERM, tagFamily("colors"))); } tagFamilyResponse = call(() -> client().findTagFamilyByUuid(PROJECT_NAME, tagFamilyUuid)); assertTrue(tagFamilyResponse.getPermissions().hasPerm(CREATE)); } @Test public void testAddRecursivePermissionsToNodes() { String roleUuid; try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); GroupDao groupDao = tx.groupDao(); HibGroup testGroup = groupDao.create("testGroup", user()); HibRole testRole = tx.roleDao().create("testRole", user()); HibUser testUser = tx.userDao().create("test", user()); tx.userDao().setPassword(testUser, "dummy"); groupDao.addRole(testGroup, testRole); groupDao.addUser(testGroup, testUser); roleUuid = testRole.getUuid(); roleDao.grantPermissions(role(), testRole, InternalPermission.values()); tx.success(); } RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(true); request.getPermissions().setRead(true); request.getPermissions().setOthers(false); GenericMessageResponse message = call(() -> client().updateRolePermissions(roleUuid, "projects/" + projectUuid() + "/nodes", request)); assertThat(message).matches("role_updated_permission", "testRole"); request.getPermissions().setUpdate(true); message = call(() -> client().updateRolePermissions(roleUuid, "projects/" + projectUuid() + "/nodes", request)); assertThat(message).matches("role_updated_permission", "testRole"); client().logout().blockingGet(); client().setLogin("test", "dummy"); 
client().login().blockingGet(); NodeListResponse nodeList = call(() -> client().findNodes(PROJECT_NAME)); System.out.println(nodeList.toJson()); for (NodeResponse node : nodeList.getData()) { assertThat(node.getPermissions()).as("Node uuid: " + node.getUuid()).hasPerm(READ, UPDATE).hasNoPerm(CREATE, DELETE) .hasPerm(READ_PUBLISHED); } NodeResponse response = call(() -> client().findNodeByUuid(PROJECT_NAME, contentUuid())); assertThat(response.getPermissions()).hasPerm(READ, UPDATE).hasNoPerm(CREATE, DELETE).hasPerm(READ_PUBLISHED); } @Test public void testAddPermissionToNode() { try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); UserDao userDao = tx.userDao(); HibNode node = folder("2015"); roleDao.revokePermissions(role(), node, InternalPermission.UPDATE_PERM); assertFalse(roleDao.hasPermission(role(), InternalPermission.UPDATE_PERM, node)); assertTrue(userDao.hasPermission(user(), role(), InternalPermission.UPDATE_PERM)); tx.success(); } expect(ROLE_PERMISSIONS_CHANGED).total(1); HibNode node; try (Tx tx = tx()) { node = folder("2015"); RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().add(READ); request.getPermissions().add(UPDATE); request.getPermissions().add(CREATE); GenericMessageResponse message = call( () -> client().updateRolePermissions(role().getUuid(), "projects/" + project().getUuid() + "/nodes/" + node.getUuid(), request)); assertThat(message).matches("role_updated_permission", role().getName()); } try (Tx tx = tx()) { RoleDao roleDao = tx.roleDao(); assertTrue(roleDao.hasPermission(role(), InternalPermission.UPDATE_PERM, node)); assertTrue(roleDao.hasPermission(role(), InternalPermission.CREATE_PERM, node)); assertTrue(roleDao.hasPermission(role(), InternalPermission.READ_PERM, node)); } awaitEvents(); // do an "empty" update (not changing any permissions) and expect no more events eventAsserter().clear(); expect(ROLE_PERMISSIONS_CHANGED).none(); try (Tx tx = tx()) { RolePermissionRequest request = new RolePermissionRequest(); request.setRecursive(false); request.getPermissions().add(READ); request.getPermissions().add(UPDATE); request.getPermissions().add(CREATE); GenericMessageResponse message = call( () -> client().updateRolePermissions(role().getUuid(), "projects/" + project().getUuid() + "/nodes/" + node.getUuid(), request)); assertThat(message).matches("role_updated_permission", role().getName()); } awaitEvents(); } @Test public void testAddPermissionToNonExistingProject() { try (Tx tx = tx()) { RolePermissionRequest request = new RolePermissionRequest(); request.getPermissions().add(READ); String path = "projects/bogus1234/nodes"; call(() -> client().updateRolePermissions(role().getUuid(), path, request), NOT_FOUND, "error_element_for_path_not_found", path); } } }
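// --- Hypothetical helper sketch (not part of the original sources). Condenses the
// request-building pattern the tests above repeat: grant READ recursively and revoke
// everything else. Only types and setters already used in this test are referenced;
// the call shown in the trailing comment is an example path.
package com.gentics.mesh.core.role;

import com.gentics.mesh.core.rest.role.RolePermissionRequest;

final class RolePermissionRequestSketch {

    private RolePermissionRequestSketch() {
    }

    /** Builds a "read-only, recursive" permission request. */
    static RolePermissionRequest readOnlyRecursive() {
        RolePermissionRequest request = new RolePermissionRequest();
        request.setRecursive(true);                 // also apply to all child elements
        request.getPermissions().setRead(true);     // grant READ
        request.getPermissions().setOthers(false);  // revoke every permission not set above
        return request;
    }

    // Applied in the tests via e.g.
    // client().updateRolePermissions(roleUuid(), "projects/" + PROJECT_NAME + "/nodes", readOnlyRecursive());
}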
/* * Copyright 2016 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config; import com.thoughtworks.go.domain.materials.ValidationBean; import com.thoughtworks.go.server.messaging.SendEmailMessage; import com.thoughtworks.go.util.SystemUtil; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.mail.AuthenticationFailedException; import javax.mail.MessagingException; import javax.mail.NoSuchProviderException; import javax.mail.Transport; import javax.mail.internet.MimeMessage; import java.util.Properties; import static com.thoughtworks.go.util.GoConstants.DEFAULT_TIMEOUT; import static javax.mail.Message.RecipientType.TO; public class GoSmtpMailSender implements GoMailSender { private static final Log LOGGER = LogFactory.getLog(GoSmtpMailSender.class); private String host; private int port; private String username; private String password; private Boolean tls; private String administratorEmail; private String from; public GoSmtpMailSender(String hostName, int port, String username, String password, boolean tls, String from, String to) { this.host = hostName; this.port = port; this.username = username; this.password = password; this.tls = tls; this.from = from; this.administratorEmail = to; } public GoSmtpMailSender() { } public void setHost(String host) { this.host = host; } public void setPort(int port) { this.port = port; } public void setUsername(String username) { this.username = username; } public void setPassword(String password) { this.password = password; } public void setTls(Boolean tls) { this.tls = tls; } public ValidationBean send(String subject, String body, String to) { Transport transport = null; try { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("Sending email [%s] to [%s]", subject, to)); } Properties props = mailProperties(); MailSession session = MailSession.getInstance().createWith(props, username, password); transport = session.getTransport(); transport.connect(host, port, nullIfEmpty(username), nullIfEmpty(password)); MimeMessage msg = session.createMessage(from, to, subject, body); transport.sendMessage(msg, msg.getRecipients(TO)); return ValidationBean.valid(); } catch (Exception e) { LOGGER.error(String.format("Sending failed for email [%s] to [%s]", subject, to), e); return ValidationBean.notValid(ERROR_MESSAGE); } finally { if (transport != null) { try { transport.close(); } catch (MessagingException e) { LOGGER.error("Failed to close transport", e); } } } } public ValidationBean send(SendEmailMessage message) { return send(message.getSubject(), message.getBody(), message.getTo()); } private String getUsername() { return nullIfEmpty(username); } private String nullIfEmpty(String aString) { if (aString ==null || aString.isEmpty()) { return null; } return aString; } private Properties mailProperties() { Properties props = new Properties(); props.put("mail.from", from); if (!System.getProperties().containsKey("mail.smtp.connectiontimeout")) { 
props.put("mail.smtp.connectiontimeout", DEFAULT_TIMEOUT); } if (!System.getProperties().containsKey("mail.smtp.timeout")) { props.put("mail.smtp.timeout", DEFAULT_TIMEOUT); } if (System.getProperties().containsKey("mail.smtp.starttls.enable")) { props.put("mail.smtp.starttls.enable", "true"); } String mailProtocol = tls ? "smtps" : "smtp"; props.put("mail.transport.protocol", mailProtocol); return props; } public void setAdministratorEmail(String administratorEmail) { this.administratorEmail = administratorEmail; } public void setFrom(String from) { this.from = from; } public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } GoSmtpMailSender that = (GoSmtpMailSender) o; if (port != that.port) { return false; } if (administratorEmail != null ? !administratorEmail.equals( that.administratorEmail) : that.administratorEmail != null) { return false; } if (from != null ? !from.equals(that.from) : that.from != null) { return false; } if (host != null ? !host.equals(that.host) : that.host != null) { return false; } if (password != null ? !password.equals(that.password) : that.password != null) { return false; } if (tls != null ? !tls.equals(that.tls) : that.tls != null) { return false; } if (username != null ? !username.equals(that.username) : that.username != null) { return false; } return true; } public int hashCode() { int result; result = (host != null ? host.hashCode() : 0); result = 31 * result + port; result = 31 * result + (username != null ? username.hashCode() : 0); result = 31 * result + (password != null ? password.hashCode() : 0); result = 31 * result + (tls != null ? tls.hashCode() : 0); result = 31 * result + (administratorEmail != null ? administratorEmail.hashCode() : 0); result = 31 * result + (from != null ? from.hashCode() : 0); return result; } public static String emailBody() { String ip = SystemUtil.getFirstLocalNonLoopbackIpAddress(); String hostName = SystemUtil.getLocalhostName(); return String.format("You received this configuration test email from Go Server:\n\n%s (%s)\n\nThank you.", hostName, ip); } public static GoMailSender createSender(MailHost mailHost) { GoSmtpMailSender sender = new GoSmtpMailSender(); sender.setHost(mailHost.getHostName()); sender.setPort(mailHost.getPort()); sender.setUsername(mailHost.getUserName()); sender.setPassword(mailHost.getCurrentPassword()); sender.setAdministratorEmail(mailHost.getAdminMail()); sender.setFrom(mailHost.getFrom()); sender.setTls(mailHost.getTls()); return new BackgroundMailSender(sender); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package backtype.storm.utils; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.InputStream; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.atomic.AtomicBoolean; import org.slf4j.Logger; import org.slf4j.LoggerFactory; abstract public class ShellUtils { public static Logger LOG = LoggerFactory.getLogger(ShellUtils.class); // OSType detection public enum OSType { OS_TYPE_LINUX, OS_TYPE_WIN, OS_TYPE_SOLARIS, OS_TYPE_MAC, OS_TYPE_FREEBSD, OS_TYPE_OTHER } public static final OSType osType = getOSType(); static private OSType getOSType() { String osName = System.getProperty("os.name"); if (osName.startsWith("Windows")) { return OSType.OS_TYPE_WIN; } else if (osName.contains("SunOS") || osName.contains("Solaris")) { return OSType.OS_TYPE_SOLARIS; } else if (osName.contains("Mac")) { return OSType.OS_TYPE_MAC; } else if (osName.contains("FreeBSD")) { return OSType.OS_TYPE_FREEBSD; } else if (osName.startsWith("Linux")) { return OSType.OS_TYPE_LINUX; } else { // Some other form of Unix return OSType.OS_TYPE_OTHER; } } // Helper static vars for each platform public static final boolean WINDOWS = (osType == OSType.OS_TYPE_WIN); public static final boolean SOLARIS = (osType == OSType.OS_TYPE_SOLARIS); public static final boolean MAC = (osType == OSType.OS_TYPE_MAC); public static final boolean FREEBSD = (osType == OSType.OS_TYPE_FREEBSD); public static final boolean LINUX = (osType == OSType.OS_TYPE_LINUX); public static final boolean OTHER = (osType == OSType.OS_TYPE_OTHER); /** Token separator regex used to parse Shell tool outputs */ public static final String TOKEN_SEPARATOR_REGEX = WINDOWS ? "[|\n\r]" : "[ \t\n\r\f]"; private long interval; // refresh interval in msec private long lastTime; // last time the command was performed final private boolean redirectErrorStream; // merge stdout and stderr private Map<String, String> environment; // env for the command execution private File dir; private Process process; // sub process used to execute the command private int exitCode; /** Time after which the executing script would be timedout */ protected long timeOutInterval = 0L; /** If or not script timed out */ private AtomicBoolean timedOut; /** If or not script finished executing */ private volatile AtomicBoolean completed; public ShellUtils() { this(0L); } public ShellUtils(long interval) { this(interval, false); } /** * @param interval the minimum duration to wait before re-executing the command. */ public ShellUtils(long interval, boolean redirectErrorStream) { this.interval = interval; this.lastTime = (interval < 0) ? 
0 : -interval; this.redirectErrorStream = redirectErrorStream; } /** * set the environment for the command * * @param env Mapping of environment variables */ protected void setEnvironment(Map<String, String> env) { this.environment = env; } /** * set the working directory * * @param dir The directory where the command would be executed */ protected void setWorkingDirectory(File dir) { this.dir = dir; } /** a Unix command to get the current user's groups list */ public static String[] getGroupsCommand() { return (WINDOWS) ? new String[] { "cmd", "/c", "groups" } : new String[] { "bash", "-c", "groups" }; } /** * a Unix command to get a given user's groups list. If the OS is not WINDOWS, the command will get the user's primary group first and finally get the * groups list which includes the primary group. i.e. the user's primary group will be included twice. */ public static String[] getGroupsForUserCommand(final String user) { // 'groups username' command return is non-consistent across different unixes return new String[] { "bash", "-c", "id -gn " + user + "&& id -Gn " + user }; } /** check to see if a command needs to be executed and execute if needed */ protected void run() throws IOException { if (lastTime + interval > System.currentTimeMillis()) return; exitCode = 0; // reset for next run runCommand(); } /** Run a command */ private void runCommand() throws IOException { ProcessBuilder builder = new ProcessBuilder(getExecString()); Timer timeOutTimer = null; ShellTimeoutTimerTask timeoutTimerTask = null; timedOut = new AtomicBoolean(false); completed = new AtomicBoolean(false); if (environment != null) { builder.environment().putAll(this.environment); } if (dir != null) { builder.directory(this.dir); } builder.redirectErrorStream(redirectErrorStream); process = builder.start(); if (timeOutInterval > 0) { timeOutTimer = new Timer("Shell command timeout"); timeoutTimerTask = new ShellTimeoutTimerTask(this); // One time scheduling. 
timeOutTimer.schedule(timeoutTimerTask, timeOutInterval); } final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream())); BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); final StringBuffer errMsg = new StringBuffer(); // read error and input streams as this would free up the buffers // free the error stream buffer Thread errThread = new Thread() { @Override public void run() { try { String line = errReader.readLine(); while ((line != null) && !isInterrupted()) { errMsg.append(line); errMsg.append(System.getProperty("line.separator")); line = errReader.readLine(); } } catch (IOException ioe) { LOG.warn("Error reading the error stream", ioe); } } }; try { errThread.start(); } catch (IllegalStateException ise) { } try { parseExecResult(inReader); // parse the output // clear the input stream buffer String line = inReader.readLine(); while (line != null) { line = inReader.readLine(); } // wait for the process to finish and check the exit code exitCode = process.waitFor(); // make sure that the error thread exits joinThread(errThread); completed.set(true); // the timeout thread handling // taken care in finally block if (exitCode != 0) { throw new ExitCodeException(exitCode, errMsg.toString()); } } catch (InterruptedException ie) { throw new IOException(ie.toString()); } finally { if (timeOutTimer != null) { timeOutTimer.cancel(); } // close the input stream try { // JDK 7 tries to automatically drain the input streams for us // when the process exits, but since close is not synchronized, // it creates a race if we close the stream first and the same // fd is recycled. the stream draining thread will attempt to // drain that fd!! it may block, OOM, or cause bizarre behavior // see: https://bugs.openjdk.java.net/browse/JDK-8024521 // issue is fixed in build 7u60 InputStream stdout = process.getInputStream(); synchronized (stdout) { inReader.close(); } } catch (IOException ioe) { LOG.warn("Error while closing the input stream", ioe); } if (!completed.get()) { errThread.interrupt(); joinThread(errThread); } try { InputStream stderr = process.getErrorStream(); synchronized (stderr) { errReader.close(); } } catch (IOException ioe) { LOG.warn("Error while closing the error stream", ioe); } process.destroy(); lastTime = System.currentTimeMillis(); } } private static void joinThread(Thread t) { while (t.isAlive()) { try { t.join(); } catch (InterruptedException ie) { if (LOG.isWarnEnabled()) { LOG.warn("Interrupted while joining on: " + t, ie); } t.interrupt(); // propagate interrupt } } } /** return an array containing the command name & its parameters */ protected abstract String[] getExecString(); /** Parse the execution result */ protected abstract void parseExecResult(BufferedReader lines) throws IOException; /** * get the current sub-process executing the given command * * @return process executing the command */ public Process getProcess() { return process; } /** * This is an IOException with exit code added. */ public static class ExitCodeException extends IOException { int exitCode; public ExitCodeException(int exitCode, String message) { super(message); this.exitCode = exitCode; } public int getExitCode() { return exitCode; } } /** * A simple shell command executor. * * <code>ShellCommandExecutor</code>should be used in cases where the output of the command needs no explicit parsing and where the command, working * directory and the environment remains unchanged. 
The output of the command is stored as-is and is expected to be small. */ public static class ShellCommandExecutor extends ShellUtils { private String[] command; private StringBuffer output; public ShellCommandExecutor(String[] execString) { this(execString, null); } public ShellCommandExecutor(String[] execString, File dir) { this(execString, dir, null); } public ShellCommandExecutor(String[] execString, File dir, Map<String, String> env) { this(execString, dir, env, 0L); } /** * Create a new instance of the ShellCommandExecutor to execute a command. * * @param execString The command to execute with arguments * @param dir If not-null, specifies the directory which should be set as the current working directory for the command. If null, the current working * directory is not modified. * @param env If not-null, environment of the command will include the key-value pairs specified in the map. If null, the current environment is not * modified. * @param timeout Specifies the time in milliseconds, after which the command will be killed and the status marked as timedout. If 0, the command will * not be timed out. */ public ShellCommandExecutor(String[] execString, File dir, Map<String, String> env, long timeout) { command = execString.clone(); if (dir != null) { setWorkingDirectory(dir); } if (env != null) { setEnvironment(env); } timeOutInterval = timeout; } /** Execute the shell command. */ public void execute() throws IOException { this.run(); } @Override public String[] getExecString() { return command; } @Override protected void parseExecResult(BufferedReader lines) throws IOException { output = new StringBuffer(); char[] buf = new char[512]; int nRead; while ((nRead = lines.read(buf, 0, buf.length)) > 0) { output.append(buf, 0, nRead); } } /** Get the output of the shell command. */ public String getOutput() { return (output == null) ? "" : output.toString(); } /** * Returns the commands of this instance. Arguments with spaces in are presented with quotes round; other arguments are presented raw * * @return a string representation of the object. */ @Override public String toString() { StringBuilder builder = new StringBuilder(); String[] args = getExecString(); for (String s : args) { if (s.indexOf(' ') >= 0) { builder.append('"').append(s).append('"'); } else { builder.append(s); } builder.append(' '); } return builder.toString(); } } /** * To check if the passed script to shell command executor timed out or not. * * @return if the script timed out. */ public boolean isTimedOut() { return timedOut.get(); } /** * Set if the command has timed out. * */ private void setTimedOut() { this.timedOut.set(true); } /** * Static method to execute a shell command. Covers most of the simple cases without requiring the user to implement the <code>Shell</code> interface. * * @param cmd shell command to execute. * @return the output of the executed command. */ public static String execCommand(String... cmd) throws IOException { return execCommand(null, cmd, 0L); } /** * Static method to execute a shell command. Covers most of the simple cases without requiring the user to implement the <code>Shell</code> interface. * * @param env the map of environment key=value * @param cmd shell command to execute. 
* @param timeout time in milliseconds after which the script should be marked as timed out * @return the output of the executed command. */ public static String execCommand(Map<String, String> env, String[] cmd, long timeout) throws IOException { ShellCommandExecutor exec = new ShellCommandExecutor(cmd, null, env, timeout); exec.execute(); return exec.getOutput(); } /** * Static method to execute a shell command. Covers most of the simple cases without requiring the user to implement the <code>Shell</code> interface. * * @param env the map of environment key=value * @param cmd shell command to execute. * @return the output of the executed command. */ public static String execCommand(Map<String, String> env, String... cmd) throws IOException { return execCommand(env, cmd, 0L); } /** * Timer which is used to time out scripts spawned off by the shell. */ private static class ShellTimeoutTimerTask extends TimerTask { private ShellUtils shell; public ShellTimeoutTimerTask(ShellUtils shell) { this.shell = shell; } @Override public void run() { Process p = shell.getProcess(); try { p.exitValue(); } catch (Exception e) { // Process has not terminated. // So check if it has completed // if not just destroy it. if (p != null && !shell.completed.get()) { shell.setTimedOut(); p.destroy(); } } } } }
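// --- Hypothetical usage sketch (not part of the original sources). Exercises the two
// entry points defined above: the one-shot execCommand(...) helper and a
// ShellCommandExecutor with an environment override and a timeout. The commands are
// Unix-specific placeholders.
import java.io.IOException;
import java.util.Collections;

import backtype.storm.utils.ShellUtils;

class ShellUtilsSketch {
    public static void main(String[] args) throws IOException {
        // Simple case: run a command and capture its stdout.
        System.out.println(ShellUtils.execCommand("uptime"));

        // With an extra environment variable and a 5 second timeout; a non-zero exit
        // code surfaces as ShellUtils.ExitCodeException (an IOException).
        ShellUtils.ShellCommandExecutor executor = new ShellUtils.ShellCommandExecutor(
                new String[] { "bash", "-c", "echo $GREETING" },
                null,                                         // keep the current working directory
                Collections.singletonMap("GREETING", "hi"),   // merged into the child environment
                5000L);                                       // timeout in milliseconds
        executor.execute();
        System.out.println(executor.getOutput());
        System.out.println("timed out: " + executor.isTimedOut());
    }
}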
/* * Copyright 2014-2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.routing.config.impl; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.junit.Before; import org.junit.Test; import org.onlab.packet.IpAddress; import org.onlab.packet.IpPrefix; import org.onlab.packet.MacAddress; import org.onlab.packet.VlanId; import org.onosproject.net.ConnectPoint; import org.onosproject.net.DeviceId; import org.onosproject.net.PortNumber; import org.onosproject.net.host.HostService; import org.onosproject.net.host.InterfaceIpAddress; import org.onosproject.net.host.PortAddresses; import org.onosproject.routing.config.Interface; import java.util.Collections; import java.util.Map; import java.util.Set; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.reset; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * Unit tests for the HostToInterfaceAdaptor class. */ public class HostToInterfaceAdaptorTest { private HostService hostService; private HostToInterfaceAdaptor adaptor; private Set<PortAddresses> portAddresses; private Map<ConnectPoint, Interface> interfaces; private static final ConnectPoint CP1 = new ConnectPoint( DeviceId.deviceId("of:1"), PortNumber.portNumber(1)); private static final ConnectPoint CP2 = new ConnectPoint( DeviceId.deviceId("of:1"), PortNumber.portNumber(2)); private static final ConnectPoint CP3 = new ConnectPoint( DeviceId.deviceId("of:2"), PortNumber.portNumber(1)); private static final ConnectPoint NON_EXISTENT_CP = new ConnectPoint( DeviceId.deviceId("doesnotexist"), PortNumber.portNumber(1)); @Before public void setUp() throws Exception { hostService = createMock(HostService.class); portAddresses = Sets.newHashSet(); interfaces = Maps.newHashMap(); InterfaceIpAddress ia11 = new InterfaceIpAddress(IpAddress.valueOf("192.168.1.1"), IpPrefix.valueOf("192.168.1.0/24")); createPortAddressesAndInterface(CP1, Sets.newHashSet(ia11), MacAddress.valueOf("00:00:00:00:00:01"), VlanId.NONE); // Two addresses in the same subnet InterfaceIpAddress ia21 = new InterfaceIpAddress(IpAddress.valueOf("192.168.2.1"), IpPrefix.valueOf("192.168.2.0/24")); InterfaceIpAddress ia22 = new InterfaceIpAddress(IpAddress.valueOf("192.168.2.2"), IpPrefix.valueOf("192.168.2.0/24")); createPortAddressesAndInterface(CP2, Sets.newHashSet(ia21, ia22), MacAddress.valueOf("00:00:00:00:00:02"), VlanId.vlanId((short) 4)); // Two addresses in different subnets InterfaceIpAddress ia31 = new InterfaceIpAddress(IpAddress.valueOf("192.168.3.1"), IpPrefix.valueOf("192.168.3.0/24")); InterfaceIpAddress ia41 = new InterfaceIpAddress(IpAddress.valueOf("192.168.4.1"), IpPrefix.valueOf("192.168.4.0/24")); createPortAddressesAndInterface(CP3, Sets.newHashSet(ia31, ia41), MacAddress.valueOf("00:00:00:00:00:03"), VlanId.NONE); 
expect(hostService.getAddressBindings()).andReturn(portAddresses).anyTimes(); replay(hostService); adaptor = new HostToInterfaceAdaptor(hostService); } /** * Creates both a PortAddresses and an Interface for the given inputs and * places them in the correct global data stores. * * @param cp the connect point * @param ipAddresses the set of interface IP addresses * @param mac the MAC address * @param vlan the VLAN ID */ private void createPortAddressesAndInterface( ConnectPoint cp, Set<InterfaceIpAddress> ipAddresses, MacAddress mac, VlanId vlan) { PortAddresses pa = new PortAddresses(cp, ipAddresses, mac, vlan); portAddresses.add(pa); expect(hostService.getAddressBindingsForPort(cp)).andReturn( Collections.singleton(pa)).anyTimes(); Interface intf = new Interface(cp, ipAddresses, mac, vlan); interfaces.put(cp, intf); } /** * Tests {@link HostToInterfaceAdaptor#getInterfaces()}. * Verifies that the set of interfaces returned matches what is expected * based on the input PortAddresses data. */ @Test public void testGetInterfaces() { Set<Interface> adaptorIntfs = adaptor.getInterfaces(); assertEquals(3, adaptorIntfs.size()); assertTrue(adaptorIntfs.contains(this.interfaces.get(CP1))); assertTrue(adaptorIntfs.contains(this.interfaces.get(CP2))); assertTrue(adaptorIntfs.contains(this.interfaces.get(CP3))); } /** * Tests {@link HostToInterfaceAdaptor#getInterface(ConnectPoint)}. * Verifies that the correct interface is returned for a given connect * point. */ @Test public void testGetInterface() { assertEquals(this.interfaces.get(CP1), adaptor.getInterface(CP1)); assertEquals(this.interfaces.get(CP2), adaptor.getInterface(CP2)); assertEquals(this.interfaces.get(CP3), adaptor.getInterface(CP3)); // Try and get an interface for a connect point with no addresses reset(hostService); expect(hostService.getAddressBindingsForPort(NON_EXISTENT_CP)) .andReturn(Collections.<PortAddresses>emptySet()).anyTimes(); replay(hostService); assertNull(adaptor.getInterface(NON_EXISTENT_CP)); } /** * Tests {@link HostToInterfaceAdaptor#getInterface(ConnectPoint)} in the * case that the input connect point is null. * Verifies that a NullPointerException is thrown. */ @Test(expected = NullPointerException.class) public void testGetInterfaceNull() { ConnectPoint c = null; adaptor.getInterface(c); } /** * Tests {@link HostToInterfaceAdaptor#getMatchingInterface(IpAddress)}. * Verifies that the correct interface is returned based on the given IP * address. */ @Test public void testGetMatchingInterface() { assertEquals(this.interfaces.get(CP1), adaptor.getMatchingInterface(IpAddress.valueOf("192.168.1.100"))); assertEquals(this.interfaces.get(CP2), adaptor.getMatchingInterface(IpAddress.valueOf("192.168.2.100"))); assertEquals(this.interfaces.get(CP3), adaptor.getMatchingInterface(IpAddress.valueOf("192.168.3.100"))); assertEquals(this.interfaces.get(CP3), adaptor.getMatchingInterface(IpAddress.valueOf("192.168.4.100"))); // Try and match an address we don't have subnet configured for assertNull(adaptor.getMatchingInterface(IpAddress.valueOf("1.1.1.1"))); } /** * Tests {@link HostToInterfaceAdaptor#getMatchingInterface(IpAddress)} in the * case that the input IP address is null. * Verifies that a NullPointerException is thrown. */ @Test(expected = NullPointerException.class) public void testGetMatchingInterfaceNull() { adaptor.getMatchingInterface(null); } }
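// Illustrative sketch (not part of the ONOS sources): how routing code might use
// HostToInterfaceAdaptor outside of a test, resolving the configured interface whose
// subnet contains a next-hop address. The resolver class itself is an assumption; only
// adaptor calls exercised by the test above are used, and the sketch is placed in the
// same package as the adaptor so the package-level constructor is accessible.
package org.onosproject.routing.config.impl;

import org.onlab.packet.IpAddress;
import org.onosproject.net.host.HostService;
import org.onosproject.routing.config.Interface;

public class NextHopInterfaceResolver {
    private final HostToInterfaceAdaptor adaptor;

    public NextHopInterfaceResolver(HostService hostService) {
        // The adaptor derives its Interface view from the HostService address bindings.
        this.adaptor = new HostToInterfaceAdaptor(hostService);
    }

    /** Returns the interface whose configured subnet contains the next hop, or null if none matches. */
    public Interface resolve(IpAddress nextHop) {
        return adaptor.getMatchingInterface(nextHop);
    }
}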
//--------------------------------------------------------------------------------// // COPYRIGHT NOTICE // //--------------------------------------------------------------------------------// // Copyright (c) 2012, Instituto de Microelectronica de Sevilla (IMSE-CNM) // // // // All rights reserved. // // // // Redistribution and use in source and binary forms, with or without // // modification, are permitted provided that the following conditions are met: // // // // * Redistributions of source code must retain the above copyright notice, // // this list of conditions and the following disclaimer. // // // // * Redistributions in binary form must reproduce the above copyright // // notice, this list of conditions and the following disclaimer in the // // documentation and/or other materials provided with the distribution. // // // // * Neither the name of the IMSE-CNM nor the names of its contributors may // // be used to endorse or promote products derived from this software // // without specific prior written permission. // // // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE // // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// //--------------------------------------------------------------------------------// //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // GENERATOR OF THE "TP_spec_type.java" FILE // //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// package xfuzzy.xfj; import xfuzzy.lang.*; import java.io.*; public class XfjSpecType { //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // PRIVATE MEMBERS // //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// private String eol = System.getProperty("line.separator", "\n"); private File dir; private String pkgname; private String classname; private String specname; private Type type; //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // STATIC METHODS // //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// public static final String create(File dir, String pkgname, Type type, String spec) { XfjSpecType creator = new XfjSpecType(dir,pkgname,type,spec); creator.createFile(); return creator.getMessage(); } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // CONSTRUCTOR // //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// public XfjSpecType(File dir, String pkgname, Type type, String spec) { this.dir = dir; this.pkgname = pkgname; this.type = type; this.classname = "TP_"+spec+"_"+type.getName(); this.specname = spec; } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // PUBLIC METHODS // //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// //-------------------------------------------------------------// // Returns the name of the created file // //-------------------------------------------------------------// private String getMessage() { File file = new File(dir,classname+".java"); return file.getAbsolutePath(); } //-------------------------------------------------------------// // Generates the "TP_spec_type.java" file // //-------------------------------------------------------------// public void createFile() { File file = new File(dir,classname+".java"); String heading[] = getHeading(); String source = getSource(); String code = ""; for(int i=0; i<heading.length; i++) code += heading[i]+eol; code += getPackage()+eol+eol; code += source+eol; byte[] buf = code.getBytes(); try { OutputStream stream = new FileOutputStream(file); stream.write(buf); stream.close(); } catch (IOException e) {} } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// // PRIVATE METHODS // //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// //-------------------------------------------------------------// // Generates the heading code // //-------------------------------------------------------------// private String[] getHeading() { String source[] = { "//++++++++++++++++++++++++++++++++++++++++++++++++++++++//", "// //", "// Class: "+classname+" //", "// //", "// Author: Automatically generated by Xfuzzy //", "// //", "// Description: Type \""+type.getName()+"\" //", "// //", "//++++++++++++++++++++++++++++++++++++++++++++++++++++++//", "" }; return source; } //-------------------------------------------------------------// // Generates the package declaration // //-------------------------------------------------------------// private String getPackage() { if(pkgname != null && pkgname.length()>0) return "package "+this.pkgname+";"; return ""; } //-------------------------------------------------------------// // Generates the code of the "TP_spec_type" class //
//-------------------------------------------------------------// private String getSource() { Type parent = type.getParent(); Universe u = type.getUniverse(); Family fam[] = type.getFamilies(); ParamMemFunc mf[] = type.getParamMembershipFunctions(); FamiliarMemFunc fmf[] = type.getFamiliarMembershipFunctions(); String code = ""; code += "public class "+classname; if(parent != null) code += " extends TP_"+specname+"_"+parent.getName(); code += " {"+eol; if(parent == null) { code += " private double min = "+u.min()+";"+eol; code += " private double max = "+u.max()+";"+eol; code += " private double step = "+u.step()+";"+eol; } for(int i=0; i<fam.length; i++) { Parameter single[] = fam[i].getSingleParameters(); code +=" double _pfs_"+fam[i]+"[] = { "; for(int j=0; j<single.length; j++) code += (j==0? "":",")+single[j].value; code += " };"+eol; Parameter list[] = fam[i].getParamList(); int listlength = (list == null? 0 : list.length); code +=" double _pfl_"+fam[i]+"[] = { "; for(int j=0; j<listlength; j++) code += (j==0? "":",")+list[j].value; code += " };"+eol; } for(int i=0; i<mf.length; i++) { Parameter single[] = mf[i].getSingleParameters(); code +=" double _ps_"+mf[i].getLabel()+"[] = { "; for(int j=0; j<single.length; j++) code += (j==0? "":",")+single[j].value; code += " };"+eol; Parameter list[] = mf[i].getParamList(); int listlength = (list == null? 0 : list.length); code +=" double _pl_"+mf[i].getLabel()+"[] = { "; for(int j=0; j<listlength; j++) code += (j==0? "":",")+list[j].value; code += " };"+eol; } for(int i=0; i<fam.length; i++) { String pkgname = fam[i].getPackageName(); String famname = fam[i].getFunctionName(); code += " FAM_"+pkgname+"_"+famname+" "+fam[i]; code += " = new FAM_"+pkgname+"_"+famname; code += "(min,max,step,_pfs_"+fam[i]+",_pfl_"+fam[i]+");"+eol; } for(int i=0; i<mf.length; i++) { code += " MF_"+mf[i].getPackageName()+"_"+mf[i].getFunctionName(); code += " "+mf[i].getLabel(); code += " = new MF_"+mf[i].getPackageName()+"_"+mf[i].getFunctionName(); code += "(min,max,step,_ps_"+mf[i].getLabel(); code += ",_pl_"+mf[i].getLabel()+");"+eol; } for(int i=0; i<fmf.length; i++) { code += " FamiliarMembershipFunction "+fmf[i].getLabel(); code += " = new FamiliarMembershipFunction"; code += "("+fmf[i].getFamily()+","+fmf[i].getIndex()+");"+eol; } code += "}"+eol+eol; return code; } }
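// Usage sketch (added; not part of Xfuzzy): invoking the generator above for a single fuzzy
// type. Obtaining a xfuzzy.lang.Type (normally from a parsed specification) is outside this
// excerpt, so the Type is passed in; the output directory, the package name "xfl.generated"
// and the spec name "controller" are illustrative values only.
import java.io.File;

import xfuzzy.lang.Type;
import xfuzzy.xfj.XfjSpecType;

public class XfjSpecTypeUsageSketch {
    /** Generates TP_controller_<typeName>.java under outputDir and returns its absolute path. */
    public static String generate(File outputDir, Type type) {
        return XfjSpecType.create(outputDir, "xfl.generated", type, "controller");
    }
}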
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.session; import org.apache.ignite.*; import org.apache.ignite.compute.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.resources.*; import org.apache.ignite.testframework.*; import org.apache.ignite.testframework.junits.common.*; import java.io.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; /** * */ @SuppressWarnings({"CatchGenericClass"}) @GridCommonTest(group = "Task Session") public class GridSessionFutureWaitJobAttributeSelfTest extends GridCommonAbstractTest { /** */ private static final int WAIT_TIME = 20000; /** */ public static final int SPLIT_COUNT = 5; /** */ public static final int EXEC_COUNT = 25; /** */ private static CountDownLatch[] startSignal; /** */ private static CountDownLatch[] stopSignal; /** */ public GridSessionFutureWaitJobAttributeSelfTest() { super(true); } /** * @throws Exception if failed. */ public void testSetAttribute() throws Exception { Ignite ignite = G.ignite(getTestGridName()); ignite.compute().localDeployTask(GridTaskSessionTestTask.class, GridTaskSessionTestTask.class.getClassLoader()); refreshInitialData(); for (int i = 0; i < EXEC_COUNT; i++) checkTask(i); } /** * @throws Exception if failed. */ public void testMultiThreaded() throws Exception { Ignite ignite = G.ignite(getTestGridName()); ignite.compute().localDeployTask(GridTaskSessionTestTask.class, GridTaskSessionTestTask.class.getClassLoader()); refreshInitialData(); final GridThreadSerialNumber sNum = new GridThreadSerialNumber(); final AtomicBoolean failed = new AtomicBoolean(false); GridTestUtils.runMultiThreaded(new Runnable() { @Override public void run() { int num = sNum.get(); try { checkTask(num); } catch (Throwable e) { error("Failed to execute task.", e); failed.set(true); } } }, EXEC_COUNT, "grid-session-test"); if (failed.get()) fail(); } /** * @param num Number. * @throws InterruptedException if failed. */ private void checkTask(int num) throws InterruptedException { Ignite ignite = G.ignite(getTestGridName()); IgniteCompute comp = ignite.compute().withAsync(); comp.execute(GridTaskSessionTestTask.class.getName(), num); ComputeTaskFuture<?> fut = comp.future(); assert fut != null; try { // Wait until task receive results from jobs. boolean await = startSignal[num].await(WAIT_TIME, TimeUnit.MILLISECONDS); assert await : "Jobs did not executed."; String val = fut.getTaskSession().waitForAttribute("testName", 100000); info("Received attribute 'testName': " + val); // Signal jobs to finish work. 
stopSignal[num].countDown(); assert "testVal".equals(val) : "Invalid attribute value: " + val; Object res = fut.get(); assert (Integer)res == SPLIT_COUNT : "Invalid result [num=" + num + ", fut=" + fut + ']'; } finally { // We must wait for the jobs to be sure that they have completed // their execution since they use static variable (shared for the tests). fut.get(); } } /** */ private void refreshInitialData() { startSignal = new CountDownLatch[EXEC_COUNT]; stopSignal = new CountDownLatch[EXEC_COUNT]; for(int i=0 ; i < EXEC_COUNT; i++){ startSignal[i] = new CountDownLatch(1); stopSignal[i] = new CountDownLatch(1); } } /** * */ @ComputeTaskSessionFullSupport private static class GridTaskSessionTestTask extends ComputeTaskSplitAdapter<Serializable, Integer> { /** */ @LoggerResource private IgniteLogger log; /** */ @TaskSessionResource private ComputeTaskSession taskSes; /** */ private volatile int taskNum = -1; /** {@inheritDoc} */ @Override protected Collection<? extends ComputeJob> split(int gridSize, Serializable arg) { if (log.isInfoEnabled()) log.info("Splitting job [task=" + this + ", gridSize=" + gridSize + ", arg=" + arg + ']'); assert arg != null; taskNum = (Integer)arg; assert taskNum != -1; Collection<ComputeJob> jobs = new ArrayList<>(SPLIT_COUNT); for (int i = 1; i <= SPLIT_COUNT; i++) { jobs.add(new ComputeJobAdapter(i) { @Override public Serializable execute() { assert taskSes != null; if (log.isInfoEnabled()) { log.info("Computing job [job=" + this + ", arg=" + argument(0) + ']'); log.info("Set attribute 'testName'."); } taskSes.setAttribute("testName", "testVal"); // Signal main process to wait for attribute. startSignal[taskNum].countDown(); try { // Wait until future receive attribute. if (!stopSignal[taskNum].await(WAIT_TIME, TimeUnit.MILLISECONDS)) fail(); } catch (InterruptedException e) { if (log.isInfoEnabled()) log.info("Job got interrupted [arg=" + argument(0) + ", e=" + e + ']'); return 0; } return 1; } }); } return jobs; } /** {@inheritDoc} */ @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> received) { if (res.getException() != null) throw res.getException(); return received.size() == SPLIT_COUNT ? ComputeJobResultPolicy.REDUCE : ComputeJobResultPolicy.WAIT; } /** {@inheritDoc} */ @Override public Integer reduce(List<ComputeJobResult> results) { if (log.isInfoEnabled()) log.info("Reducing job [job=" + this + ", results=" + results + ']'); if (results.size() < SPLIT_COUNT) fail(); int sum = 0; for (ComputeJobResult result : results) { if (result.getData() != null) sum += (Integer)result.getData(); } return sum; } } }
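// Minimal sketch (added; not part of the test above): the same session-attribute hand-shake
// without the latch bookkeeping, using only API already exercised by the test. A started
// Ignite instance is assumed to be supplied by the caller; task, attribute key and timeout
// values are illustrative.
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.compute.*;
import org.apache.ignite.resources.TaskSessionResource;

public class SessionAttributeSketch {
    /** Task whose single job publishes a session attribute before returning. */
    @ComputeTaskSessionFullSupport
    public static class AttributeTask extends ComputeTaskSplitAdapter<String, Integer> {
        @TaskSessionResource
        private ComputeTaskSession ses;

        @Override protected Collection<? extends ComputeJob> split(int gridSize, String arg) {
            return Collections.singleton(new ComputeJobAdapter() {
                @Override public Serializable execute() {
                    ses.setAttribute("testName", "testVal"); // becomes visible to the submitter
                    return 1;
                }
            });
        }

        @Override public Integer reduce(List<ComputeJobResult> results) {
            return results.size();
        }
    }

    public static void run(Ignite ignite) throws Exception {
        IgniteCompute comp = ignite.compute().withAsync();
        comp.execute(AttributeTask.class, "ignored");
        ComputeTaskFuture<Integer> fut = comp.future();

        // Blocks until the job has set the attribute (or the timeout expires).
        String val = fut.getTaskSession().waitForAttribute("testName", 10000);
        System.out.println("attribute = " + val + ", result = " + fut.get());
    }
}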
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.notifications.impl; import com.evolveum.midpoint.notifications.api.NotificationManager; import com.evolveum.midpoint.notifications.api.OperationStatus; import com.evolveum.midpoint.notifications.api.events.ResourceObjectEvent; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.provisioning.api.ChangeNotificationDispatcher; import com.evolveum.midpoint.provisioning.api.ResourceOperationDescription; import com.evolveum.midpoint.provisioning.api.ResourceOperationListener; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.task.api.LightweightIdentifierGenerator; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; /** * @author mederly */ @Component public class AccountOperationListener implements ResourceOperationListener { private static final Trace LOGGER = TraceManager.getTrace(AccountOperationListener.class); private static final String DOT_CLASS = AccountOperationListener.class.getName() + "."; @Autowired private LightweightIdentifierGenerator lightweightIdentifierGenerator; @Autowired private ChangeNotificationDispatcher provisioningNotificationDispatcher; @Autowired private NotificationManager notificationManager; @Autowired @Qualifier("cacheRepositoryService") private transient RepositoryService cacheRepositoryService; @Autowired private NotificationFunctionsImpl notificationsUtil; @PostConstruct public void init() { provisioningNotificationDispatcher.registerNotificationListener(this); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Registered account operation notification listener."); } } @Override public String getName() { return "user notification account change listener"; } @Override public void notifySuccess(ResourceOperationDescription operationDescription, Task task, OperationResult parentResult) { if (notificationsEnabled()) { notifyAny(OperationStatus.SUCCESS, operationDescription, task, parentResult.createMinorSubresult(DOT_CLASS + "notifySuccess")); } } private boolean notificationsEnabled() { if (notificationManager.isDisabled()) { LOGGER.trace("Notifications are temporarily disabled, exiting the hook."); return false; } else { return true; } } @Override public void notifyInProgress(ResourceOperationDescription 
operationDescription, Task task, OperationResult parentResult) { if (notificationsEnabled()) { notifyAny(OperationStatus.IN_PROGRESS, operationDescription, task, parentResult.createMinorSubresult(DOT_CLASS + "notifyInProgress")); } } @Override public void notifyFailure(ResourceOperationDescription operationDescription, Task task, OperationResult parentResult) { if (notificationsEnabled()) { notifyAny(OperationStatus.FAILURE, operationDescription, task, parentResult.createMinorSubresult(DOT_CLASS + "notifyFailure")); } } private void notifyAny(OperationStatus status, ResourceOperationDescription operationDescription, Task task, OperationResult result) { try { executeNotifyAny(status, operationDescription, task, result); } catch (RuntimeException e) { result.recordFatalError("An unexpected exception occurred when preparing and sending notifications: " + e.getMessage(), e); LoggingUtils.logException(LOGGER, "An unexpected exception occurred when preparing and sending notifications: " + e.getMessage(), e); } // todo work correctly with operationResult (in whole notification module) if (result.isUnknown()) { result.computeStatus(); } result.recordSuccessIfUnknown(); // if (LOGGER.isTraceEnabled()) { // LOGGER.trace("Returning operation result: " + result.dump()); // } } private void executeNotifyAny(OperationStatus status, ResourceOperationDescription operationDescription, Task task, OperationResult result) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("AccountOperationListener.notify ({}) called with operationDescription = {}", status, operationDescription.debugDump()); } if (operationDescription.getObjectDelta() == null) { LOGGER.warn("Object delta is null, exiting the change listener."); return; } if (operationDescription.getCurrentShadow() == null) { LOGGER.warn("Current shadow is null, exiting the change listener."); return; } // for the time being, we deal only with accounts here if (operationDescription.getObjectDelta().getObjectTypeClass() == null || !ShadowType.class.isAssignableFrom(operationDescription.getObjectDelta().getObjectTypeClass())) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("Object that was changed was not an account, exiting the operation listener (class = {})", operationDescription.getObjectDelta().getObjectTypeClass()); } return; } ResourceObjectEvent request = createRequest(status, operationDescription, task, result); notificationManager.processEvent(request, task, result); } private ResourceObjectEvent createRequest(OperationStatus status, ResourceOperationDescription operationDescription, Task task, OperationResult result) { ResourceObjectEvent event = new ResourceObjectEvent(lightweightIdentifierGenerator); event.setAccountOperationDescription(operationDescription); event.setOperationStatus(status); event.setChangeType(operationDescription.getObjectDelta().getChangeType()); // fortunately there's 1:1 mapping String accountOid = operationDescription.getObjectDelta().getOid(); PrismObject<UserType> user = findRequestee(accountOid, task, result, operationDescription.getObjectDelta().isDelete()); if (user != null) { event.setRequestee(new SimpleObjectRefImpl(notificationsUtil, user.asObjectable())); } // otherwise, appropriate messages were already logged if (task != null && task.getOwner() != null) { event.setRequester(new SimpleObjectRefImpl(notificationsUtil, task.getOwner())); } else { LOGGER.warn("No owner for task {}, therefore no requester will be set for event {}", task, event.getId()); } if (task != null && task.getChannel() != null) { 
event.setChannel(task.getChannel()); } else if (operationDescription.getSourceChannel() != null) { event.setChannel(operationDescription.getSourceChannel()); } return event; } // private boolean isRequestApplicable(ResourceObjectEvent request, NotificationConfigurationEntryType entry) { // // ResourceOperationDescription opDescr = request.getAccountOperationDescription(); // OperationStatus status = request.getOperationStatus(); // ChangeType type = opDescr.getObjectDelta().getChangeType(); // return typeMatches(type, entry.getSituation(), opDescr) && statusMatches(status, entry.getSituation()); // } private PrismObject<UserType> findRequestee(String accountOid, Task task, OperationResult result, boolean isDelete) { PrismObject<UserType> user; if (accountOid != null) { try { user = cacheRepositoryService.listAccountShadowOwner(accountOid, result); if (LOGGER.isTraceEnabled()) { LOGGER.trace("listAccountShadowOwner for account {} yields {}",accountOid, user); } } catch (ObjectNotFoundException e) { LOGGER.trace("There's a problem finding account " + accountOid, e); return null; } if (user != null) { return user; } } PrismObject<UserType> requestee = task != null ? task.getRequestee() : null; if (requestee == null) { LOGGER.debug("There is no owner of account {} (in repo nor in task).", accountOid); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Task = {}", (task != null ? task.debugDump() : null)); } return null; } if (LOGGER.isTraceEnabled()) { LOGGER.trace("Requestee = {} for account {}", requestee, accountOid); } if (requestee.getOid() == null) { return requestee; } // let's try to get current value of requestee ... if it exists (it will NOT exist in case of delete operation) try { return cacheRepositoryService.getObject(UserType.class, requestee.getOid(), null, result); } catch (ObjectNotFoundException e) { if (isDelete) { result.removeLastSubresult(); // get rid of this error - it's not an error } return requestee; // returning last known value // if (!isDelete) { // LoggingUtils.logException(LOGGER, "Cannot find owner of account " + accountOid, e); // } else { // LOGGER.info("Owner of account " + accountOid + " (user oid " + userOid + ") was probably already deleted."); // result.removeLastSubresult(); // to suppress the error message (in GUI + in tests) // } // return null; } catch (SchemaException e) { LoggingUtils.logException(LOGGER, "Cannot find owner of account " + accountOid, e); return null; } } }
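// Minimal sketch (illustrative; not part of midPoint): the smallest ResourceOperationListener
// that hooks into the same ChangeNotificationDispatcher used by AccountOperationListener above.
// The class name and the log-only behaviour are assumptions; the Spring wiring and the
// registration call mirror the listener above.
import com.evolveum.midpoint.provisioning.api.ChangeNotificationDispatcher;
import com.evolveum.midpoint.provisioning.api.ResourceOperationDescription;
import com.evolveum.midpoint.provisioning.api.ResourceOperationListener;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;

@Component
public class LoggingOperationListenerSketch implements ResourceOperationListener {

    private static final Trace LOGGER = TraceManager.getTrace(LoggingOperationListenerSketch.class);

    @Autowired private ChangeNotificationDispatcher provisioningNotificationDispatcher;

    @PostConstruct
    public void init() {
        // Same registration call as in AccountOperationListener.init().
        provisioningNotificationDispatcher.registerNotificationListener(this);
    }

    @Override
    public String getName() {
        return "logging resource operation listener (sketch)";
    }

    @Override
    public void notifySuccess(ResourceOperationDescription operationDescription, Task task, OperationResult parentResult) {
        LOGGER.trace("Resource operation succeeded: {}", operationDescription.debugDump());
    }

    @Override
    public void notifyInProgress(ResourceOperationDescription operationDescription, Task task, OperationResult parentResult) {
        LOGGER.trace("Resource operation in progress: {}", operationDescription.debugDump());
    }

    @Override
    public void notifyFailure(ResourceOperationDescription operationDescription, Task task, OperationResult parentResult) {
        LOGGER.trace("Resource operation failed: {}", operationDescription.debugDump());
    }
}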
package sumeetkumar.in.wearsense.views; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.support.v7.app.ActionBarActivity; import android.text.method.ScrollingMovementMethod; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.TextView; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import sumeetkumar.in.wearsense.R; import sumeetkumar.in.wearsense.services.AlarmManager; import sumeetkumar.in.wearsense.services.StartSensingBroadcastReceiver; import sumeetkumar.in.wearsense.utils.BLEScanner; import sumeetkumar.in.wearsense.utils.BLESignalScanner; import sumeetkumar.in.wearsense.utils.Constants; import sumeetkumar.in.wearsense.utils.Logger; import sumeetkumar.in.wearsense.utils.SoundPlayer; public class MainActivity extends ActionBarActivity { /** * The {@link android.support.v4.view.PagerAdapter} that will provide * fragments for each of the sections. We use a * {@link FragmentPagerAdapter} derivative, which will keep every * loaded fragment in memory. If this becomes too memory intensive, it * may be best to switch to a * {@link android.support.v4.app.FragmentStatePagerAdapter}. */ SectionsPagerAdapter mSectionsPagerAdapter; /** * The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager; private NewDataReceivedBroadcastReceiver dataReceiver; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Create the adapter that will return a fragment for each of the three // primary sections of the activity. mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // Set up the ViewPager with the sections adapter. mViewPager = (ViewPager) findViewById(R.id.pager); mViewPager.setAdapter(mSectionsPagerAdapter); Logger.log("scheduling alarm"); AlarmManager.setupRepeatingAlarmToWakeUpApplication( this.getApplicationContext(), Constants.TIME_RANGE_TO_SHOW_ALERT_IN_MINUTES * 60 * 1000); if (dataReceiver == null) { dataReceiver = new NewDataReceivedBroadcastReceiver(new Handler()); Log.d("WEA", "New configuration receiver created in main activity"); IntentFilter filter = new IntentFilter(); filter.addAction(Constants.NEW_DATA_INTENT_FILTER); filter.addCategory(Intent.CATEGORY_DEFAULT); getApplicationContext().registerReceiver(dataReceiver, filter); } } @Override protected void onDestroy() { if (dataReceiver != null) { getApplication().unregisterReceiver(dataReceiver); dataReceiver = null; } super.onDestroy(); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } public void readSensors() { Intent intent = new Intent(this, StartSensingBroadcastReceiver.class); intent.putExtra(Constants.ACTION, Constants.START_AUDIO_RECORDING); sendBroadcast(intent); } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to * one of the sections/tabs/pages. */ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { // getItem is called to instantiate the fragment for the given page. // Return a PlaceholderFragment (defined as a static inner class below). return PlaceholderFragment.newInstance(position + 1); } @Override public int getCount() { // Show 3 total pages. return 1; } @Override public CharSequence getPageTitle(int position) { Locale l = Locale.getDefault(); switch (position) { case 0: return getString(R.string.title_section1).toUpperCase(l); case 1: return getString(R.string.title_section2).toUpperCase(l); case 2: return getString(R.string.title_section3).toUpperCase(l); } return null; } } /** * A placeholder fragment containing a simple view. */ public static class PlaceholderFragment extends Fragment { /** * The fragment argument representing the section number for this * fragment. */ private static final String ARG_SECTION_NUMBER = "section_number"; /** * Returns a new instance of this fragment for the given section * number. */ public static PlaceholderFragment newInstance(int sectionNumber) { PlaceholderFragment fragment = new PlaceholderFragment(); Bundle args = new Bundle(); args.putInt(ARG_SECTION_NUMBER, sectionNumber); fragment.setArguments(args); return fragment; } public PlaceholderFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_main, container, false); return rootView; } @Override public void onViewCreated(View view, Bundle savedInstanceState) { final TextView txtStatus = (TextView) getActivity().findViewById(R.id.txtFragment); Button btnGetData = (Button) getActivity().findViewById(R.id.btnGetData); btnGetData.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { ((MainActivity) getActivity()).readSensors(); txtStatus.setText("Asking wear for new data"); } }); TextView txtData = (TextView) getActivity().findViewById(R.id.txtWearData); txtData.setMovementMethod(new ScrollingMovementMethod()); Button btnPlaySound = (Button) getActivity().findViewById(R.id.btnPlaySound); btnPlaySound.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { SoundPlayer player = new SoundPlayer(); player.playSound(); } }); Button btnScan = (Button) getActivity().findViewById(R.id.btnScan); btnScan.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { BLESignalScanner.getSignalStrength("NA", getActivity().getApplicationContext()); BLEScanner bleScanner = new BLEScanner(getActivity()); } }); } }); } } public class NewDataReceivedBroadcastReceiver extends BroadcastReceiver { private final Handler handler; public NewDataReceivedBroadcastReceiver(Handler handler) { this.handler = handler; } @Override public void onReceive(final Context context, final Intent intent) { try { 
final String message = intent.getStringExtra(Constants.NEW_DATA); final TextView txtData = (TextView) findViewById(R.id.txtWearData); final TextView txtStatus = (TextView) findViewById(R.id.txtFragment); // Post the UI updating code to our Handler if (handler != null) { handler.post(new Runnable() { @Override public void run() { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()); String currentDateAndTime = sdf.format(new Date()); txtStatus.setText("Updated at: " + currentDateAndTime); txtData.setText(message); } }); } } catch (Exception ex) { Logger.log(ex.getMessage()); } } } }
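// Illustrative sketch (not part of the app): the sending side of the NEW_DATA broadcast that
// NewDataReceivedBroadcastReceiver above listens for. In this app the broadcast presumably
// originates in the service layer once wear data arrives; the helper class below is an
// assumption and only reuses the Constants keys and the CATEGORY_DEFAULT category that the
// receiver's IntentFilter expects.
import android.content.Context;
import android.content.Intent;

import sumeetkumar.in.wearsense.utils.Constants;

public class NewDataBroadcaster {
    /** Delivers a payload to any registered NewDataReceivedBroadcastReceiver. */
    public static void send(Context context, String payload) {
        Intent intent = new Intent(Constants.NEW_DATA_INTENT_FILTER);
        intent.addCategory(Intent.CATEGORY_DEFAULT);   // matches the receiver's IntentFilter
        intent.putExtra(Constants.NEW_DATA, payload);
        context.sendBroadcast(intent);
    }
}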
/* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.settings; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.admin.DevicePolicyManager; import android.content.ActivityNotFoundException; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.content.res.Resources; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.net.Uri; import android.nfc.NfcAdapter; import android.nfc.NfcManager; import android.os.Bundle; import android.os.SystemProperties; import android.os.UserHandle; import android.os.UserManager; import android.preference.Preference; import android.preference.PreferenceScreen; import android.preference.SwitchPreference; import android.provider.SearchIndexableResource; import android.provider.Settings; import android.telephony.CarrierConfigManager; import android.telephony.PhoneStateListener; import android.telephony.SubscriptionManager; import android.telephony.TelephonyManager; import android.text.TextUtils; import android.util.Log; import com.android.ims.ImsManager; import com.android.internal.logging.MetricsLogger; import com.android.internal.telephony.TelephonyIntents; import com.android.internal.telephony.TelephonyProperties; import com.android.settings.nfc.NfcEnabler; import com.android.settings.search.BaseSearchIndexProvider; import com.android.settings.search.Indexable; import com.mediatek.internal.telephony.cdma.CdmaFeatureOptionUtils; import com.mediatek.settings.ext.DefaultWfcSettingsExt; import com.mediatek.settings.ext.IRCSSettings; import com.mediatek.settings.ext.ISettingsMiscExt; import com.mediatek.settings.ext.IWfcSettingsExt; import com.mediatek.settings.FeatureOption; import com.mediatek.settings.UtilsExt; import com.mediatek.settings.ext.IRCSSettings; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class WirelessSettings extends SettingsPreferenceFragment implements Indexable { private static final String TAG = "WirelessSettings"; private static final String KEY_TOGGLE_AIRPLANE = "toggle_airplane"; private static final String KEY_TOGGLE_NFC = "toggle_nfc"; /// M: Add MTK nfc seting @{ private static final String KEY_MTK_TOGGLE_NFC = "toggle_mtk_nfc"; private static final String ACTION_MTK_NFC = "mediatek.settings.NFC_SETTINGS"; /// @} private static final String KEY_WIMAX_SETTINGS = "wimax_settings"; private static final String KEY_ANDROID_BEAM_SETTINGS = "android_beam_settings"; private static final String KEY_VPN_SETTINGS = "vpn_settings"; private static final String KEY_TETHER_SETTINGS = "tether_settings"; private static final String KEY_PROXY_SETTINGS = "proxy_settings"; private static final String KEY_MOBILE_NETWORK_SETTINGS = 
"mobile_network_settings"; private static final String KEY_MANAGE_MOBILE_PLAN = "manage_mobile_plan"; private static final String KEY_TOGGLE_NSD = "toggle_nsd"; //network service discovery private static final String KEY_CELL_BROADCAST_SETTINGS = "cell_broadcast_settings"; private static final String KEY_WFC_SETTINGS = "wifi_calling_settings"; public static final String EXIT_ECM_RESULT = "exit_ecm_result"; public static final int REQUEST_CODE_EXIT_ECM = 1; private AirplaneModeEnabler mAirplaneModeEnabler; private SwitchPreference mAirplaneModePreference; private NfcEnabler mNfcEnabler; private NfcAdapter mNfcAdapter; private NsdEnabler mNsdEnabler; private ConnectivityManager mCm; private TelephonyManager mTm; private PackageManager mPm; private UserManager mUm; private static final int MANAGE_MOBILE_PLAN_DIALOG_ID = 1; private static final String SAVED_MANAGE_MOBILE_PLAN_MSG = "mManageMobilePlanMessage"; private PreferenceScreen mButtonWfc; /// M: RCSE key&intent @{ private static final String RCSE_SETTINGS_INTENT = "com.mediatek.rcse.RCSE_SETTINGS"; private static final String KEY_RCSE_SETTINGS = "rcse_settings"; /// @} /// M: Wfc plugin @{ IWfcSettingsExt mWfcExt; /// @} /** * Invoked on each preference click in this hierarchy, overrides * PreferenceFragment's implementation. Used to make sure we track the * preference click events. */ @Override public boolean onPreferenceTreeClick(PreferenceScreen preferenceScreen, Preference preference) { log("onPreferenceTreeClick: preference=" + preference); if (preference == mAirplaneModePreference && Boolean.parseBoolean( SystemProperties.get(TelephonyProperties.PROPERTY_INECM_MODE))) { // In ECM mode launch ECM app dialog startActivityForResult( new Intent(TelephonyIntents.ACTION_SHOW_NOTICE_ECM_BLOCK_OTHERS, null), REQUEST_CODE_EXIT_ECM); return true; } else if (preference == findPreference(KEY_MANAGE_MOBILE_PLAN)) { onManageMobilePlanClick(); } // Let the intents be launched by the Preference manager return super.onPreferenceTreeClick(preferenceScreen, preference); } private String mManageMobilePlanMessage; public void onManageMobilePlanClick() { log("onManageMobilePlanClick:"); mManageMobilePlanMessage = null; Resources resources = getActivity().getResources(); NetworkInfo ni = mCm.getActiveNetworkInfo(); if (mTm.hasIccCard() && (ni != null)) { // Check for carrier apps that can handle provisioning first Intent provisioningIntent = new Intent(TelephonyIntents.ACTION_CARRIER_SETUP); List<String> carrierPackages = mTm.getCarrierPackageNamesForIntent(provisioningIntent); if (carrierPackages != null && !carrierPackages.isEmpty()) { if (carrierPackages.size() != 1) { Log.w(TAG, "Multiple matching carrier apps found, launching the first."); } provisioningIntent.setPackage(carrierPackages.get(0)); startActivity(provisioningIntent); return; } // Get provisioning URL String url = mCm.getMobileProvisioningUrl(); if (!TextUtils.isEmpty(url)) { Intent intent = Intent.makeMainSelectorActivity(Intent.ACTION_MAIN, Intent.CATEGORY_APP_BROWSER); intent.setData(Uri.parse(url)); intent.setFlags(Intent.FLAG_ACTIVITY_BROUGHT_TO_FRONT | Intent.FLAG_ACTIVITY_NEW_TASK); try { startActivity(intent); } catch (ActivityNotFoundException e) { Log.w(TAG, "onManageMobilePlanClick: startActivity failed" + e); } } else { // No provisioning URL String operatorName = mTm.getSimOperatorName(); if (TextUtils.isEmpty(operatorName)) { // Use NetworkOperatorName as second choice in case there is no // SPN (Service Provider Name on the SIM). Such as with T-mobile. 
operatorName = mTm.getNetworkOperatorName(); if (TextUtils.isEmpty(operatorName)) { mManageMobilePlanMessage = resources.getString( R.string.mobile_unknown_sim_operator); } else { mManageMobilePlanMessage = resources.getString( R.string.mobile_no_provisioning_url, operatorName); } } else { mManageMobilePlanMessage = resources.getString( R.string.mobile_no_provisioning_url, operatorName); } } } else if (mTm.hasIccCard() == false) { // No sim card mManageMobilePlanMessage = resources.getString(R.string.mobile_insert_sim_card); } else { // NetworkInfo is null, there is no connection mManageMobilePlanMessage = resources.getString(R.string.mobile_connect_to_internet); } if (!TextUtils.isEmpty(mManageMobilePlanMessage)) { log("onManageMobilePlanClick: message=" + mManageMobilePlanMessage); showDialog(MANAGE_MOBILE_PLAN_DIALOG_ID); } } @Override public Dialog onCreateDialog(int dialogId) { log("onCreateDialog: dialogId=" + dialogId); switch (dialogId) { case MANAGE_MOBILE_PLAN_DIALOG_ID: return new AlertDialog.Builder(getActivity()) .setMessage(mManageMobilePlanMessage) .setCancelable(false) .setPositiveButton(com.android.internal.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int id) { log("MANAGE_MOBILE_PLAN_DIALOG.onClickListener id=" + id); mManageMobilePlanMessage = null; } }) .create(); } return super.onCreateDialog(dialogId); } private void log(String s) { Log.d(TAG, s); } @Override protected int getMetricsCategory() { return MetricsLogger.WIRELESS; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (savedInstanceState != null) { mManageMobilePlanMessage = savedInstanceState.getString(SAVED_MANAGE_MOBILE_PLAN_MSG); } log("onCreate: mManageMobilePlanMessage=" + mManageMobilePlanMessage); mCm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); mTm = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); mPm = getPackageManager(); mUm = (UserManager) getSystemService(Context.USER_SERVICE); addPreferencesFromResource(R.xml.wireless_settings); final int myUserId = UserHandle.myUserId(); final boolean isSecondaryUser = myUserId != UserHandle.USER_OWNER; final Activity activity = getActivity(); mAirplaneModePreference = (SwitchPreference) findPreference(KEY_TOGGLE_AIRPLANE); SwitchPreference nfc = (SwitchPreference) findPreference(KEY_TOGGLE_NFC); PreferenceScreen androidBeam = (PreferenceScreen) findPreference(KEY_ANDROID_BEAM_SETTINGS); SwitchPreference nsd = (SwitchPreference) findPreference(KEY_TOGGLE_NSD); /// M: Get MTK NFC setting preference PreferenceScreen mtkNfc = (PreferenceScreen) findPreference(KEY_MTK_TOGGLE_NFC); mAirplaneModeEnabler = new AirplaneModeEnabler(activity, mAirplaneModePreference); /// M: mNetworkSettingsPreference = (PreferenceScreen) findPreference(KEY_MOBILE_NETWORK_SETTINGS); mNfcEnabler = new NfcEnabler(activity, nfc, androidBeam); mButtonWfc = (PreferenceScreen) findPreference(KEY_WFC_SETTINGS); // Remove NSD checkbox by default getPreferenceScreen().removePreference(nsd); //mNsdEnabler = new NsdEnabler(activity, nsd); String toggleable = Settings.Global.getString(activity.getContentResolver(), Settings.Global.AIRPLANE_MODE_TOGGLEABLE_RADIOS); //enable/disable wimax depending on the value in config.xml final boolean isWimaxEnabled = !isSecondaryUser && this.getResources().getBoolean( com.android.internal.R.bool.config_wimaxEnabled); if (!isWimaxEnabled || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) { 
PreferenceScreen root = getPreferenceScreen(); Preference ps = (Preference) findPreference(KEY_WIMAX_SETTINGS); if (ps != null) root.removePreference(ps); } else { if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_WIMAX ) && isWimaxEnabled) { Preference ps = (Preference) findPreference(KEY_WIMAX_SETTINGS); ps.setDependency(KEY_TOGGLE_AIRPLANE); } } // Manually set dependencies for Wifi when not toggleable. if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_WIFI)) { findPreference(KEY_VPN_SETTINGS).setDependency(KEY_TOGGLE_AIRPLANE); } // Disable VPN. if (isSecondaryUser || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_VPN)) { removePreference(KEY_VPN_SETTINGS); } // Manually set dependencies for Bluetooth when not toggleable. if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_BLUETOOTH)) { // No bluetooth-dependent items in the list. Code kept in case one is added later. } // Manually set dependencies for NFC when not toggleable. if (toggleable == null || !toggleable.contains(Settings.Global.RADIO_NFC)) { findPreference(KEY_TOGGLE_NFC).setDependency(KEY_TOGGLE_AIRPLANE); findPreference(KEY_ANDROID_BEAM_SETTINGS).setDependency(KEY_TOGGLE_AIRPLANE); /// M: Manually set dependencies for NFC findPreference(KEY_MTK_TOGGLE_NFC).setDependency(KEY_TOGGLE_AIRPLANE); } // Remove NFC if not available mNfcAdapter = NfcAdapter.getDefaultAdapter(activity); if (mNfcAdapter == null) { getPreferenceScreen().removePreference(nfc); getPreferenceScreen().removePreference(androidBeam); mNfcEnabler = null; /// M: Remove MTK NFC setting getPreferenceScreen().removePreference(mtkNfc); } else { /// M: Remove NFC duplicate items @{ if (FeatureOption.MTK_NFC_ADDON_SUPPORT) { getPreferenceScreen().removePreference(nfc); getPreferenceScreen().removePreference(androidBeam); mNfcEnabler = null; } else { getPreferenceScreen().removePreference(mtkNfc); } /// @} } // Remove Mobile Network Settings and Manage Mobile Plan for secondary users, // if it's a wifi-only device, or if the settings are restricted. if (isSecondaryUser || Utils.isWifiOnly(getActivity()) || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) { removePreference(KEY_MOBILE_NETWORK_SETTINGS); removePreference(KEY_MANAGE_MOBILE_PLAN); } // Remove Mobile Network Settings and Manage Mobile Plan // if config_show_mobile_plan sets false. final boolean isMobilePlanEnabled = this.getResources().getBoolean( R.bool.config_show_mobile_plan); if (!isMobilePlanEnabled) { Preference pref = findPreference(KEY_MANAGE_MOBILE_PLAN); if (pref != null) { removePreference(KEY_MANAGE_MOBILE_PLAN); } } // Remove Airplane Mode settings if it's a stationary device such as a TV. if (mPm.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) { removePreference(KEY_TOGGLE_AIRPLANE); } // Enable Proxy selector settings if allowed. 
Preference mGlobalProxy = findPreference(KEY_PROXY_SETTINGS); final DevicePolicyManager mDPM = (DevicePolicyManager) activity.getSystemService(Context.DEVICE_POLICY_SERVICE); // proxy UI disabled until we have better app support getPreferenceScreen().removePreference(mGlobalProxy); mGlobalProxy.setEnabled(mDPM.getGlobalProxyAdmin() == null); // Disable Tethering if it's not allowed or if it's a wifi-only device final ConnectivityManager cm = (ConnectivityManager) activity.getSystemService(Context.CONNECTIVITY_SERVICE); if (isSecondaryUser || !cm.isTetheringSupported() || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_TETHERING)) { getPreferenceScreen().removePreference(findPreference(KEY_TETHER_SETTINGS)); } else { Preference p = findPreference(KEY_TETHER_SETTINGS); p.setTitle(Utils.getTetheringLabel(cm)); // Grey out if provisioning is not available. p.setEnabled(!TetherSettings .isProvisioningNeededButUnavailable(getActivity())); } // Enable link to CMAS app settings depending on the value in config.xml. boolean isCellBroadcastAppLinkEnabled = this.getResources().getBoolean( com.android.internal.R.bool.config_cellBroadcastAppLinks); try { if (isCellBroadcastAppLinkEnabled) { if (mPm.getApplicationEnabledSetting("com.android.cellbroadcastreceiver") == PackageManager.COMPONENT_ENABLED_STATE_DISABLED) { isCellBroadcastAppLinkEnabled = false; // CMAS app disabled } } } catch (IllegalArgumentException ignored) { isCellBroadcastAppLinkEnabled = false; // CMAS app not installed } if (isSecondaryUser || !isCellBroadcastAppLinkEnabled || mUm.hasUserRestriction(UserManager.DISALLOW_CONFIG_CELL_BROADCASTS)) { PreferenceScreen root = getPreferenceScreen(); Preference ps = findPreference(KEY_CELL_BROADCAST_SETTINGS); if (ps != null) root.removePreference(ps); } /// M: Remove the entrance if RCSE not support. @{ if (isAPKInstalled(activity, RCSE_SETTINGS_INTENT)) { Intent intent = new Intent(RCSE_SETTINGS_INTENT); findPreference(KEY_RCSE_SETTINGS).setIntent(intent); } else { Log.d(TAG, RCSE_SETTINGS_INTENT + " is not installed"); getPreferenceScreen().removePreference(findPreference(KEY_RCSE_SETTINGS)); } /// @} /// M: add the entrance RCS switch. 
@{ IRCSSettings rcsExt = UtilsExt.getRcsSettingsPlugin(getActivity()); rcsExt.addRCSPreference(getActivity(), getPreferenceScreen()); /// @} /// M: for plug-in, make wfc setting plug-in @{ mWfcExt = UtilsExt.getWfcSettingsPlugin(getActivity()); /// @} } @Override public void onResume() { super.onResume(); mAirplaneModeEnabler.resume(); if (mNfcEnabler != null) { mNfcEnabler.resume(); } if (mNsdEnabler != null) { mNsdEnabler.resume(); } // update WFC setting final Context context = getActivity(); mWfcExt.initPlugin(this); if (ImsManager.isWfcEnabledByPlatform(context)) { getPreferenceScreen().addPreference(mButtonWfc); mButtonWfc.setSummary(WifiCallingSettings.getWfcModeSummary( context, ImsManager.getWfcMode(context))); /// M: for plug-in mButtonWfc.setSummary(mWfcExt.getWfcSummary(context, WifiCallingSettings.getWfcModeSummary(context, ImsManager.getWfcMode(context)))); mWfcExt.customizedWfcPreference(getActivity(), getPreferenceScreen()); } else { removePreference(KEY_WFC_SETTINGS); } /// M: @{ TelephonyManager telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); telephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE); updateMobileNetworkEnabled(); IntentFilter intentFilter = new IntentFilter(TelephonyIntents.ACTION_SUBINFO_RECORD_UPDATED); // listen to Carrier config change intentFilter.addAction(CarrierConfigManager.ACTION_CARRIER_CONFIG_CHANGED); getActivity().registerReceiver(mReceiver, intentFilter); /// @} /// M: WFC: get customized intent filter @{ mWfcExt.onWirelessSettingsEvent(DefaultWfcSettingsExt.RESUME); /// @} } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); if (!TextUtils.isEmpty(mManageMobilePlanMessage)) { outState.putString(SAVED_MANAGE_MOBILE_PLAN_MSG, mManageMobilePlanMessage); } } @Override public void onPause() { super.onPause(); mAirplaneModeEnabler.pause(); if (mNfcEnabler != null) { mNfcEnabler.pause(); } if (mNsdEnabler != null) { mNsdEnabler.pause(); } /// M: @{ TelephonyManager telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); telephonyManager.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE); getActivity().unregisterReceiver(mReceiver); mWfcExt.onWirelessSettingsEvent(DefaultWfcSettingsExt.PAUSE); /// @} } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == REQUEST_CODE_EXIT_ECM) { Boolean isChoiceYes = data.getBooleanExtra(EXIT_ECM_RESULT, false); // Set Airplane mode based on the return value and checkbox state mAirplaneModeEnabler.setAirplaneModeInECM(isChoiceYes, mAirplaneModePreference.isChecked()); } super.onActivityResult(requestCode, resultCode, data); } @Override protected int getHelpResource() { return R.string.help_url_more_networks; } /** * For Search. 
*/ public static final Indexable.SearchIndexProvider SEARCH_INDEX_DATA_PROVIDER = new BaseSearchIndexProvider() { @Override public List<SearchIndexableResource> getXmlResourcesToIndex( Context context, boolean enabled) { SearchIndexableResource sir = new SearchIndexableResource(context); sir.xmlResId = R.xml.wireless_settings; return Arrays.asList(sir); } @Override public List<String> getNonIndexableKeys(Context context) { final ArrayList<String> result = new ArrayList<String>(); result.add(KEY_TOGGLE_NSD); final UserManager um = (UserManager) context.getSystemService(Context.USER_SERVICE); final int myUserId = UserHandle.myUserId(); final boolean isSecondaryUser = myUserId != UserHandle.USER_OWNER; final boolean isWimaxEnabled = !isSecondaryUser && context.getResources().getBoolean( com.android.internal.R.bool.config_wimaxEnabled); if (!isWimaxEnabled || um.hasUserRestriction(UserManager.DISALLOW_CONFIG_MOBILE_NETWORKS)) { result.add(KEY_WIMAX_SETTINGS); } if (isSecondaryUser) { // Disable VPN result.add(KEY_VPN_SETTINGS); } // Remove NFC if not available final NfcManager manager = (NfcManager) context.getSystemService(Context.NFC_SERVICE); if (manager != null) { NfcAdapter adapter = manager.getDefaultAdapter(); if (adapter == null) { result.add(KEY_TOGGLE_NFC); result.add(KEY_ANDROID_BEAM_SETTINGS); /// M: Remove MTK NFC setting result.add(KEY_MTK_TOGGLE_NFC); } else { /// M: Remove NFC duplicate items @{ if (FeatureOption.MTK_NFC_ADDON_SUPPORT) { result.add(KEY_TOGGLE_NFC); result.add(KEY_ANDROID_BEAM_SETTINGS); } else { result.add(KEY_MTK_TOGGLE_NFC); } /// @} } } // Remove Mobile Network Settings and Manage Mobile Plan if it's a wifi-only device. if (isSecondaryUser || Utils.isWifiOnly(context)) { result.add(KEY_MOBILE_NETWORK_SETTINGS); result.add(KEY_MANAGE_MOBILE_PLAN); } // Remove Mobile Network Settings and Manage Mobile Plan // if config_show_mobile_plan sets false. final boolean isMobilePlanEnabled = context.getResources().getBoolean( R.bool.config_show_mobile_plan); if (!isMobilePlanEnabled) { result.add(KEY_MANAGE_MOBILE_PLAN); } final PackageManager pm = context.getPackageManager(); // Remove Airplane Mode settings if it's a stationary device such as a TV. if (pm.hasSystemFeature(PackageManager.FEATURE_TELEVISION)) { result.add(KEY_TOGGLE_AIRPLANE); } // proxy UI disabled until we have better app support result.add(KEY_PROXY_SETTINGS); // Disable Tethering if it's not allowed or if it's a wifi-only device ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); if (isSecondaryUser || !cm.isTetheringSupported()) { result.add(KEY_TETHER_SETTINGS); } // Enable link to CMAS app settings depending on the value in config.xml. boolean isCellBroadcastAppLinkEnabled = context.getResources().getBoolean( com.android.internal.R.bool.config_cellBroadcastAppLinks); try { if (isCellBroadcastAppLinkEnabled) { if (pm.getApplicationEnabledSetting("com.android.cellbroadcastreceiver") == PackageManager.COMPONENT_ENABLED_STATE_DISABLED) { isCellBroadcastAppLinkEnabled = false; // CMAS app disabled } } } catch (IllegalArgumentException ignored) { isCellBroadcastAppLinkEnabled = false; // CMAS app not installed } if (isSecondaryUser || !isCellBroadcastAppLinkEnabled) { result.add(KEY_CELL_BROADCAST_SETTINGS); } ///M: Reomve RCSE search if not support. 
if (!isAPKInstalled(context, RCSE_SETTINGS_INTENT)) { result.add(KEY_RCSE_SETTINGS); } return result; } }; ///M: private static boolean isAPKInstalled(Context context, String action) { Intent intent = new Intent(action); List<ResolveInfo> apps = context.getPackageManager().queryIntentActivities(intent, 0); return !(apps == null || apps.size() == 0); } /// M: @{ private PreferenceScreen mNetworkSettingsPreference; private void updateMobileNetworkEnabled() { // A simple check for whether a SIM card is inserted ISettingsMiscExt miscExt = UtilsExt.getMiscPlugin(getActivity()); TelephonyManager telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); int callState = telephonyManager.getCallState(); int simNum = SubscriptionManager.from(getActivity()).getActiveSubscriptionInfoCount(); Log.d(TAG, "callState = " + callState + " simNum = " + simNum); if (simNum > 0 && callState == TelephonyManager.CALL_STATE_IDLE && !miscExt.isWifiOnlyModeSet()) { mNetworkSettingsPreference.setEnabled(true); } else { /// M: for plug-in if (CdmaFeatureOptionUtils.isCT6MSupport()) { mNetworkSettingsPreference.setEnabled(CdmaFeatureOptionUtils .isCTLteTddTestSupport()); } else { mNetworkSettingsPreference.setEnabled(UtilsExt .getSimManagmentExtPlugin(getActivity()).useCtTestcard()); } } } private PhoneStateListener mPhoneStateListener = new PhoneStateListener() { @Override public void onCallStateChanged(int state, String incomingNumber) { super.onCallStateChanged(state, incomingNumber); Log.d(TAG, "PhoneStateListener, new state=" + state); if (state == TelephonyManager.CALL_STATE_IDLE && getActivity() != null) { updateMobileNetworkEnabled(); } } }; private final BroadcastReceiver mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (TelephonyIntents.ACTION_SUBINFO_RECORD_UPDATED.equals(action)) { Log.d(TAG, "ACTION_SUBINFO_RECORD_UPDATED received"); updateMobileNetworkEnabled(); // When the carrier config changes, update the WFC button } else if (CarrierConfigManager.ACTION_CARRIER_CONFIG_CHANGED.equals(action)) { Log.d(TAG, "carrier config changed..."); if (mButtonWfc != null) { if (ImsManager.isWfcEnabledByPlatform(context)) { Log.d(TAG, "wfc enabled, add WFC setting"); getPreferenceScreen().addPreference(mButtonWfc); mWfcExt.initPlugin(WirelessSettings.this); mButtonWfc.setSummary(mWfcExt.getWfcSummary(context, WifiCallingSettings.getWfcModeSummary(context, ImsManager.getWfcMode(context)))); mWfcExt.customizedWfcPreference(getActivity(), getPreferenceScreen()); mWfcExt.onWirelessSettingsEvent(DefaultWfcSettingsExt.CONFIG_CHANGE); } else { Log.d(TAG, "wfc disabled, remove WFC setting"); mWfcExt.onWirelessSettingsEvent(DefaultWfcSettingsExt.CONFIG_CHANGE); getPreferenceScreen().removePreference(mButtonWfc); } } } } }; /// @} }
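/*
 * Illustrative sketch, not part of the original source: the isAPKInstalled() helper above hides
 * vendor preferences (e.g. KEY_RCSE_SETTINGS) when no activity resolves their launch intent.
 * The standalone utility below shows the same PackageManager query pattern in isolation; the
 * class name, package and the sample action string are assumptions made for this example only.
 */
package com.example.settings.util;

import android.content.Context;
import android.content.Intent;
import android.content.pm.ResolveInfo;

import java.util.List;

public final class IntentAvailability {

    private IntentAvailability() {
        // utility class, no instances
    }

    /**
     * Returns true if at least one activity can handle the given intent action.
     * Usage (hypothetical action): hide a preference when
     * isActionResolvable(context, "com.example.rcse.SETTINGS") returns false.
     */
    public static boolean isActionResolvable(Context context, String action) {
        Intent intent = new Intent(action);
        List<ResolveInfo> apps = context.getPackageManager().queryIntentActivities(intent, 0);
        return apps != null && !apps.isEmpty();
    }
}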
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.admin.internal; import static org.asynchttpclient.Dsl.post; import static org.asynchttpclient.Dsl.put; import com.google.gson.Gson; import java.io.File; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import javax.ws.rs.client.Entity; import javax.ws.rs.client.InvocationCallback; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.GenericType; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.apache.pulsar.client.admin.PulsarAdminException; import org.apache.pulsar.client.admin.Source; import org.apache.pulsar.client.admin.Sources; import org.apache.pulsar.client.api.Authentication; import org.apache.pulsar.common.functions.UpdateOptions; import org.apache.pulsar.common.io.ConnectorDefinition; import org.apache.pulsar.common.io.SourceConfig; import org.apache.pulsar.common.policies.data.SourceStatus; import org.apache.pulsar.common.util.ObjectMapperFactory; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.RequestBuilder; import org.asynchttpclient.request.body.multipart.FilePart; import org.asynchttpclient.request.body.multipart.StringPart; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataMultiPart; @Slf4j public class SourcesImpl extends ComponentResource implements Sources, Source { private final WebTarget source; private final AsyncHttpClient asyncHttpClient; public SourcesImpl(WebTarget web, Authentication auth, AsyncHttpClient asyncHttpClient, long readTimeoutMs) { super(auth, readTimeoutMs); this.source = web.path("/admin/v3/source"); this.asyncHttpClient = asyncHttpClient; } @Override public List<String> listSources(String tenant, String namespace) throws PulsarAdminException { try { return listSourcesAsync(tenant, namespace).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<List<String>> listSourcesAsync(String tenant, String namespace) { WebTarget path = source.path(tenant).path(namespace); final CompletableFuture<List<String>> future = new CompletableFuture<>(); asyncGetRequest(path, new InvocationCallback<Response>() { @Override public void completed(Response response) { if (!response.getStatusInfo().equals(Response.Status.OK)) { 
future.completeExceptionally(getApiException(response)); } else { future.complete(response.readEntity(new GenericType<List<String>>() {})); } } @Override public void failed(Throwable throwable) { future.completeExceptionally(getApiException(throwable.getCause())); } }); return future; } @Override public SourceConfig getSource(String tenant, String namespace, String sourceName) throws PulsarAdminException { try { return getSourceAsync(tenant, namespace, sourceName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<SourceConfig> getSourceAsync(String tenant, String namespace, String sourceName) { WebTarget path = source.path(tenant).path(namespace).path(sourceName); final CompletableFuture<SourceConfig> future = new CompletableFuture<>(); asyncGetRequest(path, new InvocationCallback<Response>() { @Override public void completed(Response response) { if (!response.getStatusInfo().equals(Response.Status.OK)) { future.completeExceptionally(getApiException(response)); } else { future.complete(response.readEntity(SourceConfig.class)); } } @Override public void failed(Throwable throwable) { future.completeExceptionally(getApiException(throwable.getCause())); } }); return future; } @Override public SourceStatus getSourceStatus( String tenant, String namespace, String sourceName) throws PulsarAdminException { try { return getSourceStatusAsync(tenant, namespace, sourceName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<SourceStatus> getSourceStatusAsync(String tenant, String namespace, String sourceName) { WebTarget path = source.path(tenant).path(namespace).path(sourceName).path("status"); final CompletableFuture<SourceStatus> future = new CompletableFuture<>(); asyncGetRequest(path, new InvocationCallback<Response>() { @Override public void completed(Response response) { if (!response.getStatusInfo().equals(Response.Status.OK)) { future.completeExceptionally(getApiException(response)); } else { future.complete(response.readEntity(SourceStatus.class)); } } @Override public void failed(Throwable throwable) { future.completeExceptionally(getApiException(throwable.getCause())); } }); return future; } @Override public SourceStatus.SourceInstanceStatus.SourceInstanceStatusData getSourceStatus( String tenant, String namespace, String sourceName, int id) throws PulsarAdminException { try { return getSourceStatusAsync(tenant, namespace, sourceName, id) .get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<SourceStatus.SourceInstanceStatus.SourceInstanceStatusData> getSourceStatusAsync( String tenant, String namespace, String sourceName, int id) { WebTarget path = 
source.path(tenant).path(namespace).path(sourceName).path(Integer.toString(id)).path("status"); final CompletableFuture<SourceStatus.SourceInstanceStatus.SourceInstanceStatusData> future = new CompletableFuture<>(); asyncGetRequest(path, new InvocationCallback<Response>() { @Override public void completed(Response response) { if (!response.getStatusInfo().equals(Response.Status.OK)) { future.completeExceptionally(getApiException(response)); } else { future.complete(response.readEntity( SourceStatus.SourceInstanceStatus.SourceInstanceStatusData.class)); } } @Override public void failed(Throwable throwable) { future.completeExceptionally(getApiException(throwable.getCause())); } }); return future; } @Override public void createSource(SourceConfig sourceConfig, String fileName) throws PulsarAdminException { try { createSourceAsync(sourceConfig, fileName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> createSourceAsync(SourceConfig sourceConfig, String fileName) { final CompletableFuture<Void> future = new CompletableFuture<>(); try { RequestBuilder builder = post(source.path(sourceConfig.getTenant()) .path(sourceConfig.getNamespace()).path(sourceConfig.getName()).getUri().toASCIIString()) .addBodyPart(new StringPart("sourceConfig", ObjectMapperFactory.getThreadLocal() .writeValueAsString(sourceConfig), MediaType.APPLICATION_JSON)); if (fileName != null && !fileName.startsWith("builtin://")) { // If the function code is built in, we don't need to submit here builder.addBodyPart(new FilePart("data", new File(fileName), MediaType.APPLICATION_OCTET_STREAM)); } asyncHttpClient.executeRequest(addAuthHeaders(source, builder).build()) .toCompletableFuture() .thenAccept(response -> { if (response.getStatusCode() < 200 || response.getStatusCode() >= 300) { future.completeExceptionally( getApiException(Response .status(response.getStatusCode()) .entity(response.getResponseBody()).build())); } else { future.complete(null); } }) .exceptionally(throwable -> { future.completeExceptionally(getApiException(throwable)); return null; }); } catch (Exception e) { future.completeExceptionally(getApiException(e)); } return future; } @Override public void createSourceWithUrl(SourceConfig sourceConfig, String pkgUrl) throws PulsarAdminException { try { createSourceWithUrlAsync(sourceConfig, pkgUrl).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> createSourceWithUrlAsync(SourceConfig sourceConfig, String pkgUrl) { final FormDataMultiPart mp = new FormDataMultiPart(); mp.bodyPart(new FormDataBodyPart("url", pkgUrl, MediaType.TEXT_PLAIN_TYPE)); mp.bodyPart(new FormDataBodyPart("sourceConfig", new Gson().toJson(sourceConfig), MediaType.APPLICATION_JSON_TYPE)); WebTarget path = source.path(sourceConfig.getTenant()) .path(sourceConfig.getNamespace()).path(sourceConfig.getName()); return asyncPostRequest(path, Entity.entity(mp, MediaType.MULTIPART_FORM_DATA)); } @Override public void deleteSource(String cluster, 
String namespace, String function) throws PulsarAdminException { try { deleteSourceAsync(cluster, namespace, function).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> deleteSourceAsync(String tenant, String namespace, String function) { WebTarget path = source.path(tenant).path(namespace).path(function); return asyncDeleteRequest(path); } @Override public void updateSource(SourceConfig sourceConfig, String fileName, UpdateOptions updateOptions) throws PulsarAdminException { try { updateSourceAsync(sourceConfig, fileName, updateOptions).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> updateSourceAsync( SourceConfig sourceConfig, String fileName, UpdateOptions updateOptions) { final CompletableFuture<Void> future = new CompletableFuture<>(); try { RequestBuilder builder = put(source.path(sourceConfig.getTenant()).path(sourceConfig.getNamespace()) .path(sourceConfig.getName()).getUri().toASCIIString()) .addBodyPart(new StringPart("sourceConfig", ObjectMapperFactory.getThreadLocal() .writeValueAsString(sourceConfig), MediaType.APPLICATION_JSON)); if (updateOptions != null) { builder.addBodyPart(new StringPart("updateOptions", ObjectMapperFactory.getThreadLocal().writeValueAsString(updateOptions), MediaType.APPLICATION_JSON)); } if (fileName != null && !fileName.startsWith("builtin://")) { // If the function code is built in, we don't need to submit here builder.addBodyPart(new FilePart("data", new File(fileName), MediaType.APPLICATION_OCTET_STREAM)); } asyncHttpClient.executeRequest(addAuthHeaders(source, builder).build()) .toCompletableFuture() .thenAccept(response -> { if (response.getStatusCode() < 200 || response.getStatusCode() >= 300) { future.completeExceptionally( getApiException(Response .status(response.getStatusCode()) .entity(response.getResponseBody()).build())); } else { future.complete(null); } }) .exceptionally(throwable -> { future.completeExceptionally(getApiException(throwable)); return null; }); } catch (Exception e) { future.completeExceptionally(getApiException(e)); } return future; } @Override public void updateSource(SourceConfig sourceConfig, String fileName) throws PulsarAdminException { updateSource(sourceConfig, fileName, null); } @Override public CompletableFuture<Void> updateSourceAsync(SourceConfig sourceConfig, String fileName) { return updateSourceAsync(sourceConfig, fileName, null); } @Override public void updateSourceWithUrl(SourceConfig sourceConfig, String pkgUrl, UpdateOptions updateOptions) throws PulsarAdminException { try { updateSourceWithUrlAsync(sourceConfig, pkgUrl, updateOptions) .get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> 
updateSourceWithUrlAsync( SourceConfig sourceConfig, String pkgUrl, UpdateOptions updateOptions) { final CompletableFuture<Void> future = new CompletableFuture<>(); try { final FormDataMultiPart mp = new FormDataMultiPart(); mp.bodyPart(new FormDataBodyPart("url", pkgUrl, MediaType.TEXT_PLAIN_TYPE)); mp.bodyPart(new FormDataBodyPart( "sourceConfig", new Gson().toJson(sourceConfig), MediaType.APPLICATION_JSON_TYPE)); if (updateOptions != null) { mp.bodyPart(new FormDataBodyPart( "updateOptions", ObjectMapperFactory.getThreadLocal().writeValueAsString(updateOptions), MediaType.APPLICATION_JSON_TYPE)); } WebTarget path = source.path(sourceConfig.getTenant()).path(sourceConfig.getNamespace()) .path(sourceConfig.getName()); return asyncPutRequest(path, Entity.entity(mp, MediaType.MULTIPART_FORM_DATA)); } catch (Exception e) { future.completeExceptionally(getApiException(e)); } return future; } @Override public void updateSourceWithUrl(SourceConfig sourceConfig, String pkgUrl) throws PulsarAdminException { updateSourceWithUrl(sourceConfig, pkgUrl, null); } @Override public CompletableFuture<Void> updateSourceWithUrlAsync(SourceConfig sourceConfig, String pkgUrl) { return updateSourceWithUrlAsync(sourceConfig, pkgUrl, null); } @Override public void restartSource(String tenant, String namespace, String functionName, int instanceId) throws PulsarAdminException { try { restartSourceAsync(tenant, namespace, functionName, instanceId) .get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> restartSourceAsync( String tenant, String namespace, String functionName, int instanceId) { WebTarget path = source.path(tenant).path(namespace).path(functionName).path(Integer.toString(instanceId)) .path("restart"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public void restartSource(String tenant, String namespace, String functionName) throws PulsarAdminException { try { restartSourceAsync(tenant, namespace, functionName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> restartSourceAsync(String tenant, String namespace, String functionName) { WebTarget path = source.path(tenant).path(namespace).path(functionName).path("restart"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public void stopSource(String tenant, String namespace, String sourceName, int instanceId) throws PulsarAdminException { try { stopSourceAsync(tenant, namespace, sourceName, instanceId).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> stopSourceAsync(String tenant, String namespace, String sourceName, int instanceId) { WebTarget path = 
source.path(tenant).path(namespace).path(sourceName).path(Integer.toString(instanceId)) .path("stop"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public void stopSource(String tenant, String namespace, String sourceName) throws PulsarAdminException { try { stopSourceAsync(tenant, namespace, sourceName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> stopSourceAsync(String tenant, String namespace, String sourceName) { WebTarget path = source.path(tenant).path(namespace).path(sourceName).path("stop"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public void startSource(String tenant, String namespace, String sourceName, int instanceId) throws PulsarAdminException { try { startSourceAsync(tenant, namespace, sourceName, instanceId).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> startSourceAsync( String tenant, String namespace, String sourceName, int instanceId) { WebTarget path = source.path(tenant).path(namespace).path(sourceName).path(Integer.toString(instanceId)) .path("start"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public void startSource(String tenant, String namespace, String sourceName) throws PulsarAdminException { try { startSourceAsync(tenant, namespace, sourceName).get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> startSourceAsync(String tenant, String namespace, String sourceName) { WebTarget path = source.path(tenant).path(namespace).path(sourceName).path("start"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } @Override public List<ConnectorDefinition> getBuiltInSources() throws PulsarAdminException { try { return getBuiltInSourcesAsync().get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<List<ConnectorDefinition>> getBuiltInSourcesAsync() { WebTarget path = source.path("builtinsources"); final CompletableFuture<List<ConnectorDefinition>> future = new CompletableFuture<>(); asyncGetRequest(path, new InvocationCallback<Response>() { @Override public void completed(Response response) { if (!response.getStatusInfo().equals(Response.Status.OK)) { future.completeExceptionally(getApiException(response)); } else { future.complete(response.readEntity( new GenericType<List<ConnectorDefinition>>() {})); } } 
@Override public void failed(Throwable throwable) { future.completeExceptionally(getApiException(throwable.getCause())); } }); return future; } @Override public void reloadBuiltInSources() throws PulsarAdminException { try { reloadBuiltInSourcesAsync().get(this.readTimeoutMs, TimeUnit.MILLISECONDS); } catch (ExecutionException e) { throw (PulsarAdminException) e.getCause(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new PulsarAdminException(e); } catch (TimeoutException e) { throw new PulsarAdminException.TimeoutException(e); } } @Override public CompletableFuture<Void> reloadBuiltInSourcesAsync() { WebTarget path = source.path("reloadBuiltInSources"); return asyncPostRequest(path, Entity.entity("", MediaType.APPLICATION_JSON)); } }
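/*
 * Illustrative sketch, not part of the original source: how the SourcesImpl methods above are
 * normally reached through the PulsarAdmin client facade. The service URL, tenant and namespace
 * below are placeholders chosen for this example.
 */
package org.apache.pulsar.client.admin.examples;

import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.common.io.SourceConfig;

public class ListSourcesExample {

    public static void main(String[] args) throws Exception {
        // Build an admin client against a placeholder broker web-service URL.
        PulsarAdmin admin = PulsarAdmin.builder()
                .serviceHttpUrl("http://localhost:8080")
                .build();
        try {
            // Synchronous call; internally it delegates to listSourcesAsync() bounded by the read timeout.
            for (String name : admin.sources().listSources("public", "default")) {
                SourceConfig config = admin.sources().getSource("public", "default", name);
                System.out.println(name + " -> " + config);
            }
        } finally {
            admin.close();
        }
    }
}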
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.kernel.disruptor.inbound; import com.google.common.util.concurrent.SettableFuture; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.andes.kernel.AndesChannel; import org.wso2.andes.kernel.AndesException; import org.wso2.andes.kernel.AndesMessage; import org.wso2.andes.kernel.AndesMessageMetadata; import org.wso2.andes.kernel.MessagingEngine; import org.wso2.andes.kernel.slot.SlotMessageCounter; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** * This is the Andes transaction event related class. This event object handles * the life cycle of a single transaction coming from the protocol level to Andes. */ public class InboundTransactionEvent implements AndesInboundStateEvent { private static Log log = LogFactory.getLog(InboundTransactionEvent.class); /** * Reference to Disruptor based event manager */ private final InboundEventManager eventManager; /** * Internal event type to denote the current state of the transaction */ private EventType eventType; /** * This is used to make {@link #commit()}, {@link #rollback()} and {@link #close()} methods * blocking calls */ private SettableFuture<Boolean> taskCompleted; /** * Message list of the current transaction. This list doesn't have the duplicates that are * created for topics. */ private ConcurrentLinkedQueue<AndesMessage> messageQueue; /** * Reference to {@link org.wso2.andes.kernel.MessagingEngine} to do message storing operations */ private final MessagingEngine messagingEngine; /** * Maximum batch size for a transaction. Limit is set for content size of the batch. * Exceeding this limit will lead to a failure in the subsequent commit request. */ private final int maxBatchSize; /** * Cached content size of the current batch at a given point in time. */ private int currentBatchSize; /** * Reference to the channel of the publisher */ private final AndesChannel channel; /** * maximum wait time for commit, rollback or close event to complete */ private final long txWaitTimeout; /** * Check whether messages are stored to DB for the current transaction. If this is true, it means * messages are stored in DB but {@link org.wso2.andes.kernel.slot.SlotMessageCounter} is not * updated completely. */ private boolean messagesStoredNotCommitted; /** * Enqueued messages of the current transaction that are to be committed */ Queue<AndesMessage> getQueuedMessages() { return messageQueue; } /** * This message queue represents the message list of the current transaction.
For topics, this queue gets duplicates * after processing by {@link org.wso2.andes.kernel.disruptor.inbound.MessagePreProcessor}. * * @param queue message list of the current transaction */ void setQueue(ConcurrentLinkedQueue<AndesMessage> queue) { this.messageQueue = queue; } void addMessages(Collection<AndesMessage> messages) { this.messageQueue.addAll(messages); } void clearMessages() { this.messageQueue.clear(); } /** * Supported state events */ private enum EventType { /** Transaction commit related event type */ TX_COMMIT_EVENT, /** Transaction rollback related event type */ TX_ROLLBACK_EVENT, /** Close the current transaction and release all resources */ TX_CLOSE_EVENT } /** * Create a transaction object to handle a single transaction. * @param messagingEngine {@link org.wso2.andes.kernel.MessagingEngine} * @param eventManager InboundEventManager * @param maxBatchSize maximum batch size for a commit * @param channel AndesChannel * @param txWaitTimeout maximum wait time for commit, rollback or close event to complete */ public InboundTransactionEvent(MessagingEngine messagingEngine, InboundEventManager eventManager, int maxBatchSize, long txWaitTimeout, AndesChannel channel) { this.messagingEngine = messagingEngine; this.eventManager = eventManager; messageQueue = new ConcurrentLinkedQueue<>(); taskCompleted = SettableFuture.create(); this.maxBatchSize = maxBatchSize; this.channel = channel; this.txWaitTimeout = txWaitTimeout; } /** * This will commit the batched transacted messages to the persistent storage using the Andes * underlying event manager. * * This is a blocking call. * @throws AndesException */ public void commit() throws AndesException { if (currentBatchSize > maxBatchSize) { currentBatchSize = 0; messageQueue.clear(); throw new AndesException("Current enqueued batch size exceeds maximum transactional batch size of " + maxBatchSize + " bytes."); } if (log.isDebugEnabled()) { log.debug("Prepare for commit. Channel id: " + channel.getId()); } eventType = EventType.TX_COMMIT_EVENT; taskCompleted = SettableFuture.create(); // Publish to event manager for processing eventManager.requestTransactionCommitEvent(this, channel); // Make the call blocking waitForCompletion(); } /** * This will roll back the transaction. This is done using the Andes underlying event manager. * This is a blocking call. * * @throws AndesException */ public void rollback() throws AndesException { if (log.isDebugEnabled()) { log.debug("Prepare for rollback. Channel: " + channel.getId()); } eventType = EventType.TX_ROLLBACK_EVENT; taskCompleted = SettableFuture.create(); // Publish to event manager for processing eventManager.requestTransactionRollbackEvent(this, channel); // Make the call blocking waitForCompletion(); } /** * Add a message to a transaction. Added messages will be persisted in DB only when * commit is invoked. The underlying event manager will add the message to the transaction. * * This is an asynchronous call. * @param message AndesMessage */ public void enqueue(AndesMessage message) { currentBatchSize = currentBatchSize + message.getMetadata().getMessageContentLength(); if (currentBatchSize > maxBatchSize) { messageQueue.clear(); // if the max batch size is exceeded, invalidate the commit.
} else { // This will go through ContentChunkHandler and add the message to the message list of the transaction eventManager.requestTransactionEnqueueEvent(message, this, channel); if (log.isDebugEnabled()) { log.debug("Enqueue message with message id " + message.getMetadata().getMessageID() + " for transaction "); } } } /** * Release all resources used by the transaction object. This should be called when the transactional session is * closed. This is to prevent unwanted resource usage (DB connections etc) after closing * a transactional session. * * @throws AndesException */ public void close() throws AndesException { eventType = EventType.TX_CLOSE_EVENT; taskCompleted = SettableFuture.create(); eventManager.requestTransactionCloseEvent(this, channel); waitForCompletion(); } /** * Update internal state of the transaction according to the prepared event of the transaction. * This method is called by the state event handler. */ @Override public void updateState() throws AndesException { switch (eventType) { case TX_COMMIT_EVENT: executeCommitEvent(); break; case TX_ROLLBACK_EVENT: executeRollbackEvent(); break; case TX_CLOSE_EVENT: executeCloseEvent(); break; default: if (log.isDebugEnabled()) { log.debug("Event " + eventType + " ignored."); } break; } } /** * {@inheritDoc} */ @Override public String eventInfo() { return "Event type " + eventType; } /** * Close the current transaction */ private void executeCloseEvent() throws AndesException { try { messageQueue.clear(); currentBatchSize = 0; taskCompleted.set(true); } catch (Throwable t) { // Exception is passed to the caller who is waiting on the future taskCompleted.setException(t); messageQueue.clear(); throw new AndesException("Exception occurred while closing transactional channel " + channel.getId(), t); } } /** * Update the state of Andes core by informing the slot counter about written messages. * This is called by {@link org.wso2.andes.kernel.disruptor.inbound.StateEventHandler}. * Messages are written to DB by {@link org.wso2.andes.kernel.disruptor.inbound.MessageWriter}. */ private void executeCommitEvent() throws AndesException { try { messagesStoredNotCommitted = true; // update slot information for transaction related messages SlotMessageCounter.getInstance().recordMetadataCountInSlot(getQueuedMessages()); messageQueue.clear(); messagesStoredNotCommitted = false; // Once slots are updated rolling back is irrelevant. currentBatchSize = 0; taskCompleted.set(true); } catch (Throwable t) { // Exception is passed to the caller of the get method of the settable future taskCompleted.setException(t); throw new AndesException("Exception occurred while committing transaction. Channel id " + channel.getId(), t); } } /** * Undo changes done by the current transaction. This has DB interactions and Andes core state changes mixed, * hence this is called from {@link org.wso2.andes.kernel.disruptor.inbound.StateEventHandler}. */ private void executeRollbackEvent() throws AndesException { try { if (messagesStoredNotCommitted) { List<AndesMessageMetadata> messagesToRemove = new ArrayList<>(); for (AndesMessage message : messageQueue) { messagesToRemove.add(message.getMetadata()); } messagingEngine.deleteMessages(messagesToRemove); messagesStoredNotCommitted = false; } messageQueue.clear(); currentBatchSize = 0; taskCompleted.set(true); } catch (Throwable t) { taskCompleted.setException(t); throw new AndesException("Exception occurred while rolling back transaction. Channel id " + channel.getId(), t); } } /** * Wait until the respective task sets the value of the future once the task is completed. * * @return True if the task is successful, false otherwise * @throws AndesException */ private Boolean waitForCompletion() throws AndesException { try { return taskCompleted.get(txWaitTimeout, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (ExecutionException e) { String errMsg = "Error occurred while processing transaction event " + eventType; log.error(errMsg, e); throw new AndesException(errMsg, e); } catch (TimeoutException e) { String errMsg = eventType + " Timeout. Didn't complete within " + txWaitTimeout + " seconds."; log.error(errMsg, e); throw new AndesException(errMsg, e); } return false; } }
package com.bwyap.engine.gui.element.base; import org.joml.Vector2f; import com.bwyap.engine.gui.element.EllipticalButton; import com.bwyap.engine.gui.element.properties.Fade; import com.bwyap.engine.gui.element.properties.TextComponent; import com.bwyap.engine.gui.interfaces.GUIBoundsInterface; import com.bwyap.engine.gui.interfaces.IFade; import com.bwyap.engine.gui.interfaces.ITextDisplay; import com.bwyap.engine.input.InputHandler; import com.bwyap.lwjgl.engine.resource.LWJGLResourceManager; /** * An extension of the panel class that can be used * as an in-screen window, which can be moved, resized * and closed. * @author bwyap * */ public abstract class PanelWindow extends Panel implements IFade, ITextDisplay { private float MIN_WIDTH = 25; private float MIN_HEIGHT = 25; private float MAX_WIDTH; private float MAX_HEIGHT; private float RESIZE_AREA = 6; // Window properties private boolean resizable; private boolean movable; private boolean canMove; // disabled when the mouse button is pressed private boolean visible; private boolean keepWithinParent; private boolean fadeOnClose; private final Fade fade; private EllipticalButton closeButton; private float closeButtonRadius = 6, closeButtonPadding = 6; private boolean moving = false; private boolean resizing = false; private final TextComponent title; /** * Create a panel window with the specified dimensions. * The panel is not resizable but is movable by default. * The maxWidth and maxHeight are set to effectively unlimited values. * @param x * @param y * @param width * @param height */ public PanelWindow(String title, float x, float y, float width, float height) { this(title, x, y, width, height, 100000, 100000); setResizable(false); } /** * Create a panel window with the specified dimensions and specified maxWidth and maxHeight. * This panel is resizable and movable by default.
* @param x * @param y * @param width * @param height * @param maxWidth * @param maxHeight */ public PanelWindow(String name, float x, float y, float width, float height, float maxWidth, float maxHeight) { super(x, y, width, height); fade = new Fade(); fadeOnClose = true; keepWithinParent = true; movable = true; resizable = true; title = new TextComponent(name); title.setFontName(LWJGLResourceManager.instance().lib.getFont("default")); title.setAlignment(ETextAlignment.TOP_CENTER); title.setOffset(0, 3); title.setTextSize(15.0f); MAX_WIDTH = maxWidth; MAX_HEIGHT = maxHeight; initButtons(); initElements(); } /** * Initialise the close button */ private void initButtons() { closeButton = new EllipticalButton(closeButtonPadding, closeButtonPadding, closeButtonRadius, closeButtonRadius) { @Override public void onMouseClicked(float x, float y, int mouseButton) { if (!resizing) { if (fadeOnClose) { fade.setFading(true); fade.setFade(1.0f); } else close(); } } @Override public void onMouseDown(float x, float y, int mouseButton) { // Disable moving the window when the mouse button is down over the close button // re-enabled when the mouse is released (see onHandleInput method) canMove = moving || false; } }; closeButton.setScaleAbsolute(true); closeButton.setPositionAbsolute(true); closeButton.colourProperties().setColour(1.0f, 0.0f, 0.0f, 1.0f); closeButton.colourProperties().setMouseoverColour(1.0f, 0.5f, 0.5f, 1.0f); closeButton.colourProperties().setMouseDownColour(1.0f, 0.2f, 0.2f, 1.0f); addElement(closeButton); } @Override public TextComponent getTextComponent() { return title; } @Override public Fade getFade() { return fade; } /** * Initialise any GUI components that should be displayed in the window */ protected abstract void initElements(); @Override public void onUpdate(float timestep) { if (fade.isFading()) { fade.decreaseFade(timestep * 5.0f); if (fade.getFade() == 0) { fade.setFading(false); close(); } } } @Override public void onHandleInput(InputHandler input, GUIBoundsInterface bounds) { super.onHandleInput(input, bounds); // Resize window if (resizable) resizeWindow(input, bounds); // Move window if (movable && canMove && !resizing) moveWindow(input, bounds); if (!input.isMouseDown()) canMove = true; } private Vector2f mouseDisplacement; private Vector2f original; /** * Resize the window according to the mouse movement * @param input * @param bounds */ private void resizeWindow(InputHandler input, GUIBoundsInterface bounds) { // Check if the mouse is within the resize area if ((input.getMouseX() > getPositionX() + (getWidth() - RESIZE_AREA) && input.getMouseX() < getPositionX() + getWidth() && input.getMouseY() > getPositionY() + (getHeight() - RESIZE_AREA) && input.getMouseY() < getPositionY() + getHeight()) || resizing) { // Check if the mouse is down if (input.isMouseDown(LWJGLResourceManager.instance().inputMapping().getBinding("mouse_left"))) { mouseOver = true; if (!resizing) { // Set resize origin resizing = true; mouseDisplacement = new Vector2f((float)input.getMouseX(), (float)input.getMouseY()); original = new Vector2f(getWidth(), getHeight()); } // Calculate displacement float newWidth = ((float)input.getMouseX() - mouseDisplacement.x + original.x)/scaleX; float newHeight = ((float)input.getMouseY() - mouseDisplacement.y + original.y)/scaleY; // Cut resizing if the panel must stay within its parent if (keepWithinParent) { if (newWidth + getAbsolutePosition().x > bounds.getBounds().x/scaleX) newWidth = bounds.getBounds().x/scaleX - getAbsolutePosition().x; if (newHeight + 
getAbsolutePosition().y > bounds.getBounds().y/scaleY) newHeight = bounds.getBounds().y/scaleY - getAbsolutePosition().y; } // Resize within minimum and maximum size setBounds(newWidth < MIN_WIDTH ? MIN_WIDTH : (newWidth > MAX_WIDTH ? MAX_WIDTH : newWidth), newHeight < MIN_HEIGHT ? MIN_HEIGHT : (newHeight > MAX_HEIGHT ? MAX_HEIGHT : newHeight)); } else resizing = false; } } /** * Move the window according to the input handler * @param input * @param bounds */ protected void moveWindow(InputHandler input, GUIBoundsInterface bounds) { // Check if the window should be moved if (withinBounds((float)input.getMouseX(), (float)input.getMouseY()) || moving) { // Check if the mouse is down if (input.isMouseDown(LWJGLResourceManager.instance().inputMapping().getBinding("mouse_left"))) { mouseOver = true; if (!moving) { // Set movement origin moving = true; mouseDisplacement = new Vector2f((float)input.getMouseX(), (float)input.getMouseY()).sub(new Vector2f(getPosition())); } // Calculate displacement float xOffset = ((float)input.getMouseX() - mouseDisplacement.x - bounds.getPositionX())/scaleX; float yOffset = ((float)input.getMouseY() - mouseDisplacement.y - bounds.getPositionY())/scaleY; // Cut movement if the panel must stay within its parent if (keepWithinParent) { if (getWidth()/scaleX + xOffset > bounds.getBounds().x/scaleX) xOffset = (bounds.getBounds().x - getWidth())/scaleX; else if (xOffset < 0) xOffset = 0; if (getHeight()/scaleY + yOffset > bounds.getBounds().y/scaleY) yOffset = (bounds.getBounds().y - getHeight())/scaleY; else if (yOffset < 0) yOffset = 0; } // Set the position setPosition(xOffset, yOffset); } else moving = false; } } /** * Close the window. * Override this method to implement custom functionality * (ensure you call {@code super.close()}). */ public void close() { setVisible(false); } /** * Set whether the window should fade out on close * @param fadeOnClose */ public void setFadeOnClose(boolean fadeOnClose) { this.fadeOnClose = fadeOnClose; } /** * Checks if the window will fade out on close * @return */ public boolean fadeOnClose() { return fadeOnClose; } /** * Check if the panel window is visible * @return */ public boolean isVisible() { return visible; } /** * Set whether the panel window is visible * @param visible */ public void setVisible(boolean visible) { this.visible = visible; } /** * Check if the panel window is movable * @return */ public boolean isMovable() { return movable; } /** * Set whether the panel window is movable * @param movable */ public void setMovable(boolean movable) { this.movable = movable; } /** * Check if the panel window is resizable * @return */ public boolean isResizable() { return resizable; } /** * Set whether the panel window is resizable * @param resizable */ public void setResizable(boolean resizable) { this.resizable = resizable; } /** * Set the maximum dimensions for this panel window. * These should be greater than the minimum dimensions. * @param maxWidth * @param maxHeight */ public void setMaxDimensions(float maxWidth, float maxHeight) { this.MAX_WIDTH = maxWidth; this.MAX_HEIGHT = maxHeight; } /** * Set the minimum dimensions for this panel window. * These should be less than the maximum dimensions. * @param minWidth * @param minHeight */ public void setMinDimensions(float minWidth, float minHeight) { this.MIN_WIDTH = minWidth; this.MIN_HEIGHT = minHeight; } /** * Set whether this panel window must * be kept within its parent when moved. 
* @param keepWithinParent */ public void setKeepWithinParent(boolean keepWithinParent) { this.keepWithinParent = keepWithinParent; } }
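/*
 * Illustrative sketch, not part of the original source: a minimal concrete PanelWindow.
 * Subclasses only need to supply initElements(); moving, resizing, the close button and fading
 * are inherited. The title, position and size values below are arbitrary examples.
 */
package com.bwyap.engine.gui.element.base.examples;

import com.bwyap.engine.gui.element.base.PanelWindow;

public class ExampleWindow extends PanelWindow {

    public ExampleWindow() {
        // The five-argument constructor creates a non-resizable, movable window.
        super("Example", 50, 50, 320, 200);
        setFadeOnClose(true);       // fade out instead of disappearing immediately on close
        setKeepWithinParent(true);  // keep the window inside its parent bounds while dragging
    }

    @Override
    protected void initElements() {
        // Add child GUI elements for the window body here.
    }
}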
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.compute.model; /** * Represents a Target HTTP Proxy resource. * * Google Compute Engine has two Target HTTP Proxy resources: * * * [Global](/compute/docs/reference/rest/{$api_version}/targetHttpProxies) * * [Regional](/compute/docs/reference/rest/{$api_version}/regionTargetHttpProxies) * * A target HTTP proxy is a component of GCP HTTP load balancers. * * * targetHttpProxies are used by external HTTP load balancers and Traffic Director. * * regionTargetHttpProxies are used by internal HTTP load balancers. * * Forwarding rules reference a target HTTP proxy, and the target proxy then references a URL map. * For more information, read Using Target Proxies and Forwarding rule concepts. (== resource_for * {$api_version}.targetHttpProxies ==) (== resource_for {$api_version}.regionTargetHttpProxies ==) * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class TargetHttpProxy extends com.google.api.client.json.GenericJson { /** * [Output Only] Creation timestamp in RFC3339 text format. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String creationTimestamp; /** * An optional description of this resource. Provide this property when you create the resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String description; /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to- * date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpProxy. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String fingerprint; /** * URLs to networkservices.HttpFilter resources enabled for xDS clients using this configuration. * For example, https://networkservices.googleapis.com/v1alpha1/projects/project/locations/locatio * nhttpFilters/httpFilter Only filters that handle outbound connection and stream events may be * specified. These filters work in conjunction with a default set of HTTP filters that may * already be configured by Traffic Director. Traffic Director will determine the final location * of these filters within xDS configuration based on the name of the HTTP filter. 
If Traffic * Director positions multiple filters at the same location, those filters will be in the same * order as specified in this list. httpFilters only applies for loadbalancers with * loadBalancingScheme set to INTERNAL_SELF_MANAGED. See ForwardingRule for more details. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> httpFilters; /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.math.BigInteger id; /** * [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set to INTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean proxyBind; /** * [Output Only] URL of the region where the regional Target HTTP Proxy resides. This field is not * applicable to global Target HTTP Proxies. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String region; /** * [Output Only] Server-defined URL for the resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selfLink; /** * [Output Only] Server-defined URL for this resource with the resource id. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selfLinkWithId; /** * URL to the UrlMap resource that defines the mapping from URL to the BackendService. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String urlMap; /** * [Output Only] Creation timestamp in RFC3339 text format. * @return value or {@code null} for none */ public java.lang.String getCreationTimestamp() { return creationTimestamp; } /** * [Output Only] Creation timestamp in RFC3339 text format. * @param creationTimestamp creationTimestamp or {@code null} for none */ public TargetHttpProxy setCreationTimestamp(java.lang.String creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; } /** * An optional description of this resource. Provide this property when you create the resource. 
* @return value or {@code null} for none */ public java.lang.String getDescription() { return description; } /** * An optional description of this resource. Provide this property when you create the resource. * @param description description or {@code null} for none */ public TargetHttpProxy setDescription(java.lang.String description) { this.description = description; return this; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to- * date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpProxy. * @see #decodeFingerprint() * @return value or {@code null} for none */ public java.lang.String getFingerprint() { return fingerprint; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to- * date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpProxy. * @see #getFingerprint() * @return Base64 decoded value or {@code null} for none * * @since 1.14 */ public byte[] decodeFingerprint() { return com.google.api.client.util.Base64.decodeBase64(fingerprint); } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to- * date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpProxy. * @see #encodeFingerprint() * @param fingerprint fingerprint or {@code null} for none */ public TargetHttpProxy setFingerprint(java.lang.String fingerprint) { this.fingerprint = fingerprint; return this; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpProxy. An up-to- * date fingerprint must be provided in order to patch/update the TargetHttpProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpProxy. * @see #setFingerprint() * * <p> * The value is encoded Base64 or {@code null} for none. * </p> * * @since 1.14 */ public TargetHttpProxy encodeFingerprint(byte[] fingerprint) { this.fingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(fingerprint); return this; } /** * URLs to networkservices.HttpFilter resources enabled for xDS clients using this configuration. * For example, https://networkservices.googleapis.com/v1alpha1/projects/project/locations/locatio * nhttpFilters/httpFilter Only filters that handle outbound connection and stream events may be * specified. These filters work in conjunction with a default set of HTTP filters that may * already be configured by Traffic Director. Traffic Director will determine the final location * of these filters within xDS configuration based on the name of the HTTP filter. 
If Traffic * Director positions multiple filters at the same location, those filters will be in the same * order as specified in this list. httpFilters only applies for loadbalancers with * loadBalancingScheme set to INTERNAL_SELF_MANAGED. See ForwardingRule for more details. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getHttpFilters() { return httpFilters; } /** * URLs to networkservices.HttpFilter resources enabled for xDS clients using this configuration. * For example, https://networkservices.googleapis.com/v1alpha1/projects/project/locations/locatio * nhttpFilters/httpFilter Only filters that handle outbound connection and stream events may be * specified. These filters work in conjunction with a default set of HTTP filters that may * already be configured by Traffic Director. Traffic Director will determine the final location * of these filters within xDS configuration based on the name of the HTTP filter. If Traffic * Director positions multiple filters at the same location, those filters will be in the same * order as specified in this list. httpFilters only applies for loadbalancers with * loadBalancingScheme set to INTERNAL_SELF_MANAGED. See ForwardingRule for more details. * @param httpFilters httpFilters or {@code null} for none */ public TargetHttpProxy setHttpFilters(java.util.List<java.lang.String> httpFilters) { this.httpFilters = httpFilters; return this; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * @return value or {@code null} for none */ public java.math.BigInteger getId() { return id; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * @param id id or {@code null} for none */ public TargetHttpProxy setId(java.math.BigInteger id) { this.id = id; return this; } /** * [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies. * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies. * @param kind kind or {@code null} for none */ public TargetHttpProxy setKind(java.lang.String kind) { this.kind = kind; return this; } /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. 
* @param name name or {@code null} for none */ public TargetHttpProxy setName(java.lang.String name) { this.name = name; return this; } /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set to INTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * @return value or {@code null} for none */ public java.lang.Boolean getProxyBind() { return proxyBind; } /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set to INTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * @param proxyBind proxyBind or {@code null} for none */ public TargetHttpProxy setProxyBind(java.lang.Boolean proxyBind) { this.proxyBind = proxyBind; return this; } /** * [Output Only] URL of the region where the regional Target HTTP Proxy resides. This field is not * applicable to global Target HTTP Proxies. * @return value or {@code null} for none */ public java.lang.String getRegion() { return region; } /** * [Output Only] URL of the region where the regional Target HTTP Proxy resides. This field is not * applicable to global Target HTTP Proxies. * @param region region or {@code null} for none */ public TargetHttpProxy setRegion(java.lang.String region) { this.region = region; return this; } /** * [Output Only] Server-defined URL for the resource. * @return value or {@code null} for none */ public java.lang.String getSelfLink() { return selfLink; } /** * [Output Only] Server-defined URL for the resource. * @param selfLink selfLink or {@code null} for none */ public TargetHttpProxy setSelfLink(java.lang.String selfLink) { this.selfLink = selfLink; return this; } /** * [Output Only] Server-defined URL for this resource with the resource id. * @return value or {@code null} for none */ public java.lang.String getSelfLinkWithId() { return selfLinkWithId; } /** * [Output Only] Server-defined URL for this resource with the resource id. * @param selfLinkWithId selfLinkWithId or {@code null} for none */ public TargetHttpProxy setSelfLinkWithId(java.lang.String selfLinkWithId) { this.selfLinkWithId = selfLinkWithId; return this; } /** * URL to the UrlMap resource that defines the mapping from URL to the BackendService. * @return value or {@code null} for none */ public java.lang.String getUrlMap() { return urlMap; } /** * URL to the UrlMap resource that defines the mapping from URL to the BackendService. 
* @param urlMap urlMap or {@code null} for none */ public TargetHttpProxy setUrlMap(java.lang.String urlMap) { this.urlMap = urlMap; return this; } @Override public TargetHttpProxy set(String fieldName, Object value) { return (TargetHttpProxy) super.set(fieldName, value); } @Override public TargetHttpProxy clone() { return (TargetHttpProxy) super.clone(); } }
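/*
 * Illustrative usage sketch, not part of the generated Compute client: it exercises the fluent
 * setters and the Base64 fingerprint helpers of the TargetHttpProxy model above. It assumes the
 * sketch lives in the same package as the model (so no import is needed) and that the generated
 * class has the usual public no-argument constructor; every resource name and URL below is a
 * placeholder.
 */
class TargetHttpProxyUsageSketch {

  public static void main(String[] args) {
    TargetHttpProxy proxy = new TargetHttpProxy()
        .setName("example-proxy")
        .setDescription("Example target HTTP proxy")
        .setUrlMap("https://www.googleapis.com/compute/v1/projects/example-project/global/urlMaps/example-map")
        .setHttpFilters(java.util.Arrays.asList(
            "https://networkservices.googleapis.com/v1alpha1/projects/example-project/locations/global/httpFilters/example-filter"));

    // encodeFingerprint() stores the raw bytes as a URL-safe Base64 string;
    // decodeFingerprint() recovers them, e.g. for comparison with a later get() response.
    proxy.encodeFingerprint(new byte[] {0x01, 0x02, 0x03});
    byte[] fingerprintBytes = proxy.decodeFingerprint();

    System.out.println(proxy.getName() + " fingerprint is " + fingerprintBytes.length
        + " bytes, encoded as " + proxy.getFingerprint());
  }
}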
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spanner; import static com.google.common.base.Preconditions.checkNotNull; import com.google.cloud.ByteArray; import com.google.cloud.Date; import com.google.cloud.Timestamp; import com.google.common.base.Joiner; import com.google.protobuf.ListValue; import com.google.protobuf.NullValue; import com.google.protobuf.Value; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.annotation.Nullable; /** * Represents a row key in a Cloud Spanner table or index. A key is a tuple of values constrained to * the scalar Cloud Spanner types: currently these are {@code BOOLEAN}, {@code INT64}, {@code * FLOAT64}, {@code STRING}, {@code BYTES} and {@code TIMESTAMP}. Values may be null where the table * definition permits it. * * <p>{@code Key} is used to define the row, or endpoints of a range of rows, to retrieve in read * operations or to delete in a mutation. * * <p>{@code Key} instances are immutable. */ public final class Key implements Serializable { private static final Joiner joiner = Joiner.on(',').useForNull("<null>"); private static final com.google.protobuf.Value NULL_PROTO = Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build(); private static final long serialVersionUID = 4433485671785063530L; private final List<Object> parts; private Key(List<Object> parts) { this.parts = Collections.unmodifiableList(parts); } /** * Construct a key with parts specified by {@code values}. Each object in {@code values} must be * either {@code null} or one of the following supported types: * * <ul> * <li>{@code Boolean} for the {@code BOOL} Cloud Spanner type * <li>{@code Integer}, {@code Long} for {@code INT64} * <li>{@code Float}, {@code Double} for {@code FLOAT64} * <li>{@code String} for {@code STRING} * <li>{@link ByteArray} for {@code BYTES} * </ul> * * @throws IllegalArgumentException if any member of {@code values} is not a supported type */ public static Key of(Object... values) { // A literal Key.of(null) results in a null array being passed. Provide a clearer error. checkNotNull( values, "'values' cannot be null. For a literal key containing a single null value, " + "call Key.of((Object) null)."); Builder b = new Builder(false /* builder never leaves this scope */); for (Object value : values) { b.appendObject(value); } return b.build(); } /** Returns a new builder for constructing a key. */ public static Builder newBuilder() { return new Builder(true /* escaped */); } /** Builder for {@link Key} instances. */ public static class Builder { /** * Indicates whether this builder can escape the scope of this class. If so, we must assume that * the builder can be modified after {@code build()} is called and so we perform a defensive * copy. 
*/ private final boolean canEscape; private final ArrayList<Object> buffer = new ArrayList<>(); private Builder(boolean canEscape) { this.canEscape = canEscape; } private Builder(Key key) { canEscape = true; buffer.addAll(key.parts); } /** Appends a {@code BOOL} value to the key. */ public Builder append(@Nullable Boolean value) { buffer.add(value); return this; } /** Appends an {@code INT64} value to the key. */ public Builder append(long value) { buffer.add(value); return this; } /** Appends an {@code INT64} value to the key. */ public Builder append(@Nullable Long value) { buffer.add(value); return this; } /** Appends a {@code FLOAT64} value to the key. */ public Builder append(double value) { buffer.add(value); return this; } /** Appends a {@code FLOAT64} value to the key. */ public Builder append(@Nullable Double value) { buffer.add(value); return this; } /** Appends a {@code STRING} value to the key. */ public Builder append(@Nullable String value) { buffer.add(value); return this; } /** Appends a {@code BYTES} value to the key. */ public Builder append(@Nullable ByteArray value) { buffer.add(value); return this; } /** Appends a {@code TIMESTAMP} value to the key */ public Builder append(@Nullable Timestamp value) { buffer.add(value); return this; } /** Appends a {@code DATE} value to the key */ public Builder append(@Nullable Date value) { buffer.add(value); return this; } /** * Appends an object following the same conversion rules as {@link Key#of(Object...)}. When * using the {@code Builder}, most code should prefer using the strongly typed {@code * append(...)} methods, for both performance and the benefit of compile-time checking. */ public Builder appendObject(@Nullable Object value) { if (value == null) { append((Boolean) null); } else if (value instanceof Boolean) { append((Boolean) value); } else if (value instanceof Integer) { append((Integer) value); } else if (value instanceof Long) { append((Long) value); } else if (value instanceof Float) { append((Float) value); } else if (value instanceof Double) { append((Double) value); } else if (value instanceof String) { append((String) value); } else if (value instanceof ByteArray) { append((ByteArray) value); } else if (value instanceof Timestamp) { append((Timestamp) value); } else if (value instanceof Date) { append((Date) value); } else { throw new IllegalArgumentException( "Unsupported type [" + value.getClass().getCanonicalName() + "] for argument: " + value); } return this; } public Key build() { if (canEscape) { // Copy buffer to preserve immutability contract. return new Key(new ArrayList<>(buffer)); } else { // Internal use of builder that does not escape; no need for defensive copy. return new Key(buffer); } } } /** Returns the number of parts in this key, including {@code null} values. */ public int size() { return parts.size(); } /** * Returns the parts in this key. Each part is represented by the corresponding Cloud Spanner * type's canonical Java type, as listed below. Note that other types supported by {@link * #of(Object...)} are converted to one of the canonical types. 
* * <ul> * <li>{@code BOOL} is represented by {@code Boolean} * <li>{@code INT64} is represented by {@code Long} * <li>{@code FLOAT64} is represented by {@code Double} * <li>{@code STRING} is represented by {@code String} * <li>{@code BYTES} is represented by {@link ByteArray} * <li>{@code TIMESTAMP} is represented by {@link Timestamp} * <li>{@code DATE} is represented by {@link Date} * </ul> * * @return an unmodifiable list containing the key parts */ public Iterable<Object> getParts() { return parts; } /** Returns a builder initialized with the value of this key. */ public Builder toBuilder() { return new Builder(this); } void toString(StringBuilder b) { // TODO(user): Consider limiting the length of string output. // Note: the format produced should match that used for keys in error messages yielded by the // backend. b.append('['); joiner.appendTo(b, parts); b.append(']'); } @Override public String toString() { StringBuilder b = new StringBuilder(); toString(b); return b.toString(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Key that = (Key) o; return parts.equals(that.parts); } @Override public int hashCode() { return parts.hashCode(); } ListValue toProto() { ListValue.Builder builder = ListValue.newBuilder(); for (Object part : parts) { if (part == null) { builder.addValues(NULL_PROTO); } else if (part instanceof Boolean) { builder.addValuesBuilder().setBoolValue((Boolean) part); } else if (part instanceof Long) { builder.addValuesBuilder().setStringValue(part.toString()); } else if (part instanceof Double) { builder.addValuesBuilder().setNumberValue((Double) part); } else if (part instanceof String) { builder.addValuesBuilder().setStringValue((String) part); } else if (part instanceof ByteArray) { builder.addValuesBuilder().setStringValue(((ByteArray) part).toBase64()); } else if (part instanceof Timestamp) { builder.addValuesBuilder().setStringValue(((Timestamp) part).toString()); } else { throw new AssertionError("Illegal key part: " + part.getClass()); } } return builder.build(); } }
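/*
 * Illustrative usage sketch, not part of the client library: it shows the construction paths
 * offered by the Key class above (the static factory, the Builder and toBuilder()). The column
 * values are invented for the example.
 */
import com.google.cloud.ByteArray;
import com.google.cloud.spanner.Key;

public class KeyUsageSketch {

  public static void main(String[] args) {
    // Positional construction; null parts are allowed where the table definition permits them.
    Key songKey = Key.of("Alice", 42L, (Object) null);

    // The same key built through the strongly typed Builder.
    Key sameKey = Key.newBuilder()
        .append("Alice")
        .append(42L)
        .append((String) null)
        .build();

    // toBuilder() copies the existing parts, so a related key can be derived from an existing one.
    Key withBytes = songKey.toBuilder().append(ByteArray.copyFrom(new byte[] {1, 2, 3})).build();

    System.out.println(songKey);                  // [Alice,42,<null>]
    System.out.println(songKey.equals(sameKey));  // true
    System.out.println(withBytes.size());         // 4
  }
}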
/* * #%L * ===================================================== * _____ _ ____ _ _ _ _ * |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | | * | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| | * | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ | * |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_| * \____/ * * ===================================================== * * Hochschule Hannover * (University of Applied Sciences and Arts, Hannover) * Faculty IV, Dept. of Computer Science * Ricklinger Stadtweg 118, 30459 Hannover, Germany * * Email: trust@f4-i.fh-hannover.de * Website: http://trust.f4.hs-hannover.de/ * * This file is part of visitmeta-dataservice, version 0.5.0, * implemented by the Trust@HsH research group at the Hochschule Hannover. * %% * Copyright (C) 2012 - 2015 Trust@HsH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package de.hshannover.f4.trust.visitmeta.ifmap.testcases.multisubscription; import java.util.Date; import java.util.List; import java.util.SortedMap; import org.junit.Test; import de.hshannover.f4.trust.ifmapj.identifier.Identifiers; import de.hshannover.f4.trust.ifmapj.messages.PollResult; import de.hshannover.f4.trust.ifmapj.messages.SearchResult.Type; import de.hshannover.f4.trust.visitmeta.ifmap.AbstractMultiSubscriptionTestCase; import de.hshannover.f4.trust.visitmeta.interfaces.IdentifierGraph; public class ExtensiveSingleMultivalueTest extends AbstractMultiSubscriptionTestCase { private static final Date FIRST_TIMESTAMP = new Date(3333); private static final Date SECOND_TIMESTAMP = new Date(5555); private static final Date THIRD_TIMESTAMP = new Date(8888); private SortedMap<Long, Long> mFirstChangesMap; private SortedMap<Long, Long> mSecondChangesMap; private SortedMap<Long, Long> mThirdChangesMap; @Test public void twoPolls_ShouldReturnTheRightChangeMapSize() { executeFirstTwoPolls(); super.assertEqualsMapSize(mSecondChangesMap, mFirstChangesMap.size() + 1); } @Test public void twoPolls_ShouldReturnTheRightChangeMapChangeValues() { executeFirstTwoPolls(); super.assertEqualsMapValues(mFirstChangesMap, mSecondChangesMap); } @Test public void twoPolls_ShouldReturnTheRightSecondChangeMapChangeValue() { executeFirstTwoPolls(); super.assertEqualsNewValues(mFirstChangesMap, mSecondChangesMap, 1); } @Test public void twoPolls_ShouldReturnTheRightGraph() { executeFirstTwoPolls(); List<IdentifierGraph> currentGraph = super.mService.getCurrentGraph(); super.assertRightGraph(currentGraph, 1, 3, 5); } @Test public void thirdPollSingleValue_ShouldReturnTheRightChangeMapSize() { executeFirstTwoPolls(); executeThirdPollWithSingleValue(); super.assertEqualsMapSize(mThirdChangesMap, mSecondChangesMap.size() + 1); } @Test public void thirdPollSingleValue_ShouldReturnTheRightChangeMapChangeValues() { executeFirstTwoPolls(); executeThirdPollWithSingleValue(); super.assertEqualsMapValues(mSecondChangesMap, mThirdChangesMap); } @Test public void thirdPollSingleValue_ShouldReturnTheRightThirdChangeMapChangeValue() { executeFirstTwoPolls(); executeThirdPollWithSingleValue(); 
super.assertEqualsNewValues(mSecondChangesMap, mThirdChangesMap, 1); } @Test public void thirdPollSingleValue_ShouldReturnTheRightGraph() { executeFirstTwoPolls(); executeThirdPollWithSingleValue(); List<IdentifierGraph> currentGraph = super.mService.getCurrentGraph(); super.assertRightGraph(currentGraph, 1, 4, 6); } @Test public void thirdPollMultiValue_ShouldReturnTheRightChangeMapSize() { executeFirstTwoPolls(); executeThirdPollWithMultiValue(); super.assertEqualsMapSize(mThirdChangesMap, mSecondChangesMap.size() + 1); } @Test public void thirdPollMultiValue_ShouldReturnTheRightChangeMapChangeValues() { executeFirstTwoPolls(); executeThirdPollWithMultiValue(); super.assertEqualsMapValues(mSecondChangesMap, mThirdChangesMap); } @Test public void thirdPollMultiValue_ShouldReturnTheRightThirdChangeMapChangeValue() { executeFirstTwoPolls(); executeThirdPollWithMultiValue(); super.assertEqualsNewValues(mSecondChangesMap, mThirdChangesMap, 1); } @Test public void thirdPollMultiValue_ShouldReturnTheRightGraph() { executeFirstTwoPolls(); executeThirdPollWithMultiValue(); List<IdentifierGraph> currentGraph = super.mService.getCurrentGraph(); super.assertRightGraph(currentGraph, 1, 3, 6); } /** * Makes two polls with different PollResults. The PollResults are the same as if two separate subscriptions had been made. */ private void executeFirstTwoPolls() { // mock first and second poll results PollResult firstPollResult = buildFirstPollResult(); PollResult secondPollResult = buildSecondPollResult(); // run first poll super.startPollTask(firstPollResult); // save current ChangesMap after the first poll mFirstChangesMap = super.mService.getChangesMap(); // run second poll super.startPollTask(secondPollResult); // save current ChangesMap after the second poll mSecondChangesMap = super.mService.getChangesMap(); } private void executeThirdPollWithSingleValue() { PollResult thirdPollResult = buildThirdSingleValuePollResult(); super.startPollTask(thirdPollResult); // save current ChangesMap after the third poll mThirdChangesMap = super.mService.getChangesMap(); } private void executeThirdPollWithMultiValue() { PollResult thirdPollResult = buildThirdMultiValuePollResult(); super.startPollTask(thirdPollResult); // save current ChangesMap after the third poll mThirdChangesMap = super.mService.getChangesMap(); } private PollResult buildFirstPollResult() { return PollResultMock( SearchResultMock(SUB1, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP1, FIRST_TIMESTAMP)), ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC1), CreateArMac(FIRST_TIMESTAMP)), ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP2, FIRST_TIMESTAMP), CreateCapability(CAP3, FIRST_TIMESTAMP)))); } private PollResult buildSecondPollResult() { return PollResultMock( SearchResultMock(SUB2, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP1, FIRST_TIMESTAMP)), ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC1), CreateArMac(FIRST_TIMESTAMP)), ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP2, FIRST_TIMESTAMP), CreateCapability(CAP3, FIRST_TIMESTAMP)), ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC2), CreateArMac(SECOND_TIMESTAMP))), SearchResultMock(SUB1, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC2), CreateArMac(SECOND_TIMESTAMP)))); } private PollResult
buildThirdMultiValuePollResult() { return PollResultMock( SearchResultMock(SUB1, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP4, THIRD_TIMESTAMP))), SearchResultMock(SUB2, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), CreateCapability(CAP4, THIRD_TIMESTAMP)))); } private PollResult buildThirdSingleValuePollResult() { return PollResultMock( SearchResultMock(SUB1, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC4), CreateArMac(THIRD_TIMESTAMP))), SearchResultMock(SUB2, Type.updateResult, ResultItemMock( Identifiers.createAr(ACCESS_REQUEST), Identifiers.createMac(MAC4), CreateArMac(THIRD_TIMESTAMP)))); } }
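/*
 * Minimal illustrative sketch, not part of the visitmeta test suite: a single-poll variant of
 * the mock-and-poll pattern used above. It assumes the same inherited helpers and constants
 * (startPollTask, mService, getChangesMap, assertEqualsMapSize, PollResultMock, SearchResultMock,
 * ResultItemMock, CreateCapability, SUB1, ACCESS_REQUEST, CAP1) from
 * AbstractMultiSubscriptionTestCase; the expected change-map size of 1 is likewise an assumption
 * made only for illustration.
 */
import java.util.Date;
import java.util.SortedMap;

import org.junit.Test;

import de.hshannover.f4.trust.ifmapj.identifier.Identifiers;
import de.hshannover.f4.trust.ifmapj.messages.PollResult;
import de.hshannover.f4.trust.ifmapj.messages.SearchResult.Type;
import de.hshannover.f4.trust.visitmeta.ifmap.AbstractMultiSubscriptionTestCase;

public class SinglePollSketchTest extends AbstractMultiSubscriptionTestCase {

  private static final Date TIMESTAMP = new Date(1111);

  @Test
  public void singlePoll_ShouldRecordOneChange() {
    // One update result for subscription SUB1 attaching a capability to the access-request.
    PollResult pollResult = PollResultMock(
        SearchResultMock(SUB1, Type.updateResult,
            ResultItemMock(
                Identifiers.createAr(ACCESS_REQUEST),
                CreateCapability(CAP1, TIMESTAMP))));

    super.startPollTask(pollResult);

    SortedMap<Long, Long> changesMap = super.mService.getChangesMap();
    super.assertEqualsMapSize(changesMap, 1);
  }
}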
/* * Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information. * */ package com.linkedin.kafka.cruisecontrol.analyzer.goals; import com.linkedin.kafka.cruisecontrol.analyzer.ActionAcceptance; import com.linkedin.kafka.cruisecontrol.analyzer.AnalyzerUtils; import com.linkedin.kafka.cruisecontrol.analyzer.BalancingConstraint; import com.linkedin.kafka.cruisecontrol.analyzer.BalancingAction; import com.linkedin.kafka.cruisecontrol.analyzer.ActionType; import com.linkedin.kafka.cruisecontrol.common.Resource; import com.linkedin.kafka.cruisecontrol.model.Broker; import com.linkedin.kafka.cruisecontrol.model.ClusterModel; import com.linkedin.kafka.cruisecontrol.model.ClusterModelStats; import com.linkedin.kafka.cruisecontrol.model.RawAndDerivedResource; import com.linkedin.kafka.cruisecontrol.model.Replica; import com.linkedin.kafka.cruisecontrol.monitor.ModelCompletenessRequirements; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.SortedSet; import java.util.stream.Collectors; import org.apache.commons.math3.stat.descriptive.moment.Mean; import org.apache.commons.math3.stat.descriptive.moment.Variance; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static com.linkedin.kafka.cruisecontrol.analyzer.ActionAcceptance.ACCEPT; import static com.linkedin.kafka.cruisecontrol.analyzer.ActionAcceptance.REPLICA_REJECT; /** * Soft goal to distribute leader bytes evenly. */ public class LeaderBytesInDistributionGoal extends AbstractGoal { private static final Logger LOG = LoggerFactory.getLogger(LeaderBytesInDistributionGoal.class); private double _meanLeaderBytesIn; private Set<Integer> _overLimitBrokerIds; public LeaderBytesInDistributionGoal() { } /** Testing constructor */ LeaderBytesInDistributionGoal(BalancingConstraint balancingConstraint) { this._balancingConstraint = balancingConstraint; } /** * @deprecated Please use {@link this#actionAcceptance(BalancingAction, ClusterModel)} instead. */ @Override public boolean isActionAcceptable(BalancingAction action, ClusterModel clusterModel) { return actionAcceptance(action, clusterModel) == ACCEPT; } /** * An action is acceptable if it does not move the leader bytes in above the threshold for leader bytes in. * * @param action Action to be checked for acceptance. * @param clusterModel State of the cluster before application of the action. * @return {@link ActionAcceptance#ACCEPT} if the action is acceptable by this goal, * {@link ActionAcceptance#REPLICA_REJECT} otherwise. */ @Override public ActionAcceptance actionAcceptance(BalancingAction action, ClusterModel clusterModel) { Replica sourceReplica = clusterModel.broker(action.sourceBrokerId()).replica(action.topicPartition()); Broker destinationBroker = clusterModel.broker(action.destinationBrokerId()); initMeanLeaderBytesIn(clusterModel); if (!sourceReplica.isLeader()) { switch (action.balancingAction()) { case REPLICA_SWAP: if (!destinationBroker.replica(action.destinationTopicPartition()).isLeader()) { // No leadership bytes are being swapped between source and destination. return ACCEPT; } break; case REPLICA_MOVEMENT: // No leadership bytes are being moved to destination. 
return ACCEPT; case LEADERSHIP_MOVEMENT: throw new IllegalStateException("Attempt to move leadership from the follower."); default: throw new IllegalArgumentException("Unsupported balancing action " + action.balancingAction() + " is provided."); } } double sourceReplicaUtilization = sourceReplica.load().expectedUtilizationFor(Resource.NW_IN); double newDestLeaderBytesIn; switch (action.balancingAction()) { case REPLICA_SWAP: double destinationReplicaUtilization = destinationBroker.replica(action.destinationTopicPartition()).load() .expectedUtilizationFor(Resource.NW_IN); newDestLeaderBytesIn = destinationBroker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN) + sourceReplicaUtilization - destinationReplicaUtilization; Broker sourceBroker = clusterModel.broker(action.sourceBrokerId()); double newSourceLeaderBytesIn = sourceBroker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN) + destinationReplicaUtilization - sourceReplicaUtilization; if (newSourceLeaderBytesIn > balanceThreshold(clusterModel, sourceBroker.id())) { return REPLICA_REJECT; } break; case REPLICA_MOVEMENT: case LEADERSHIP_MOVEMENT: newDestLeaderBytesIn = destinationBroker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN) + sourceReplicaUtilization; break; default: throw new IllegalArgumentException("Unsupported balancing action " + action.balancingAction() + " is provided."); } return !(newDestLeaderBytesIn > balanceThreshold(clusterModel, destinationBroker.id())) ? ACCEPT : REPLICA_REJECT; } @Override public ClusterModelStatsComparator clusterModelStatsComparator() { return new LeaderBytesInDistributionGoalStatsComparator(); } @Override public ModelCompletenessRequirements clusterModelCompletenessRequirements() { return new ModelCompletenessRequirements(_numWindows, _minMonitoredPartitionPercentage, false); } @Override public String name() { return LeaderBytesInDistributionGoal.class.getSimpleName(); } @Override protected SortedSet<Broker> brokersToBalance(ClusterModel clusterModel) { // Brokers having inbound network traffic over the balance threshold for inbound traffic are eligible for balancing. SortedSet<Broker> brokersToBalance = clusterModel.brokers(); for (Iterator<Broker> iterator = brokersToBalance.iterator(); iterator.hasNext(); ) { Broker broker = iterator.next(); double brokerUtilizationForNwIn = broker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN); if (brokerUtilizationForNwIn <= balanceThreshold(clusterModel, broker.id())) { iterator.remove(); } } return brokersToBalance; } @Override protected boolean selfSatisfied(ClusterModel clusterModel, BalancingAction action) { if (action.balancingAction() != ActionType.LEADERSHIP_MOVEMENT) { throw new IllegalStateException("Found balancing action " + action.balancingAction() + " but expected leadership movement."); } return actionAcceptance(action, clusterModel) == ACCEPT; } @Override protected void initGoalState(ClusterModel clusterModel, Set<String> excludedTopics) { // While proposals exclude the excludedTopics, the leader bytes in still considers replicas of the excludedTopics. _meanLeaderBytesIn = 0.0; _overLimitBrokerIds = new HashSet<>(); } @Override protected void updateGoalState(ClusterModel clusterModel, Set<String> excludedTopics) { // While proposals exclude the excludedTopics, the leader bytes in still considers replicas of the excludedTopics. 
if (!_overLimitBrokerIds.isEmpty()) { LOG.warn("There were still {} brokers over the limit.", _overLimitBrokerIds.size()); _succeeded = false; } finish(); } @Override protected void rebalanceForBroker(Broker broker, ClusterModel clusterModel, Set<Goal> optimizedGoals, Set<String> excludedTopics) { double balanceThreshold = balanceThreshold(clusterModel, broker.id()); if (broker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN) < balanceThreshold) { return; } List<Replica> leaderReplicasSortedByBytesIn = broker.replicas().stream() .filter(Replica::isLeader) .filter(r -> !shouldExclude(r, excludedTopics)) .sorted((a, b) -> Double.compare(b.load().expectedUtilizationFor(Resource.NW_IN), a.load().expectedUtilizationFor(Resource.NW_IN))) .collect(Collectors.toList()); boolean overThreshold = true; Iterator<Replica> leaderReplicaIt = leaderReplicasSortedByBytesIn.iterator(); while (overThreshold && leaderReplicaIt.hasNext()) { Replica leaderReplica = leaderReplicaIt.next(); List<Replica> followers = clusterModel.partition(leaderReplica.topicPartition()).followers(); List<Broker> eligibleBrokers = followers.stream().map(Replica::broker) .sorted(Comparator.comparingDouble(a -> a.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN))) .collect(Collectors.toList()); maybeApplyBalancingAction(clusterModel, leaderReplica, eligibleBrokers, ActionType.LEADERSHIP_MOVEMENT, optimizedGoals); overThreshold = broker.leadershipLoadForNwResources().expectedUtilizationFor(Resource.NW_IN) > balanceThreshold; } if (overThreshold) { _overLimitBrokerIds.add(broker.id()); } } private void initMeanLeaderBytesIn(ClusterModel clusterModel) { if (_meanLeaderBytesIn == 0.0) { _meanLeaderBytesIn = meanLeaderResourceUtilization(clusterModel.brokers(), Resource.NW_IN); } } private static double meanLeaderResourceUtilization(Collection<Broker> brokers, Resource resource) { double accumulator = 0.0; int brokerCount = 0; for (Broker broker : brokers) { if (!broker.isAlive()) { continue; } accumulator += broker.leadershipLoadForNwResources().expectedUtilizationFor(resource); brokerCount++; } return accumulator / brokerCount; } /** * In this context of this goal the balance threshold can not be measured against an absolute number since leader bytes * in is constrained by network capacity but also depends on follower bytes in. We also reuse the NW_IN low utilization * threshold to avoid unnecessary rebalance. * @param clusterModel non-null * @param brokerId the brokerId * @return a non-negative value */ private double balanceThreshold(ClusterModel clusterModel, int brokerId) { initMeanLeaderBytesIn(clusterModel); double lowUtilizationThreshold = _balancingConstraint.lowUtilizationThreshold(Resource.NW_IN) * clusterModel.broker(brokerId).capacityFor(Resource.NW_IN); // We only balance leader bytes in rate of the brokers whose leader bytes in rate is higher than the minimum // balancing threshold. 
return Math.max(_meanLeaderBytesIn * _balancingConstraint.resourceBalancePercentage(Resource.NW_IN), lowUtilizationThreshold); } private class LeaderBytesInDistributionGoalStatsComparator implements ClusterModelStatsComparator { private String _reasonForLastNegativeResult; @Override public int compare(ClusterModelStats stats1, ClusterModelStats stats2) { double[] stat1 = stats1.utilizationMatrix()[RawAndDerivedResource.LEADER_NW_IN.ordinal()]; double meanPreLeaderBytesIn = new Mean().evaluate(stat1, 0, stat1.length); double threshold = meanPreLeaderBytesIn * _balancingConstraint.resourceBalancePercentage(Resource.NW_IN); if (Arrays.stream(stat1).noneMatch(v -> v > threshold)) { return 1; } double[] stat2 = stats2.utilizationMatrix()[RawAndDerivedResource.LEADER_NW_IN.ordinal()]; double variance1 = new Variance().evaluate(stat1); double variance2 = new Variance().evaluate(stat2); int result = AnalyzerUtils.compare(Math.sqrt(variance2), Math.sqrt(variance1), Resource.NW_IN); if (result < 0) { _reasonForLastNegativeResult = String.format("Violated leader bytes in balancing. preVariance: %.3f " + "postVariance: %.3f.", variance2, variance1); } return result; } @Override public String explainLastComparison() { return _reasonForLastNegativeResult; } } }
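/*
 * Standalone arithmetic sketch, not Cruise Control code: it illustrates how balanceThreshold(...)
 * above combines the mean leader bytes-in rate with the NW_IN low-utilization bound. All numbers
 * are invented, and the 1.10 balance percentage is only an assumed configuration value.
 */
public class LeaderBytesInThresholdSketch {

  public static void main(String[] args) {
    double meanLeaderBytesIn = 40.0 * 1024 * 1024;    // mean leader NW_IN over alive brokers (bytes/s)
    double resourceBalancePercentage = 1.10;          // assumed: allow 10% above the mean
    double brokerNwInCapacity = 100.0 * 1024 * 1024;  // broker inbound network capacity (bytes/s)
    double lowUtilizationThreshold = 0.20;            // fraction of capacity below which no rebalance happens

    // Same shape as the goal's balanceThreshold(): the larger of the relative bound and the
    // low-utilization bound, so lightly loaded brokers are never flagged as over the limit.
    double balanceThreshold = Math.max(
        meanLeaderBytesIn * resourceBalancePercentage,
        lowUtilizationThreshold * brokerNwInCapacity);

    System.out.printf("balance threshold = %.0f bytes/s%n", balanceThreshold);
  }
}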
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators.sort; import org.apache.flink.api.common.typeutils.TypeComparator; import org.apache.flink.api.common.typeutils.TypeSerializerFactory; import org.apache.flink.api.common.typeutils.base.IntComparator; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.runtime.io.disk.iomanager.IOManager; import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.memory.MemoryManager; import org.apache.flink.runtime.operators.testutils.DummyInvokable; import org.apache.flink.runtime.operators.testutils.RandomIntPairGenerator; import org.apache.flink.runtime.operators.testutils.TestData; import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.KeyMode; import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.ValueMode; import org.apache.flink.runtime.operators.testutils.types.IntPair; import org.apache.flink.runtime.operators.testutils.types.IntPairSerializer; import org.apache.flink.util.MutableObjectIterator; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ExternalSortITCase { private static final Logger LOG = LoggerFactory.getLogger(ExternalSortITCase.class); private static final long SEED = 649180756312423613L; private static final int KEY_MAX = Integer.MAX_VALUE; private static final int VALUE_LENGTH = 114; private static final String VAL = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; private static final int NUM_PAIRS = 200000; private static final int MEMORY_SIZE = 1024 * 1024 * 78; private final AbstractInvokable parentTask = new DummyInvokable(); private IOManager ioManager; private MemoryManager memoryManager; private TypeSerializerFactory<Tuple2<Integer, String>> pactRecordSerializer; private TypeComparator<Tuple2<Integer, String>> pactRecordComparator; private boolean testSuccess; // -------------------------------------------------------------------------------------------- @SuppressWarnings("unchecked") @Before public void beforeTest() { this.memoryManager = new MemoryManager(MEMORY_SIZE, 1); this.ioManager = new IOManagerAsync(); this.pactRecordSerializer = TestData.getIntStringTupleSerializerFactory(); this.pactRecordComparator = TestData.getIntStringTupleComparator(); } @After public void afterTest() { this.ioManager.shutdown(); if (!this.ioManager.isProperlyShutDown()) { Assert.fail("I/O Manager was not properly shut down."); } if (this.memoryManager != null && testSuccess) { Assert.assertTrue("Memory leak: not all segments have been 
returned to the memory manager.", this.memoryManager.verifyEmpty()); this.memoryManager.shutdown(); this.memoryManager = null; } } // -------------------------------------------------------------------------------------------- @Test public void testInMemorySort() { try { // comparator final TypeComparator<Integer> keyComparator = new IntComparator(true); final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.CONSTANT, VAL); final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, NUM_PAIRS); // merge iterator LOG.debug("Initializing sortmerger..."); Sorter<Tuple2<Integer, String>> merger = new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask, this.pactRecordSerializer, this.pactRecordComparator, (double)64/78, 2, 0.9f, true /*use large record handler*/, true); // emit data LOG.debug("Reading and sorting data..."); // check order MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator(); LOG.debug("Checking results..."); int pairsEmitted = 1; Tuple2<Integer, String> rec1 = new Tuple2<>(); Tuple2<Integer, String> rec2 = new Tuple2<>(); Assert.assertTrue((rec1 = iterator.next(rec1)) != null); while ((rec2 = iterator.next(rec2)) != null) { pairsEmitted++; Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0); Tuple2<Integer, String> tmp = rec1; rec1 = rec2; rec2 = tmp; } Assert.assertTrue(NUM_PAIRS == pairsEmitted); merger.close(); testSuccess = true; } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testInMemorySortUsing10Buffers() { try { // comparator final TypeComparator<Integer> keyComparator = new IntComparator(true); final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.CONSTANT, VAL); final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, NUM_PAIRS); // merge iterator LOG.debug("Initializing sortmerger..."); Sorter<Tuple2<Integer, String>> merger = new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask, this.pactRecordSerializer, this.pactRecordComparator, (double)64/78, 10, 2, 0.9f, true /*use large record handler*/, false); // emit data LOG.debug("Reading and sorting data..."); // check order MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator(); LOG.debug("Checking results..."); int pairsEmitted = 1; Tuple2<Integer, String> rec1 = new Tuple2<>(); Tuple2<Integer, String> rec2 = new Tuple2<>(); Assert.assertTrue((rec1 = iterator.next(rec1)) != null); while ((rec2 = iterator.next(rec2)) != null) { pairsEmitted++; Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0); Tuple2<Integer, String> tmp = rec1; rec1 = rec2; rec2 = tmp; } Assert.assertTrue(NUM_PAIRS == pairsEmitted); merger.close(); testSuccess = true; } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testSpillingSort() { try { // comparator final TypeComparator<Integer> keyComparator = new IntComparator(true); final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.CONSTANT, VAL); final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, NUM_PAIRS); // merge iterator LOG.debug("Initializing sortmerger..."); Sorter<Tuple2<Integer, String>> merger = 
new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask, this.pactRecordSerializer, this.pactRecordComparator, (double)16/78, 64, 0.7f, true /*use large record handler*/, true); // emit data LOG.debug("Reading and sorting data..."); // check order MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator(); LOG.debug("Checking results..."); int pairsEmitted = 1; Tuple2<Integer, String> rec1 = new Tuple2<>(); Tuple2<Integer, String> rec2 = new Tuple2<>(); Assert.assertTrue((rec1 = iterator.next(rec1)) != null); while ((rec2 = iterator.next(rec2)) != null) { pairsEmitted++; Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0); Tuple2<Integer, String> tmp = rec1; rec1 = rec2; rec2 = tmp; } Assert.assertTrue(NUM_PAIRS == pairsEmitted); merger.close(); testSuccess = true; } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testSpillingSortWithIntermediateMerge() { try { // amount of pairs final int PAIRS = 10000000; // comparator final TypeComparator<Integer> keyComparator = new IntComparator(true); final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH); final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, PAIRS); // merge iterator LOG.debug("Initializing sortmerger..."); Sorter<Tuple2<Integer, String>> merger = new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask, this.pactRecordSerializer, this.pactRecordComparator, (double)64/78, 16, 0.7f, true /*use large record handler*/, false); // emit data LOG.debug("Emitting data..."); // check order MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator(); LOG.debug("Checking results..."); int pairsRead = 1; int nextStep = PAIRS / 20; Tuple2<Integer, String> rec1 = new Tuple2<>(); Tuple2<Integer, String> rec2 = new Tuple2<>(); Assert.assertTrue((rec1 = iterator.next(rec1)) != null); while ((rec2 = iterator.next(rec2)) != null) { pairsRead++; Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0); Tuple2<Integer, String> tmp = rec1; rec1 = rec2; rec2 = tmp; // log if (pairsRead == nextStep) { nextStep += PAIRS / 20; } } Assert.assertEquals("Not all pairs were read back in.", PAIRS, pairsRead); merger.close(); testSuccess = true; } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } @Test public void testSpillingSortWithIntermediateMergeIntPair() { try { // amount of pairs final int PAIRS = 50000000; // comparator final RandomIntPairGenerator generator = new RandomIntPairGenerator(12345678, PAIRS); final TypeSerializerFactory<IntPair> serializerFactory = new IntPairSerializer.IntPairSerializerFactory(); final TypeComparator<IntPair> comparator = new TestData.IntPairComparator(); // merge iterator LOG.debug("Initializing sortmerger..."); Sorter<IntPair> merger = new UnilateralSortMerger<IntPair>(this.memoryManager, this.ioManager, generator, this.parentTask, serializerFactory, comparator, (double)64/78, 4, 0.7f, true /*use large record handler*/, true); // emit data LOG.debug("Emitting data..."); // check order MutableObjectIterator<IntPair> iterator = merger.getIterator(); LOG.debug("Checking results..."); int pairsRead = 1; int nextStep = PAIRS / 20; IntPair rec1 = new IntPair(); IntPair rec2 = new IntPair(); Assert.assertTrue((rec1 = iterator.next(rec1)) != null); while ((rec2 = iterator.next(rec2)) != null) { final int k1 = 
rec1.getKey(); final int k2 = rec2.getKey(); pairsRead++; Assert.assertTrue(k1 - k2 <= 0); IntPair tmp = rec1; rec1 = rec2; rec2 = tmp; // log if (pairsRead == nextStep) { nextStep += PAIRS / 20; } } Assert.assertEquals("Not all pairs were read back in.", PAIRS, pairsRead); merger.close(); testSuccess = true; } catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } }
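/*
 * Standalone sketch with no Flink dependencies: the order-checking idiom used by the tests
 * above, where two reusable record holders are swapped so the previous record can be compared
 * with the next one without allocating per element. A plain java.util.Iterator stands in for
 * Flink's MutableObjectIterator, and the int[] records are invented sample data.
 */
import java.util.Arrays;
import java.util.Iterator;

public class SortedOrderCheckSketch {

  public static void main(String[] args) {
    Iterator<int[]> sorted = Arrays.asList(new int[] {1}, new int[] {3}, new int[] {7}).iterator();

    int[] rec1 = sorted.next();
    int[] rec2;
    int pairsEmitted = 1;
    while (sorted.hasNext()) {
      rec2 = sorted.next();
      pairsEmitted++;
      if (rec1[0] > rec2[0]) {
        throw new AssertionError("Records are not in ascending key order.");
      }
      // Swap the holders, mirroring the rec1/rec2 swap in the tests above; in the real tests the
      // freed holder is handed back to the iterator for object reuse.
      int[] tmp = rec1;
      rec1 = rec2;
      rec2 = tmp;
    }
    System.out.println("Checked " + pairsEmitted + " records.");
  }
}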
/* * Copyright 2007 Yusuke Yamamoto * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package twitter4j; import java.util.HashMap; import java.util.Map; /** * @author Yusuke Yamamoto - yusuke at mac.com * @since Twitter4J 2.2.3 */ public class MediaEntityJSONImpl extends EntityIndex implements MediaEntity { private static final long serialVersionUID = 3609683338035442290L; protected long id; protected String url; protected String mediaURL; protected String mediaURLHttps; protected String expandedURL; protected String displayURL; protected Map<Integer, MediaEntity.Size> sizes; protected String type; MediaEntityJSONImpl(JSONObject json) throws TwitterException { try { JSONArray indicesArray = json.getJSONArray("indices"); setStart(indicesArray.getInt(0)); setEnd(indicesArray.getInt(1)); this.id = ParseUtil.getLong("id", json); this.url = json.getString("url"); this.expandedURL = json.getString("expanded_url"); this.mediaURL = json.getString("media_url"); this.mediaURLHttps = json.getString("media_url_https"); this.displayURL = json.getString("display_url"); JSONObject sizes = json.getJSONObject("sizes"); this.sizes = new HashMap<Integer, MediaEntity.Size>(4); // thumbworkarounding API side issue addMediaEntitySizeIfNotNull(this.sizes, sizes, MediaEntity.Size.LARGE, "large"); addMediaEntitySizeIfNotNull(this.sizes, sizes, MediaEntity.Size.MEDIUM, "medium"); addMediaEntitySizeIfNotNull(this.sizes, sizes, MediaEntity.Size.SMALL, "small"); addMediaEntitySizeIfNotNull(this.sizes, sizes, MediaEntity.Size.THUMB, "thumb"); if (!json.isNull("type")) { this.type = json.getString("type"); } } catch (JSONException jsone) { throw new TwitterException(jsone); } } private void addMediaEntitySizeIfNotNull(Map<Integer, MediaEntity.Size> sizes, JSONObject sizesJSON, Integer size, String key) throws JSONException { if (!sizesJSON.isNull(key)) { sizes.put(size, new Size(sizesJSON.getJSONObject(key))); } } /* For serialization purposes only. */ /* package */ MediaEntityJSONImpl() { } @Override public long getId() { return id; } @Override public String getMediaURL() { return mediaURL; } @Override public String getMediaURLHttps() { return mediaURLHttps; } @Override public String getText() { return url; } @Override public String getURL() { return url; } @Override public String getDisplayURL() { return displayURL; } @Override public String getExpandedURL() { return expandedURL; } @Override public Map<Integer, MediaEntity.Size> getSizes() { return sizes; } @Override public String getType() { return type; } @Override public int getStart() { return super.getStart(); } @Override public int getEnd() { return super.getEnd(); } static class Size implements MediaEntity.Size { private static final long serialVersionUID = -2515842281909325169L; int width; int height; int resize; /* For serialization purposes only. */ /* package */ Size() { } Size(JSONObject json) throws JSONException { width = json.getInt("w"); height = json.getInt("h"); resize = "fit".equals(json.getString("resize")) ? 
MediaEntity.Size.FIT : MediaEntity.Size.CROP; } @Override public int getWidth() { return width; } @Override public int getHeight() { return height; } @Override public int getResize() { return resize; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof Size)) return false; Size size = (Size) o; if (height != size.height) return false; if (resize != size.resize) return false; if (width != size.width) return false; return true; } @Override public int hashCode() { int result = width; result = 31 * result + height; result = 31 * result + resize; return result; } @Override public String toString() { return "Size{" + "width=" + width + ", height=" + height + ", resize=" + resize + '}'; } } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof MediaEntityJSONImpl)) return false; MediaEntityJSONImpl that = (MediaEntityJSONImpl) o; if (id != that.id) return false; return true; } @Override public int hashCode() { return (int) (id ^ (id >>> 32)); } @Override public String toString() { return "MediaEntityJSONImpl{" + "id=" + id + ", url=" + url + ", mediaURL=" + mediaURL + ", mediaURLHttps=" + mediaURLHttps + ", expandedURL=" + expandedURL + ", displayURL='" + displayURL + '\'' + ", sizes=" + sizes + ", type=" + type + '}'; } }
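/*
 * Illustrative sketch, not part of Twitter4J itself: it reads a parsed media entity through the
 * public MediaEntity interface that MediaEntityJSONImpl above implements. The Status parameter
 * is assumed to come from an ordinary Twitter4J call (for example twitter.showStatus(id)), and
 * getMediaEntities() on Status is assumed from the public Twitter4J API rather than shown here.
 */
import java.util.Map;

import twitter4j.MediaEntity;
import twitter4j.Status;

public class MediaEntitySketch {

  static void printMedia(Status status) {
    for (MediaEntity media : status.getMediaEntities()) {
      System.out.println(media.getType() + " -> " + media.getMediaURLHttps());

      // The sizes map is keyed by the MediaEntity.Size integer constants (THUMB, SMALL, MEDIUM, LARGE).
      Map<Integer, MediaEntity.Size> sizes = media.getSizes();
      MediaEntity.Size large = sizes.get(MediaEntity.Size.LARGE);
      if (large != null) {
        String resize = large.getResize() == MediaEntity.Size.FIT ? "fit" : "crop";
        System.out.println("  large: " + large.getWidth() + "x" + large.getHeight() + " (" + resize + ")");
      }
    }
  }
}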
/* * Copyright (c) 2014-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EditPartConstants.DEFAULT_PROPERTY_VALUE_TEXT; import static org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EditPartConstants.RMSEQEUENCE_MEDIATOR_ICON_PATH; import org.apache.commons.lang.StringUtils; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.Label; import org.eclipse.draw2d.PositionConstants; import org.eclipse.draw2d.Shape; import org.eclipse.draw2d.ToolbarLayout; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.commands.Command; import org.eclipse.gef.editpolicies.LayoutEditPolicy; import org.eclipse.gef.editpolicies.NonResizableEditPolicy; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles; import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator; import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout; import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedSizedAbstractMediator; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ShowPropertyViewEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.editpolicy.FeedbackIndicateDragDropEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.RMSequenceMediatorCanonicalEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.RMSequenceMediatorItemSemanticEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry; /** * @generated NOT */ public class RMSequenceMediatorEditPart extends FixedSizedAbstractMediator { /** * @generated */ public static final int VISUAL_ID = 3522; /** * @generated */ protected IFigure contentPane; /** * @generated */ public RMSequenceMediatorEditPart(View view) { super(view); } /** * @generated NOT */ protected void createDefaultEditPolicies() { installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicy()); super.createDefaultEditPolicies(); installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new RMSequenceMediatorItemSemanticEditPolicy()); 
installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new DragDropEditPolicy()); installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new FeedbackIndicateDragDropEditPolicy()); installEditPolicy(EditPolicyRoles.CANONICAL_ROLE, new RMSequenceMediatorCanonicalEditPolicy()); installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy()); // For handle Double click Event. installEditPolicy(EditPolicyRoles.OPEN_ROLE, new ShowPropertyViewEditPolicy()); // XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies // removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE); } /** * @generated */ protected LayoutEditPolicy createLayoutEditPolicy() { org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() { protected EditPolicy createChildEditPolicy(EditPart child) { View childView = (View) child.getModel(); switch (EsbVisualIDRegistry.getVisualID(childView)) { case RMSequenceMediatorInputConnectorEditPart.VISUAL_ID: case RMSequenceMediatorOutputConnectorEditPart.VISUAL_ID: return new BorderItemSelectionEditPolicy(); } EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (result == null) { result = new NonResizableEditPolicy(); } return result; } protected Command getMoveChildrenCommand(Request request) { return null; } protected Command getCreateCommand(CreateRequest request) { return null; } }; return lep; } /** * @generated NOT */ protected IFigure createNodeShape() { return primaryShape = new RMSequenceMediatorFigure() { public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) { super.setBounds(rect); if (this.getBounds().getLocation().x != 0 && this.getBounds().getLocation().y != 0) { connectToMostSuitableElement(); reAllocate(rect); } }; }; } /** * @generated */ public RMSequenceMediatorFigure getPrimaryShape() { return (RMSequenceMediatorFigure) primaryShape; } protected boolean addFixedChild(EditPart childEditPart) { if (childEditPart instanceof RMSequenceMediatorDescriptionEditPart) { ((RMSequenceMediatorDescriptionEditPart) childEditPart).setLabel(getPrimaryShape() .getRMSequenceMediatorDescriptionLabel()); return true; } if (childEditPart instanceof RMSequenceMediatorInputConnectorEditPart) { IFigure borderItemFigure = ((RMSequenceMediatorInputConnectorEditPart) childEditPart).getFigure(); BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.WEST, 0.5); getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator); return true; } else if (childEditPart instanceof RMSequenceMediatorOutputConnectorEditPart) { IFigure borderItemFigure = ((RMSequenceMediatorOutputConnectorEditPart) childEditPart).getFigure(); BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(), borderItemFigure, PositionConstants.EAST, 0.5); getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator); return true; } /* if (childEditPart instanceof PropertyMediatorPropertyNameEditPart) { ((PropertyMediatorPropertyNameEditPart) childEditPart) .setLabel(getPrimaryShape() .getFigurePropertyMediatorPropertyValueLabel()); return true; }*/ return false; } protected boolean removeFixedChild(EditPart childEditPart) { if (childEditPart instanceof RMSequenceMediatorDescriptionEditPart) { return true; } return false; } protected void addChildVisual(EditPart childEditPart, int index) { if (addFixedChild(childEditPart)) { 
return; } super.addChildVisual(childEditPart, -1); } protected void removeChildVisual(EditPart childEditPart) { if (removeFixedChild(childEditPart)) { return; } super.removeChildVisual(childEditPart); } /** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model * so you may safely remove <i>generated</i> tag and modify it. * * @generated NOT */ protected NodeFigure createMainFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new ToolbarLayout(true)); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; } /** * Default implementation treats passed figure as content pane. * Respects layout one may have set for generated figure. * @param nodeShape instance of generated figure class * @generated */ protected IFigure setupContentPane(IFigure nodeShape) { if (nodeShape.getLayoutManager() == null) { ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout(); layout.setSpacing(5); nodeShape.setLayoutManager(layout); } return nodeShape; // use nodeShape itself as contentPane } /** * @generated */ public IFigure getContentPane() { if (contentPane != null) { return contentPane; } return super.getContentPane(); } /** * @generated */ protected void setForegroundColor(Color color) { if (primaryShape != null) { primaryShape.setForegroundColor(color); } } /** * @generated */ protected void setBackgroundColor(Color color) { if (primaryShape != null) { primaryShape.setBackgroundColor(color); } } /** * @generated */ protected void setLineWidth(int width) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineWidth(width); } } /** * @generated */ protected void setLineType(int style) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineStyle(style); } } /** * @generated */ public class RMSequenceMediatorFigure extends EsbGraphicalShapeWithLabel { /** * @generated */ private WrappingLabel fFigureRMSequenceMediatorPropertyValue; private WrappingLabel rmSequenceMediatorDescriptionLabel; /** * @generated */ public RMSequenceMediatorFigure() { this.setBackgroundColor(THIS_BACK); createContents(); } /** * @generated NOT */ private void createContents() { fFigureRMSequenceMediatorPropertyValue = new WrappingLabel(); fFigureRMSequenceMediatorPropertyValue .setText(DEFAULT_PROPERTY_VALUE_TEXT); fFigureRMSequenceMediatorPropertyValue.setAlignment(SWT.CENTER); rmSequenceMediatorDescriptionLabel = getPropertyNameLabel(); } /** * @generated */ public WrappingLabel getFigureRMSequenceMediatorPropertyValue() { return fFigureRMSequenceMediatorPropertyValue; } public WrappingLabel getRMSequenceMediatorDescriptionLabel() { return rmSequenceMediatorDescriptionLabel; } public String getIconPath() { return RMSEQEUENCE_MEDIATOR_ICON_PATH; } public String getNodeName() { return Messages.RMSequenceMediatorEditPart_NodeName; } public IFigure getToolTip() { if (StringUtils.isEmpty(toolTipMessage)) { return new Label( Messages.RMSequenceMediatorEditPart_ToolTipMessage); } else { return new Label(toolTipMessage); } } } /** * @generated */ static final Color THIS_BACK = new Color(null, 230, 230, 230); }
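/*
 * Illustrative Draw2D sketch, not generated tooling code: it assembles a small figure the same
 * way RMSequenceMediatorFigure above does - a toolbar layout, a centered WrappingLabel for the
 * property value and a plain Label as the tool tip - using the same Draw2D and GMF runtime
 * classes the edit part relies on. The label and tool-tip strings are invented placeholders.
 */
import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;

public class MediatorFigureSketch extends Figure {

    private final WrappingLabel propertyValueLabel = new WrappingLabel();

    public MediatorFigureSketch() {
        setLayoutManager(new ToolbarLayout(false));
        setBackgroundColor(new Color(null, 230, 230, 230)); // same grey as THIS_BACK above
        propertyValueLabel.setText("Sample RMSequence value"); // placeholder text
        propertyValueLabel.setAlignment(SWT.CENTER);
        add(propertyValueLabel);
    }

    @Override
    public IFigure getToolTip() {
        return new Label("RMSequence mediator sketch"); // placeholder tool tip
    }
}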
package com.Da_Technomancer.crossroads.tileentities.fluid; import javax.annotation.Nullable; import com.Da_Technomancer.crossroads.API.Capabilities; import com.Da_Technomancer.crossroads.API.EnergyConverters; import com.Da_Technomancer.crossroads.API.rotary.IAxisHandler; import com.Da_Technomancer.crossroads.API.rotary.IAxleHandler; import com.Da_Technomancer.crossroads.fluids.BlockDistilledWater; import com.Da_Technomancer.crossroads.fluids.BlockSteam; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.EnumFacing; import net.minecraft.util.ITickable; import net.minecraftforge.common.capabilities.Capability; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.fluids.capability.CapabilityFluidHandler; import net.minecraftforge.fluids.capability.FluidTankProperties; import net.minecraftforge.fluids.capability.IFluidHandler; import net.minecraftforge.fluids.capability.IFluidTankProperties; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; public class SteamTurbineTileEntity extends TileEntity implements ITickable{ private FluidStack steamContent; private FluidStack waterContent; private static final int CAPACITY = 10_000; public static final int LIMIT = 5; private final double[] motionData = new double[4]; @Override public void update(){ if(world.isRemote){ IAxleHandler gear = null; TileEntity te = world.getTileEntity(pos.offset(EnumFacing.UP)); if(te != null && te.hasCapability(Capabilities.AXLE_HANDLER_CAPABILITY, EnumFacing.DOWN)){ gear = te.getCapability(Capabilities.AXLE_HANDLER_CAPABILITY, EnumFacing.DOWN); } completion = (float) (gear == null ? 0 : gear.getAngle()); return; } if(steamContent != null){ runMachine(); } } private float completion; /** * This uses the angle of the attached gear instead of calculating its own for a few reasons. It will always be attached when it should spin, and should always have the same angle as the attached gear (no point calculating). */ @SideOnly(Side.CLIENT) public float getCompletion(){ return completion; } private void runMachine(){ int limit = steamContent.amount / 100; limit = Math.min(limit, (CAPACITY - (waterContent == null ? 0 : waterContent.amount)) / 100); limit = Math.min(limit, LIMIT); if(limit != 0){ axleHandler.addEnergy(((double) limit) * .1D * EnergyConverters.DEG_PER_BUCKET_STEAM / EnergyConverters.DEG_PER_JOULE, true, true); steamContent.amount -= limit * 100; if(steamContent.amount <= 0){ steamContent = null; } waterContent = new FluidStack(BlockDistilledWater.getDistilledWater(), (waterContent == null ? 
0 : waterContent.amount) + (100 * limit)); } } @Override public void readFromNBT(NBTTagCompound nbt){ super.readFromNBT(nbt); steamContent = FluidStack.loadFluidStackFromNBT(nbt); waterContent = FluidStack.loadFluidStackFromNBT((NBTTagCompound) nbt.getTag("water")); for(int i = 0; i < 4; i++){ motionData[i] = nbt.getDouble("motion" + i); } } @Override public NBTTagCompound writeToNBT(NBTTagCompound nbt){ super.writeToNBT(nbt); if(steamContent != null){ steamContent.writeToNBT(nbt); } NBTTagCompound waterHolder = new NBTTagCompound(); if(waterContent != null){ waterContent.writeToNBT(waterHolder); } nbt.setTag("water", waterHolder); for(int i = 0; i < 4; i++){ nbt.setDouble("motion" + i, motionData[i]); } return nbt; } private final IFluidHandler waterHandler = new WaterFluidHandler(); private final IFluidHandler steamHandler = new SteamFluidHandler(); private final IFluidHandler innerHandler = new InnerFluidHandler(); private final IAxleHandler axleHandler = new AxleHandler(); @Override public boolean hasCapability(Capability<?> capability, @Nullable EnumFacing facing){ if(capability == CapabilityFluidHandler.FLUID_HANDLER_CAPABILITY && facing != EnumFacing.UP){ return true; } if(capability == Capabilities.AXLE_HANDLER_CAPABILITY && facing == EnumFacing.UP){ return true; } return super.hasCapability(capability, facing); } @SuppressWarnings("unchecked") @Override public <T> T getCapability(Capability<T> capability, @Nullable EnumFacing facing){ if(capability == CapabilityFluidHandler.FLUID_HANDLER_CAPABILITY){ if(facing == null){ return (T) innerHandler; } if(facing == EnumFacing.DOWN){ return (T) steamHandler; }else if(facing != EnumFacing.UP){ return (T) waterHandler; } } if(capability == Capabilities.AXLE_HANDLER_CAPABILITY && facing == EnumFacing.UP){ return (T) axleHandler; } return super.getCapability(capability, facing); } private class AxleHandler implements IAxleHandler{ @Override public double[] getMotionData(){ return motionData; } private double rotRatio; private byte updateKey; @Override public void propogate(IAxisHandler masterIn, byte key, double rotRatioIn, double lastRadius){ //If true, this has already been checked. if(key == updateKey || masterIn.addToList(this)){ return; } rotRatio = rotRatioIn == 0 ? 
1 : rotRatioIn; updateKey = key; } @Override public double getMoInertia(){ return 8; } @Override public double getRotationRatio(){ return rotRatio; } @Override public void addEnergy(double energy, boolean allowInvert, boolean absolute){ if(allowInvert && absolute){ motionData[1] += energy; }else if(allowInvert){ motionData[1] += energy * Math.signum(motionData[1]); }else if(absolute){ int sign = (int) Math.signum(motionData[1]); motionData[1] += energy; if(sign != 0 && Math.signum(motionData[1]) != sign){ motionData[1] = 0; } }else{ int sign = (int) Math.signum(motionData[1]); motionData[1] += energy * ((double) sign); if(Math.signum(motionData[1]) != sign){ motionData[1] = 0; } } markDirty(); } @Override public void markChanged(){ markDirty(); } @Override public boolean shouldManageAngle(){ return false; } } private class WaterFluidHandler implements IFluidHandler{ @Override public IFluidTankProperties[] getTankProperties(){ return new IFluidTankProperties[] {new FluidTankProperties(waterContent, CAPACITY, false, true)}; } @Override public int fill(FluidStack resource, boolean doFill){ return 0; } @Override public FluidStack drain(FluidStack resource, boolean doDrain){ if(resource != null && resource.getFluid() == BlockDistilledWater.getDistilledWater() && waterContent != null){ int change = Math.min(waterContent.amount, resource.amount); if(doDrain){ waterContent.amount -= change; if(waterContent.amount == 0){ waterContent = null; } } return new FluidStack(BlockDistilledWater.getDistilledWater(), change); }else{ return null; } } @Override public FluidStack drain(int maxDrain, boolean doDrain){ if(waterContent == null || maxDrain == 0){ return null; } int change = Math.min(waterContent.amount, maxDrain); if(doDrain){ waterContent.amount -= change; if(waterContent.amount == 0){ waterContent = null; } } return new FluidStack(BlockDistilledWater.getDistilledWater(), change); } } private class SteamFluidHandler implements IFluidHandler{ @Override public IFluidTankProperties[] getTankProperties(){ return new IFluidTankProperties[] {new FluidTankProperties(steamContent, CAPACITY, true, false)}; } @Override public int fill(FluidStack resource, boolean doFill){ if(resource == null || resource.getFluid() != BlockSteam.getSteam()){ return 0; } int change = Math.min(CAPACITY - (steamContent == null ? 0 : steamContent.amount), resource.amount); if(doFill){ steamContent = new FluidStack(BlockSteam.getSteam(), change + (steamContent == null ? 0 : steamContent.amount)); } return change; } @Override public FluidStack drain(FluidStack resource, boolean doDrain){ return null; } @Override public FluidStack drain(int maxDrain, boolean doDrain){ return null; } } private class InnerFluidHandler implements IFluidHandler{ @Override public IFluidTankProperties[] getTankProperties(){ return new IFluidTankProperties[] {new FluidTankProperties(waterContent, CAPACITY, false, true), new FluidTankProperties(steamContent, CAPACITY, true, false)}; } @Override public int fill(FluidStack resource, boolean doFill){ if(resource == null || resource.getFluid() != BlockSteam.getSteam()){ return 0; } int change = Math.min(CAPACITY - (steamContent == null ? 0 : steamContent.amount), resource.amount); if(doFill){ steamContent = new FluidStack(BlockSteam.getSteam(), change + (steamContent == null ? 
0 : steamContent.amount)); } return change; } @Override public FluidStack drain(FluidStack resource, boolean doDrain){ if(resource != null && resource.getFluid() == BlockDistilledWater.getDistilledWater() && waterContent != null){ int change = Math.min(waterContent.amount, resource.amount); if(doDrain){ waterContent.amount -= change; if(waterContent.amount == 0){ waterContent = null; } } return new FluidStack(BlockDistilledWater.getDistilledWater(), change); }else{ return null; } } @Override public FluidStack drain(int maxDrain, boolean doDrain){ if(waterContent == null || maxDrain == 0){ return null; } int change = Math.min(waterContent.amount, maxDrain); if(doDrain){ waterContent.amount -= change; if(waterContent.amount == 0){ waterContent = null; } } return new FluidStack(BlockDistilledWater.getDistilledWater(), change); } } }
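runMachine() above converts steam to distilled water in 100 mB steps: the per-tick limit is the minimum of the available steam, the remaining water-tank capacity, and the hard LIMIT cap, and the energy fed to the axle is limit * 0.1 * DEG_PER_BUCKET_STEAM / DEG_PER_JOULE. The sketch below restates that arithmetic outside Minecraft/Forge so it can run on its own; the two converter constants are placeholders, the real values live in EnergyConverters.

/*
 * Editor's sketch of the per-tick conversion performed in runMachine() above.
 * Converter constants are placeholders; see EnergyConverters for the real values.
 */
public class TurbineMathSketch {

    static final int CAPACITY = 10_000;               // shared tank size in mB
    static final int LIMIT = 5;                       // max number of 100 mB units converted per tick
    static final double DEG_PER_BUCKET_STEAM = 100;   // placeholder
    static final double DEG_PER_JOULE = 1;            // placeholder

    /** How many 100 mB units of steam can be condensed this tick. */
    static int conversionLimit(int steamAmount, int waterAmount) {
        int limit = steamAmount / 100;                           // whole 100 mB units of steam available
        limit = Math.min(limit, (CAPACITY - waterAmount) / 100); // room left in the water tank
        return Math.min(limit, LIMIT);                           // hard per-tick cap
    }

    /** Energy handed to the axle for a given limit, as in the addEnergy() call above. */
    static double energyFor(int limit) {
        return limit * 0.1 * DEG_PER_BUCKET_STEAM / DEG_PER_JOULE;
    }

    public static void main(String[] args) {
        int steam = 750, water = 9_800;
        int limit = conversionLimit(steam, water);   // min(7, 2, 5) = 2
        System.out.println("units converted: " + limit);
        System.out.println("steam left: " + (steam - 100 * limit) + " mB, water now: " + (water + 100 * limit) + " mB");
        System.out.println("energy added: " + energyFor(limit));
    }
}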
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.client; import java.io.IOException; import java.lang.reflect.Type; import java.net.URI; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.transform.Source; import org.springframework.core.ParameterizedTypeReference; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.http.client.ClientHttpRequest; import org.springframework.http.client.ClientHttpRequestFactory; import org.springframework.http.client.ClientHttpResponse; import org.springframework.http.client.support.InterceptingHttpAccessor; import org.springframework.http.converter.ByteArrayHttpMessageConverter; import org.springframework.http.converter.FormHttpMessageConverter; import org.springframework.http.converter.GenericHttpMessageConverter; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.ResourceHttpMessageConverter; import org.springframework.http.converter.StringHttpMessageConverter; import org.springframework.http.converter.json.GsonHttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.http.converter.support.AllEncompassingFormHttpMessageConverter; import org.springframework.http.converter.xml.SimpleXmlHttpMessageConverter; import org.springframework.http.converter.xml.SourceHttpMessageConverter; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.web.util.UriTemplate; import android.util.Log; /** * <strong>Spring's central class for client-side HTTP access.</strong> * It simplifies communication with HTTP servers, and enforces RESTful principles. * It handles HTTP connections, leaving application code to provide URLs * (with possible template variables) and extract results. * * <p>The main entry points of this template are the methods named after the six main HTTP methods: * <table> * <tr><th>HTTP method</th><th>RestTemplate methods</th></tr> * <tr><td>DELETE</td><td>{@link #delete}</td></tr> * <tr><td>GET</td><td>{@link #getForObject}</td></tr> * <tr><td></td><td>{@link #getForEntity}</td></tr> * <tr><td>HEAD</td><td>{@link #headForHeaders}</td></tr> * <tr><td>OPTIONS</td><td>{@link #optionsForAllow}</td></tr> * <tr><td>POST</td><td>{@link #postForLocation}</td></tr> * <tr><td></td><td>{@link #postForObject}</td></tr> * <tr><td>PUT</td><td>{@link #put}</td></tr> * <tr><td>any</td><td>{@link #exchange}</td></tr> * <tr><td></td><td>{@link #execute}</td></tr> </table> * * <p>The {@code exchange} and {@code execute} methods are generalized versions of the more specific methods listed * above them. 
They support additional, less frequently used combinations including support for requests using the * HTTP PATCH method. However, note that the underlying HTTP library must also support the desired combination. * * <p>For each of these HTTP methods, there are three corresponding Java methods in the {@code RestTemplate}. * Two variants take a {@code String} URI as first argument (eg. {@link #getForObject(String, Class, Object[])}, * {@link #getForObject(String, Class, Map)}), and are capable of substituting any {@linkplain UriTemplate URI templates} * in that URL using either a {@code String} variable arguments array, or a {@code Map<String, String>}. * The string varargs variant expands the given template variables in order, so that * <pre class="code"> * String result = restTemplate.getForObject("http://example.com/hotels/{hotel}/bookings/{booking}", String.class, "42", * "21"); * </pre> * will perform a GET on {@code http://example.com/hotels/42/bookings/21}. The map variant expands the template based * on variable name, and is therefore more useful when using many variables, or when a single variable is used multiple * times. For example: * <pre class="code"> * Map&lt;String, String&gt; vars = Collections.singletonMap("hotel", "42"); * String result = restTemplate.getForObject("http://example.com/hotels/{hotel}/rooms/{hotel}", String.class, vars); * </pre> * will perform a GET on {@code http://example.com/hotels/42/rooms/42}. Alternatively, there are {@link URI} variant * methods ({@link #getForObject(URI, Class)}), which do not allow for URI templates, but allow you to reuse a single, * expanded URI multiple times. * * <p>Furthermore, the {@code String}-argument methods assume that the URL String is unencoded. This means that * <pre class="code"> * restTemplate.getForObject("http://example.com/hotel list"); * </pre> * will perform a GET on {@code http://example.com/hotel%20list}. As a result, any URL passed that is already encoded * will be encoded twice (i.e. {@code http://example.com/hotel%20list} will become {@code * http://example.com/hotel%2520list}). If this behavior is undesirable, use the {@code URI}-argument methods, which * will not perform any URL encoding. * * <p>Objects passed to and returned from these methods are converted to and from HTTP messages by * {@link HttpMessageConverter} instances. Converters for the main mime types are registered by default, * but you can also write your own converter and register it via the {@link #setMessageConverters messageConverters} * bean property. * * <p>The default set of converters are listed in the following table, and are registered based on the corresponding rule. 
* * <p><table border=1 cellpadding=2 cellspacing=0> * <tr><th>Message Body Converter</th><th>Rule</th></tr> * <tr><td>{@link ByteArrayHttpMessageConverter}</td><td rowspan=5 valign=top>Always included</td></tr> * <tr><td>{@link StringHttpMessageConverter}</td></tr> * <tr><td>{@link ResourceHttpMessageConverter}</td></tr> * <tr><td>{@link SourceHttpMessageConverter}</td></tr> * <tr><td>{@link AllEncompassingFormHttpMessageConverter}</td></tr> * <tr><td>{@link SimpleXmlHttpMessageConverter}</td><td>Included if the Simple XML serializer is present.</td></tr> * <tr><td>{@link MappingJackson2HttpMessageConverter}</td><td>Included if the Jackson 2.x JSON processor is present.</td></tr> * <tr><td>{@link GsonHttpMessageConverter}</td><td>Included if Gson is present, and only included if Jackson is not available.</td></tr> * </table><br /> * * <p>This template uses a {@link org.springframework.http.client.SimpleClientHttpRequestFactory} and a * {@link DefaultResponseErrorHandler} as default strategies for creating HTTP connections or handling HTTP errors, * respectively. These defaults can be overridden through the {@link #setRequestFactory(ClientHttpRequestFactory) * requestFactory} and {@link #setErrorHandler(ResponseErrorHandler) errorHandler} properties. * * @author Arjen Poutsma * @author Roy Clarkson * @see HttpMessageConverter * @see RequestCallback * @see ResponseExtractor * @see ResponseErrorHandler * @since 1.0 */ public class RestTemplate extends InterceptingHttpAccessor implements RestOperations { private static final String TAG = "RestTemplate"; private final List<HttpMessageConverter<?>> messageConverters = new ArrayList<HttpMessageConverter<?>>(); private ResponseErrorHandler errorHandler = new DefaultResponseErrorHandler(); private final ResponseExtractor<HttpHeaders> headersExtractor = new HeadersExtractor(); /** * Create a new instance of the {@link RestTemplate} using default settings. * Default {@link HttpMessageConverter}s are initialized. * @since 2.0 registers a default set of {@link HttpMessageConverter}s */ public RestTemplate() { DefaultMessageConverters.init(this.messageConverters); } /** * Create a new instance of {@link RestTemplate}, specifying whether to include a * default set of {@link HttpMessageConverter}s. Setting to {@code true} is equivalent * to using the default constructor. * @param registerDefaultConverters true to add the default set of * {@link HttpMessageConverter}s * @see HttpMessageConverter * @deprecated in favor of {@link #RestTemplate(List)} */ @Deprecated public RestTemplate(boolean registerDefaultConverters) { if (registerDefaultConverters) { DefaultMessageConverters.init(this.messageConverters); } } /** * Create a new instance of the {@link RestTemplate} based on the given {@link ClientHttpRequestFactory}. * @param requestFactory HTTP request factory to use * @see org.springframework.http.client.SimpleClientHttpRequestFactory * @see org.springframework.http.client.HttpComponentsClientHttpRequestFactory * @see org.springframework.http.client.OkHttpClientHttpRequestFactory * @see org.springframework.http.client.HttpComponentsAndroidClientHttpRequestFactory */ public RestTemplate(ClientHttpRequestFactory requestFactory) { this(); setRequestFactory(requestFactory); } /** * Create a new instance of {@link RestTemplate} based on the given * {@link ClientHttpRequestFactory}, specifying whether to include a default set of * {@link HttpMessageConverter}s. Setting to {@code true} is equivalent to using the * default constructor. 
* @param registerDefaultConverters true to add the default set of * {@link HttpMessageConverter}s * @param requestFactory HTTP request factory to use * @see HttpMessageConverter * @see org.springframework.http.client.SimpleClientHttpRequestFactory * @see org.springframework.http.client.HttpComponentsClientHttpRequestFactory * @see org.springframework.http.client.OkHttpClientHttpRequestFactory * @see org.springframework.http.client.HttpComponentsAndroidClientHttpRequestFactory * @deprecated in favor of {@link #RestTemplate(List)} and {@link #setRequestFactory(ClientHttpRequestFactory)} */ @Deprecated public RestTemplate(boolean registerDefaultConverters, ClientHttpRequestFactory requestFactory) { this(registerDefaultConverters); setRequestFactory(requestFactory); } /** * Create a new instance of the {@link RestTemplate} using the given list of * {@link HttpMessageConverter} to use * @param messageConverters the list of {@link HttpMessageConverter} to use * @since 2.0 */ public RestTemplate(List<HttpMessageConverter<?>> messageConverters) { Assert.notEmpty(messageConverters, "'messageConverters' must not be empty"); this.messageConverters.addAll(messageConverters); } /** * Set the message body converters to use. * <p>These converters are used to convert from and to HTTP requests and responses. */ public void setMessageConverters(List<HttpMessageConverter<?>> messageConverters) { Assert.notEmpty(messageConverters, "'messageConverters' must not be empty"); // Take getMessageConverters() List as-is when passed in here if (this.messageConverters != messageConverters) { this.messageConverters.clear(); this.messageConverters.addAll(messageConverters); } } /** * Return the message body converters. */ public List<HttpMessageConverter<?>> getMessageConverters() { return this.messageConverters; } /** * Set the error handler. * <p>By default, RestTemplate uses a {@link DefaultResponseErrorHandler}. */ public void setErrorHandler(ResponseErrorHandler errorHandler) { Assert.notNull(errorHandler, "'errorHandler' must not be null"); this.errorHandler = errorHandler; } /** * Return the error handler. */ public ResponseErrorHandler getErrorHandler() { return this.errorHandler; } // GET public <T> T getForObject(String url, Class<T> responseType, Object... urlVariables) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.GET, requestCallback, responseExtractor, urlVariables); } public <T> T getForObject(String url, Class<T> responseType, Map<String, ?> urlVariables) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.GET, requestCallback, responseExtractor, urlVariables); } public <T> T getForObject(URI url, Class<T> responseType) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.GET, requestCallback, responseExtractor); } public <T> ResponseEntity<T> getForEntity(String url, Class<T> responseType, Object... 
urlVariables) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.GET, requestCallback, responseExtractor, urlVariables); } public <T> ResponseEntity<T> getForEntity(String url, Class<T> responseType, Map<String, ?> urlVariables) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.GET, requestCallback, responseExtractor, urlVariables); } public <T> ResponseEntity<T> getForEntity(URI url, Class<T> responseType) throws RestClientException { AcceptHeaderRequestCallback requestCallback = new AcceptHeaderRequestCallback(responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.GET, requestCallback, responseExtractor); } // HEAD public HttpHeaders headForHeaders(String url, Object... urlVariables) throws RestClientException { return execute(url, HttpMethod.HEAD, null, this.headersExtractor, urlVariables); } public HttpHeaders headForHeaders(String url, Map<String, ?> urlVariables) throws RestClientException { return execute(url, HttpMethod.HEAD, null, this.headersExtractor, urlVariables); } public HttpHeaders headForHeaders(URI url) throws RestClientException { return execute(url, HttpMethod.HEAD, null, this.headersExtractor); } // POST public URI postForLocation(String url, Object request, Object... urlVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); HttpHeaders headers = execute(url, HttpMethod.POST, requestCallback, this.headersExtractor, urlVariables); return headers.getLocation(); } public URI postForLocation(String url, Object request, Map<String, ?> urlVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); HttpHeaders headers = execute(url, HttpMethod.POST, requestCallback, this.headersExtractor, urlVariables); return headers.getLocation(); } public URI postForLocation(URI url, Object request) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); HttpHeaders headers = execute(url, HttpMethod.POST, requestCallback, this.headersExtractor); return headers.getLocation(); } public <T> T postForObject(String url, Object request, Class<T> responseType, Object... 
uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.POST, requestCallback, responseExtractor, uriVariables); } public <T> T postForObject(String url, Object request, Class<T> responseType, Map<String, ?> uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.POST, requestCallback, responseExtractor, uriVariables); } public <T> T postForObject(URI url, Object request, Class<T> responseType) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); HttpMessageConverterExtractor<T> responseExtractor = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); return execute(url, HttpMethod.POST, requestCallback, responseExtractor); } public <T> ResponseEntity<T> postForEntity(String url, Object request, Class<T> responseType, Object... uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.POST, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> postForEntity(String url, Object request, Class<T> responseType, Map<String, ?> uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.POST, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> postForEntity(URI url, Object request, Class<T> responseType) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, HttpMethod.POST, requestCallback, responseExtractor); } // PUT public void put(String url, Object request, Object... urlVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); execute(url, HttpMethod.PUT, requestCallback, null, urlVariables); } public void put(String url, Object request, Map<String, ?> urlVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); execute(url, HttpMethod.PUT, requestCallback, null, urlVariables); } public void put(URI url, Object request) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(request); execute(url, HttpMethod.PUT, requestCallback, null); } // DELETE public void delete(String url, Object... 
urlVariables) throws RestClientException { execute(url, HttpMethod.DELETE, null, null, urlVariables); } public void delete(String url, Map<String, ?> urlVariables) throws RestClientException { execute(url, HttpMethod.DELETE, null, null, urlVariables); } public void delete(URI url) throws RestClientException { execute(url, HttpMethod.DELETE, null, null); } // OPTIONS public Set<HttpMethod> optionsForAllow(String url, Object... urlVariables) throws RestClientException { HttpHeaders headers = execute(url, HttpMethod.OPTIONS, null, this.headersExtractor, urlVariables); return headers.getAllow(); } public Set<HttpMethod> optionsForAllow(String url, Map<String, ?> urlVariables) throws RestClientException { HttpHeaders headers = execute(url, HttpMethod.OPTIONS, null, this.headersExtractor, urlVariables); return headers.getAllow(); } public Set<HttpMethod> optionsForAllow(URI url) throws RestClientException { HttpHeaders headers = execute(url, HttpMethod.OPTIONS, null, this.headersExtractor); return headers.getAllow(); } // exchange public <T> ResponseEntity<T> exchange(String url, HttpMethod method, HttpEntity<?> requestEntity, Class<T> responseType, Object... uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, method, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> exchange(String url, HttpMethod method, HttpEntity<?> requestEntity, Class<T> responseType, Map<String, ?> uriVariables) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, method, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> exchange(URI url, HttpMethod method, HttpEntity<?> requestEntity, Class<T> responseType) throws RestClientException { HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, responseType); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(responseType); return execute(url, method, requestCallback, responseExtractor); } public <T> ResponseEntity<T> exchange(String url, HttpMethod method, HttpEntity<?> requestEntity, ParameterizedTypeReference<T> responseType, Object... 
uriVariables) throws RestClientException { Type type = responseType.getType(); HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, type); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(type); return execute(url, method, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> exchange(String url, HttpMethod method, HttpEntity<?> requestEntity, ParameterizedTypeReference<T> responseType, Map<String, ?> uriVariables) throws RestClientException { Type type = responseType.getType(); HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, type); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(type); return execute(url, method, requestCallback, responseExtractor, uriVariables); } public <T> ResponseEntity<T> exchange(URI url, HttpMethod method, HttpEntity<?> requestEntity, ParameterizedTypeReference<T> responseType) throws RestClientException { Type type = responseType.getType(); HttpEntityRequestCallback requestCallback = new HttpEntityRequestCallback(requestEntity, type); ResponseEntityResponseExtractor<T> responseExtractor = new ResponseEntityResponseExtractor<T>(type); return execute(url, method, requestCallback, responseExtractor); } // general execution public <T> T execute(String url, HttpMethod method, RequestCallback requestCallback, ResponseExtractor<T> responseExtractor, Object... urlVariables) throws RestClientException { URI expanded = new UriTemplate(url).expand(urlVariables); return doExecute(expanded, method, requestCallback, responseExtractor); } public <T> T execute(String url, HttpMethod method, RequestCallback requestCallback, ResponseExtractor<T> responseExtractor, Map<String, ?> urlVariables) throws RestClientException { URI expanded = new UriTemplate(url).expand(urlVariables); return doExecute(expanded, method, requestCallback, responseExtractor); } public <T> T execute(URI url, HttpMethod method, RequestCallback requestCallback, ResponseExtractor<T> responseExtractor) throws RestClientException { return doExecute(url, method, requestCallback, responseExtractor); } /** * Execute the given method on the provided URI. * <p>The {@link ClientHttpRequest} is processed using the {@link RequestCallback}; * the response with the {@link ResponseExtractor}. * @param url the fully-expanded URL to connect to * @param method the HTTP method to execute (GET, POST, etc.) 
* @param requestCallback object that prepares the request (can be {@code null}) * @param responseExtractor object that extracts the return value from the response (can be {@code null}) * @return an arbitrary object, as returned by the {@link ResponseExtractor} */ protected <T> T doExecute(URI url, HttpMethod method, RequestCallback requestCallback, ResponseExtractor<T> responseExtractor) throws RestClientException { Assert.notNull(url, "'url' must not be null"); Assert.notNull(method, "'method' must not be null"); ClientHttpResponse response = null; try { ClientHttpRequest request = createRequest(url, method); if (requestCallback != null) { requestCallback.doWithRequest(request); } response = request.execute(); if (!getErrorHandler().hasError(response)) { logResponseStatus(method, url, response); } else { handleResponseError(method, url, response); } if (responseExtractor != null) { return responseExtractor.extractData(response); } else { return null; } } catch (IOException ex) { throw new ResourceAccessException("I/O error on " + method.name() + " request for \"" + url + "\": " + ex.getMessage(), ex); } finally { if (response != null) { response.close(); } } } private void logResponseStatus(HttpMethod method, URI url, ClientHttpResponse response) { if (Log.isLoggable(TAG, Log.DEBUG)) { try { Log.d(TAG, method.name() + " request for \"" + url + "\" resulted in " + response.getStatusCode() + " (" + response.getStatusText() + ")"); } catch (IOException e) { // ignore } } } private void handleResponseError(HttpMethod method, URI url, ClientHttpResponse response) throws IOException { if (Log.isLoggable(TAG, Log.WARN)) { try { Log.w(TAG, method.name() + " request for \"" + url + "\" resulted in " + response.getStatusCode() + " (" + response.getStatusText() + "); invoking error handler"); } catch (IOException e) { // ignore } } getErrorHandler().handleError(response); } /** * Request callback implementation that prepares the request's accept headers. 
*/ private class AcceptHeaderRequestCallback implements RequestCallback { private final Type responseType; private AcceptHeaderRequestCallback(Type responseType) { this.responseType = responseType; } public void doWithRequest(ClientHttpRequest request) throws IOException { if (responseType != null) { Class<?> responseClass = null; if (responseType instanceof Class) { responseClass = (Class<?>) responseType; } List<MediaType> allSupportedMediaTypes = new ArrayList<MediaType>(); for (HttpMessageConverter<?> converter : getMessageConverters()) { if (responseClass != null) { if (converter.canRead(responseClass, null)) { allSupportedMediaTypes.addAll(getSupportedMediaTypes(converter)); } } else if (converter instanceof GenericHttpMessageConverter) { GenericHttpMessageConverter<?> genericConverter = (GenericHttpMessageConverter<?>) converter; if (genericConverter.canRead(responseType, null, null)) { allSupportedMediaTypes.addAll(getSupportedMediaTypes(converter)); } } } if (!allSupportedMediaTypes.isEmpty()) { MediaType.sortBySpecificity(allSupportedMediaTypes); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "Setting request Accept header to " + allSupportedMediaTypes); } request.getHeaders().setAccept(allSupportedMediaTypes); } } } private List<MediaType> getSupportedMediaTypes(HttpMessageConverter<?> messageConverter) { List<MediaType> supportedMediaTypes = messageConverter.getSupportedMediaTypes(); List<MediaType> result = new ArrayList<MediaType>(supportedMediaTypes.size()); for (MediaType supportedMediaType : supportedMediaTypes) { if (supportedMediaType.getCharSet() != null) { supportedMediaType = new MediaType(supportedMediaType.getType(), supportedMediaType.getSubtype()); } result.add(supportedMediaType); } return result; } } /** * Request callback implementation that writes the given object to the request stream. 
*/ private class HttpEntityRequestCallback extends AcceptHeaderRequestCallback { private final HttpEntity<?> requestEntity; private HttpEntityRequestCallback(Object requestBody) { this(requestBody, null); } private HttpEntityRequestCallback(Object requestBody, Type responseType) { super(responseType); if (requestBody instanceof HttpEntity) { this.requestEntity = (HttpEntity<?>) requestBody; } else if (requestBody != null) { this.requestEntity = new HttpEntity<Object>(requestBody); } else { this.requestEntity = HttpEntity.EMPTY; } } @Override @SuppressWarnings("unchecked") public void doWithRequest(ClientHttpRequest httpRequest) throws IOException { super.doWithRequest(httpRequest); if (!requestEntity.hasBody()) { HttpHeaders httpHeaders = httpRequest.getHeaders(); HttpHeaders requestHeaders = requestEntity.getHeaders(); if (!requestHeaders.isEmpty()) { httpHeaders.putAll(requestHeaders); } if (httpHeaders.getContentLength() == -1) { httpHeaders.setContentLength(0L); } } else { Object requestBody = requestEntity.getBody(); Class<?> requestType = requestBody.getClass(); HttpHeaders requestHeaders = requestEntity.getHeaders(); MediaType requestContentType = requestHeaders.getContentType(); for (HttpMessageConverter<?> messageConverter : getMessageConverters()) { if (messageConverter.canWrite(requestType, requestContentType)) { if (!requestHeaders.isEmpty()) { httpRequest.getHeaders().putAll(requestHeaders); } if (Log.isLoggable(TAG, Log.DEBUG)) { if (requestContentType != null) { Log.d(TAG, "Writing [" + requestBody + "] as \"" + requestContentType + "\" using [" + messageConverter + "]"); } else { Log.d(TAG, "Writing [" + requestBody + "] using [" + messageConverter + "]"); } } ((HttpMessageConverter<Object>) messageConverter).write( requestBody, requestContentType, httpRequest); return; } } String message = "Could not write request: no suitable HttpMessageConverter found for request type [" + requestType.getName() + "]"; if (requestContentType != null) { message += " and content type [" + requestContentType + "]"; } throw new RestClientException(message); } } } /** * Response extractor for {@link HttpEntity}. */ private class ResponseEntityResponseExtractor<T> implements ResponseExtractor<ResponseEntity<T>> { private final HttpMessageConverterExtractor<T> delegate; public ResponseEntityResponseExtractor(Type responseType) { if (responseType != null && !Void.class.equals(responseType)) { this.delegate = new HttpMessageConverterExtractor<T>(responseType, getMessageConverters()); } else { this.delegate = null; } } public ResponseEntity<T> extractData(ClientHttpResponse response) throws IOException { if (this.delegate != null) { T body = this.delegate.extractData(response); return new ResponseEntity<T>(body, response.getHeaders(), response.getStatusCode()); } else { return new ResponseEntity<T>(response.getHeaders(), response.getStatusCode()); } } } /** * Response extractor that extracts the response {@link HttpHeaders}. */ private static class HeadersExtractor implements ResponseExtractor<HttpHeaders> { public HttpHeaders extractData(ClientHttpResponse response) throws IOException { return response.getHeaders(); } } /** * Identifies and initializes default {@link HttpMessageConverter} implementations. 
*/ private static class DefaultMessageConverters { private static final boolean javaxXmlTransformPresent = ClassUtils.isPresent("javax.xml.transform.Source", RestTemplate.class.getClassLoader()); private static final boolean simpleXmlPresent = ClassUtils.isPresent("org.simpleframework.xml.Serializer", RestTemplate.class.getClassLoader()); private static final boolean jackson2Present = ClassUtils.isPresent("com.fasterxml.jackson.databind.ObjectMapper", RestTemplate.class.getClassLoader()) && ClassUtils.isPresent("com.fasterxml.jackson.core.JsonGenerator", RestTemplate.class.getClassLoader()); private static final boolean gsonPresent = ClassUtils.isPresent("com.google.gson.Gson", RestTemplate.class.getClassLoader()); public static void init(List<HttpMessageConverter<?>> messageConverters) { messageConverters.add(new ByteArrayHttpMessageConverter()); messageConverters.add(new StringHttpMessageConverter()); messageConverters.add(new ResourceHttpMessageConverter()); // if javax.xml.transform is not available, fall back to standard Form message converter if (javaxXmlTransformPresent) { messageConverters.add(new SourceHttpMessageConverter<Source>()); messageConverters.add(new AllEncompassingFormHttpMessageConverter()); } else { messageConverters.add(new FormHttpMessageConverter()); } if (simpleXmlPresent) { messageConverters.add(new SimpleXmlHttpMessageConverter()); } if (jackson2Present) { messageConverters.add(new MappingJackson2HttpMessageConverter()); } else if (gsonPresent) { messageConverters.add(new GsonHttpMessageConverter()); } } } }
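The class Javadoc above describes the two URI-template variants (positional varargs vs. a name-keyed map) and the more general exchange() method. The following caller sketch exercises only methods defined in this file; the example URL is hypothetical.

/*
 * Editor's sketch: a minimal caller for the RestTemplate above, illustrating the
 * URI-template expansion described in the class Javadoc. The URL is hypothetical.
 */
import java.util.Collections;
import java.util.Map;

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class RestTemplateUsageSketch {

    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();

        // Varargs variant: {hotel} and {booking} are expanded in order -> /hotels/42/bookings/21
        String result = restTemplate.getForObject(
                "http://example.com/hotels/{hotel}/bookings/{booking}", String.class, "42", "21");

        // Map variant: expansion by name, convenient when one variable is used twice -> /hotels/42/rooms/42
        Map<String, String> vars = Collections.singletonMap("hotel", "42");
        String rooms = restTemplate.getForObject(
                "http://example.com/hotels/{hotel}/rooms/{hotel}", String.class, vars);

        // exchange(): full control over request headers, still with template expansion
        HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
        ResponseEntity<String> entity = restTemplate.exchange(
                "http://example.com/hotels/{hotel}", HttpMethod.GET,
                new HttpEntity<Void>(headers), String.class, "42");

        System.out.println(result);
        System.out.println(rooms);
        System.out.println(entity.getStatusCode());
    }
}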
package person.daizhongde.virtue.spring; import java.io.File; import java.io.Serializable; import java.util.HashMap; import java.util.List; import java.util.Map; import net.sf.json.JSONObject; import person.daizhongde.virtue.assemble.hql.HQLAssembleQ; import person.daizhongde.virtue.assemble.hql.HQLAssembleR; import person.daizhongde.virtue.assemble.sql.SQLAssembleC; import person.daizhongde.virtue.assemble.sql.SQLAssembleD; import person.daizhongde.virtue.assemble.sql.SQLAssembleQ; import person.daizhongde.virtue.assemble.sql.SQLAssembleR; import person.daizhongde.virtue.assemble.sql.SQLAssembleU; import person.daizhongde.virtue.constant.AbstractConstant; import person.daizhongde.virtue.constant.INIT; import person.daizhongde.virtue.constant.Operator; import person.daizhongde.virtue.dao.SpringHibernateDao; /** * Not use, Only for display * @author dzd * */ public class BaseServiceImpl implements BaseService{ private SpringHibernateDao baseDao; public void setBaseDao(SpringHibernateDao baseDao) { this.baseDao = baseDao; } public long getTotal(SQLAssembleQ sqlA) { // Object o= dataDAO.sqlQueryfindaValueByMap( sqlA.getCountSQL(), sqlA.getMap() ); return Long.valueOf( baseDao.sqlQueryfindaValueByMap( sqlA.getCountSQL(), sqlA.getMap() ).toString() ).longValue(); } public List getRowsInMap(SQLAssembleQ sqlA) { return baseDao.sqlQuerylistAllByMap( sqlA.getSQL(), sqlA.getMap() );//use native sql, because of it less data } public List getRowsInMap(SQLAssembleQ sqlA, int offset, int pageSize) { return baseDao.sqlQueryfindByPageByMap(sqlA.getSQL(), sqlA.getMap(), offset, pageSize);//use native sql, because of it less data } public long getTotal(HQLAssembleQ hqlA) { // TODO Auto-generated method stub return 0; } public List getRowsInMap(HQLAssembleQ hqlA) { // TODO Auto-generated method stub return null; } public List getRowsInMap(HQLAssembleQ hqlA, int offset, int pageSize) { // TODO Auto-generated method stub return null; } public List getRowsInArray(SQLAssembleQ sqlA) { return baseDao.sqlQuerylistAllRetArrayByMap(sqlA.getSQL(), sqlA.getMap());//use native sql, because of it less data } public List getRowsInArray(SQLAssembleQ sqlA, int offset, int pageSize) { return baseDao.sqlQueryfindRetArrayByPageByMap(sqlA.getSQL(), sqlA.getMap(), offset, pageSize);//use native sql, because of it less data } public int add(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleC sqlA = new SQLAssembleC( // INIT.AUTH_schema, // absConstant.getTableName(), // jsonObject.getJSONObject("data"), // absConstant.getColumnTypes(), // absConstant.getFront2col() // ); // // return baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); return 0; } public int addRetId(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleC sqlA = new SQLAssembleC( // INIT.AUTH_schema, // absConstant.getTableName(), // jsonObject.getJSONObject("data"), // absConstant.getColumnTypes(), // absConstant.getFront2col() // ); // // return baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); return 0; } public int addWithId( String jdata ){ // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleC sqlA = new SQLAssembleC( // INIT.AUTH_schema, // absConstant.getTableName(), // jsonObject.getJSONObject("data"), // absConstant.getColumnTypes(), // absConstant.getFront2col() // ); // // 
return baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); return 0; } public int addWithIdRetId(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleC sqlA = new SQLAssembleC( // INIT.AUTH_schema, // absConstant.getTableName(), // jsonObject.getJSONObject("data"), // absConstant.getColumnTypes(), // absConstant.getFront2col() // ); // // baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); // //// return (Integer)sqlA.getMap().get( //// absConstant.getBack2front().get("NLid") //// ); // /*The below Code is dependent on front field, // * but sometimes It's compatibility is better then the top Code */ // return new Integer( sqlA.getMap().get("id").toString() ).intValue(); return 0; } public void addBySavePOJO(String jdata) { } public void addBySavePOJO2( Object pojo ){ } public int add(Map data) { // TODO Auto-generated method stub return 0; } public int modify( String jdata ){ // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleU sqlA = new SQLAssembleU( // INIT.AUTH_schema, // absConstant.getSQLDOC(), // absConstant.getTableName(), // jsonObject.getJSONObject("data"), // jsonObject.getJSONObject("algorithm"), // jsonObject.getJSONObject("condition"), // jsonObject.getJSONObject("operator"), // absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); return -1; } public Map browse(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleR sqlA = new SQLAssembleR( // absConstant.getSQLDOC(), // absConstant.getRead_SQL(), // jsonObject.getJSONObject("condition"), // jsonObject.getJSONObject("operator"), // absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return (Map)baseDao.sqlQuerylistAllByMap(sqlA.getSQL(), sqlA.getMap()).get(0); return null; } public Map browseById(int id) { // AbstractConstant absConstant = new AuthorityButton(); // String pkcolName = absConstant.getPrimaryKeyColumnName(); // // Map cond = new HashMap(1); // cond.put( pkcolName, id); // Map oper = new HashMap(1); // oper.put( pkcolName, Operator.EQUAL); // // SQLAssembleR sqlA = new SQLAssembleR( // absConstant.getSQLDOC(), // absConstant.getRead_SQL(), // cond, // oper, // absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return (Map)baseDao.sqlQuerylistAllByMap(sqlA.getSQL(), sqlA.getMap()).get(0); return null; } public Map browseById(String id) { // AbstractConstant absConstant = new AuthorityButton(); // String pkcolName = absConstant.getPrimaryKeyColumnName(); // // Map cond = new HashMap(1); // cond.put( pkcolName, id); // Map oper = new HashMap(1); // oper.put( pkcolName, Operator.EQUAL); // // SQLAssembleR sqlA = new SQLAssembleR( // absConstant.getSQLDOC(), // absConstant.getRead_SQL(), // cond, // oper, // absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return (Map)baseDao.sqlQuerylistAllByMap(sqlA.getSQL(), sqlA.getMap()).get(0); return null; } public Object[] browseArray(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleR sqlA = new SQLAssembleR( // absConstant.getSQLDOC(), // absConstant.getRead_SQL(), // jsonObject.getJSONObject("condition"), // jsonObject.getJSONObject("operator"), // 
absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return (Object[])baseDao.sqlQuerylistAllByMap( sqlA.getSQL(), sqlA.getMap() ).get(0); return null; } public Object browsePOJO(String jdata) { // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // HQLAssembleR hqlA = new HQLAssembleR(absConstant.getSQLDOC(), // absConstant.getRead_SQL(), // jsonObject.getJSONObject("condition"), // jsonObject.getJSONObject("operator"), // absConstant.getColumnTypes(), // absConstant.getFront2back() ); // // return baseDao.listAllByMap( "from TAuthorityButton t1 where "+hqlA.getWhereBackHQL(), hqlA.getMap() ).get(0); return null; } public Object browsePOJOById(int id) { // return baseDao.findById( new Integer(id) ); return null; } public Object browsePOJOById(String id) { // return baseDao.findById( new Integer(id) ); return null; } public int delete( String jdata ){ // JSONObject jsonObject = JSONObject.fromObject(jdata); // AbstractConstant absConstant = new AuthorityButton(); // // SQLAssembleD sqlA = new SQLAssembleD( // INIT.AUTH_schema, // absConstant.getSQLDOC(), // absConstant.getTableName(), // jsonObject.getJSONObject("condition"), // jsonObject.getJSONObject("operator"), // absConstant.getColumnTypes(), // absConstant.getFront2col() ); // // return baseDao.sqlQueryExeUByMap(sqlA.getSQL(), sqlA.getMap()); return 0; } public int deleteNP(String jdata) { return this.modify(jdata); } public void setbaseDao(SpringHibernateDao baseDao) { this.baseDao = baseDao; } }
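The service above exposes a count-then-page access pattern: getTotal() runs the assembled COUNT query, and getRowsInMap(query, offset, pageSize) fetches one page of rows as column-name/value maps. The sketch below shows how a caller might walk every page; it uses only methods implemented above, takes the SQLAssembleQ ready-made (its construction is project-specific and not shown here), and assumes the implementation has its SpringHibernateDao wired in.

/*
 * Editor's sketch of the count-then-page pattern offered by BaseServiceImpl above.
 * The SQLAssembleQ is assumed to be built elsewhere.
 */
import java.util.List;
import java.util.Map;

import person.daizhongde.virtue.assemble.sql.SQLAssembleQ;
import person.daizhongde.virtue.spring.BaseServiceImpl;

public class PagingSketch {

    /** Walks every page of the query and prints each row map. */
    public static void printAllPages(BaseServiceImpl service, SQLAssembleQ query, int pageSize) {
        long total = service.getTotal(query);                          // COUNT query via getCountSQL()
        for (int offset = 0; offset < total; offset += pageSize) {
            List rows = service.getRowsInMap(query, offset, pageSize); // one page via getSQL()
            for (Object row : rows) {
                Map record = (Map) row;                                // column-name -> value
                System.out.println(record);
            }
        }
    }
}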
/** * Copyright (C) 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ninja.session; import com.google.common.collect.ImmutableMap; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.Map; import java.util.UUID; import ninja.Context; import ninja.Cookie; import ninja.Result; import ninja.utils.Clock; import ninja.utils.CookieDataCodec; import ninja.utils.CookieEncryption; import ninja.utils.Crypto; import ninja.utils.NinjaConstant; import ninja.utils.NinjaProperties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Inject; public class SessionImpl implements Session { private final static Logger logger = LoggerFactory.getLogger(SessionImpl.class); private final Crypto crypto; private final CookieEncryption encryption; private final Clock time; private Long sessionExpireTimeInMs; private final Long defaultSessionExpireTimeInMs; private final Boolean sessionSendOnlyIfChanged; private final Boolean sessionTransferredOverHttpsOnly; private final Boolean sessionHttpOnly; private final String applicationCookieDomain; private final Map<String, String> data = new HashMap<String, String>(); /** Has cookie been changed => only send new cookie stuff has been changed */ private boolean sessionDataHasBeenChanged = false; private final String sessionCookieName; @Inject public SessionImpl(Crypto crypto, CookieEncryption encryption, NinjaProperties ninjaProperties, Clock clock) { this.crypto = crypto; this.encryption = encryption; this.time = clock; // read configuration stuff: Integer sessionExpireTimeInSeconds = ninjaProperties .getInteger(NinjaConstant.sessionExpireTimeInSeconds); if (sessionExpireTimeInSeconds != null) { this.defaultSessionExpireTimeInMs = sessionExpireTimeInSeconds * 1000L; } else { this.defaultSessionExpireTimeInMs = null; } this.sessionExpireTimeInMs = defaultSessionExpireTimeInMs; this.sessionSendOnlyIfChanged = ninjaProperties.getBooleanWithDefault( NinjaConstant.sessionSendOnlyIfChanged, true); this.sessionTransferredOverHttpsOnly = ninjaProperties .getBooleanWithDefault( NinjaConstant.sessionTransferredOverHttpsOnly, true); this.sessionHttpOnly = ninjaProperties.getBooleanWithDefault( NinjaConstant.sessionHttpOnly, true); this.applicationCookieDomain = ninjaProperties .get(NinjaConstant.applicationCookieDomain); String applicationCookiePrefix = ninjaProperties .getOrDie(NinjaConstant.applicationCookiePrefix); this.sessionCookieName = applicationCookiePrefix + ninja.utils.NinjaConstant.SESSION_SUFFIX; } @Override public void init(Context context) { try { // get the cookie that contains session information: Cookie cookie = context.getCookie(sessionCookieName); // check that the cookie is not empty: if (cookie != null && cookie.getValue() != null && !cookie.getValue().trim().isEmpty()) { String value = cookie.getValue(); // the first substring until "-" is the sign String sign = value.substring(0, value.indexOf("-")); // rest from "-" until the end is the payload of the cookie 
String payload = value.substring(value.indexOf("-") + 1); // check if payload is valid: if (CookieDataCodec.safeEquals(sign, crypto.signHmacSha1(payload))) { payload = encryption.decrypt(payload); CookieDataCodec.decode(data, payload); } // If an expiry time was set previously use that instead of the // default session expire time. if (data.containsKey(EXPIRY_TIME_KEY)) { Long expiryTime = Long.parseLong(data.get(EXPIRY_TIME_KEY)); if (expiryTime >= 0) { sessionExpireTimeInMs = expiryTime; } } checkExpire(); } } catch (UnsupportedEncodingException unsupportedEncodingException) { logger.error("Encoding exception - this must not happen", unsupportedEncodingException); } } protected boolean shouldExpire() { if (sessionExpireTimeInMs != null) { // Make sure session contains valid timestamp if (!data.containsKey(TIMESTAMP_KEY)) { return true; } Long timestamp = Long.parseLong(data.get(TIMESTAMP_KEY)); return (timestamp + sessionExpireTimeInMs < time.currentTimeMillis()); } return false; } @Override public void setExpiryTime(Long expiryTimeMs) { if (expiryTimeMs == null) { data.remove(EXPIRY_TIME_KEY); sessionExpireTimeInMs = defaultSessionExpireTimeInMs; sessionDataHasBeenChanged = true; } else { data.put(EXPIRY_TIME_KEY, "" + expiryTimeMs); sessionExpireTimeInMs = expiryTimeMs; } if (sessionExpireTimeInMs != null) { if (!data.containsKey(TIMESTAMP_KEY)) { data.put(TIMESTAMP_KEY, "" + time.currentTimeMillis()); } checkExpire(); sessionDataHasBeenChanged = true; } } private void checkExpire() { if (sessionExpireTimeInMs != null) { if (shouldExpire()) { sessionDataHasBeenChanged = true; data.clear(); } else { // Everything's alright => prolong session data.put(TIMESTAMP_KEY, "" + time.currentTimeMillis()); } } } @Override public String getId() { if (!data.containsKey(ID_KEY)) { put(ID_KEY, UUID.randomUUID().toString()); } return get(ID_KEY); } @Override public Map<String, String> getData() { return ImmutableMap.copyOf(data); } @Override public String getAuthenticityToken() { if (!data.containsKey(AUTHENTICITY_KEY)) { put(AUTHENTICITY_KEY, UUID.randomUUID().toString()); } return get(AUTHENTICITY_KEY); } @Override public void save(Context context, Result result) { // Don't save the cookie nothing has changed, and if we're not expiring or // we are expiring but we're only updating if the session changes if (!sessionDataHasBeenChanged && (sessionExpireTimeInMs == null || sessionSendOnlyIfChanged)) { // Nothing changed and no cookie-expire, consequently send nothing // back. 
return; } if (isEmpty()) { // It is empty, but there was a session coming in, therefore clear // it if (context.hasCookie(sessionCookieName)) { Cookie.Builder expiredSessionCookie = Cookie.builder(sessionCookieName, ""); expiredSessionCookie.setPath(context.getContextPath() + "/"); expiredSessionCookie.setMaxAge(0); result.addCookie(expiredSessionCookie.build()); } return; } // Make sure it has a timestamp, if it needs one if (sessionExpireTimeInMs != null && !data.containsKey(TIMESTAMP_KEY)) { data.put(TIMESTAMP_KEY, Long.toString(System.currentTimeMillis())); } try { String sessionData = CookieDataCodec.encode(data); // first encrypt data and then generate HMAC from encrypted data // http://crypto.stackexchange.com/questions/202/should-we-mac-then-encrypt-or-encrypt-then-mac sessionData = encryption.encrypt(sessionData); String sign = crypto.signHmacSha1(sessionData); Cookie.Builder cookie = Cookie.builder(sessionCookieName, sign + "-" + sessionData); cookie.setPath(context.getContextPath() + "/"); if (applicationCookieDomain != null) { cookie.setDomain(applicationCookieDomain); } if (sessionExpireTimeInMs != null) { cookie.setMaxAge((int)(sessionExpireTimeInMs / 1000L)); } if (sessionTransferredOverHttpsOnly != null) { cookie.setSecure(sessionTransferredOverHttpsOnly); } if (sessionHttpOnly != null) { cookie.setHttpOnly(sessionHttpOnly); } result.addCookie(cookie.build()); } catch (UnsupportedEncodingException unsupportedEncodingException) { logger.error("Encoding exception - this must not happen", unsupportedEncodingException); throw new RuntimeException(unsupportedEncodingException); } } @Override public void put(String key, String value) { // make sure key is valid: if (key.contains(":")) { throw new IllegalArgumentException( "Character ':' is invalid in a session key."); } sessionDataHasBeenChanged = true; if (value == null) { remove(key); } else { data.put(key, value); } } @Override public String get(String key) { return data.get(key); } @Override public String remove(String key) { sessionDataHasBeenChanged = true; String result = get(key); data.remove(key); return result; } @Override public void clear() { sessionDataHasBeenChanged = true; data.clear(); } @Override public boolean isEmpty() { int itemsToIgnore = 0; if (data.containsKey(TIMESTAMP_KEY)) { itemsToIgnore++; } if (data.containsKey(EXPIRY_TIME_KEY)) { itemsToIgnore++; } return (data.isEmpty() || data.size() == itemsToIgnore); } }
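SessionImpl above frames the session cookie as "<sign>-<payload>": the payload is the encrypted, URL-encoded key/value data, and the sign is an HMAC-SHA1 computed over the already-encrypted payload (encrypt-then-MAC, as the inline comment notes), verified on read with a constant-time comparison. The standalone sketch below shows just that framing and the sign/verify step using plain javax.crypto instead of the framework's Crypto and CookieEncryption helpers; the secret and payload values are placeholders.

/*
 * Editor's sketch of the cookie framing used by SessionImpl above. Key and payload
 * are placeholders; encryption of the payload is out of scope here.
 */
import java.nio.charset.StandardCharsets;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

public class SessionCookieSketch {

    /** Hex-encoded HMAC-SHA1 of the payload, playing the role of crypto.signHmacSha1(payload). */
    static String sign(String payload, String secret) throws Exception {
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(secret.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
        byte[] raw = mac.doFinal(payload.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : raw) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    /** Builds the cookie value the way save() does: sign + "-" + encrypted payload. */
    static String toCookieValue(String encryptedPayload, String secret) throws Exception {
        return sign(encryptedPayload, secret) + "-" + encryptedPayload;
    }

    /** Splits and verifies a cookie value the way init() does (split on the first "-"). */
    static boolean verify(String cookieValue, String secret) throws Exception {
        int dash = cookieValue.indexOf('-');
        if (dash < 0) {
            return false;
        }
        String sign = cookieValue.substring(0, dash);
        String payload = cookieValue.substring(dash + 1);
        // constant-time comparison, in the spirit of CookieDataCodec.safeEquals
        return java.security.MessageDigest.isEqual(
                sign.getBytes(StandardCharsets.UTF_8),
                sign(payload, secret).getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) throws Exception {
        String secret = "change-me";                         // placeholder application secret
        String cookie = toCookieValue("encrypted-session-data", secret);
        System.out.println(cookie);
        System.out.println("valid: " + verify(cookie, secret));
    }
}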
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.util; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Condition; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.tree.IElementType; import com.intellij.util.ArrayUtilRt; import com.intellij.util.IncorrectOperationException; import gnu.trove.THashMap; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.HashSet; import java.util.Map; import java.util.Set; public class PsiTypesUtil { @NonNls private static final Map<String, String> ourUnboxedTypes = new THashMap<>(); @NonNls private static final Map<String, String> ourBoxedTypes = new THashMap<>(); static { ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_BOOLEAN, "boolean"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_BYTE, "byte"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_SHORT, "short"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_INTEGER, "int"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_LONG, "long"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_FLOAT, "float"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_DOUBLE, "double"); ourUnboxedTypes.put(CommonClassNames.JAVA_LANG_CHARACTER, "char"); ourBoxedTypes.put("boolean", CommonClassNames.JAVA_LANG_BOOLEAN); ourBoxedTypes.put("byte", CommonClassNames.JAVA_LANG_BYTE); ourBoxedTypes.put("short", CommonClassNames.JAVA_LANG_SHORT); ourBoxedTypes.put("int", CommonClassNames.JAVA_LANG_INTEGER); ourBoxedTypes.put("long", CommonClassNames.JAVA_LANG_LONG); ourBoxedTypes.put("float", CommonClassNames.JAVA_LANG_FLOAT); ourBoxedTypes.put("double", CommonClassNames.JAVA_LANG_DOUBLE); ourBoxedTypes.put("char", CommonClassNames.JAVA_LANG_CHARACTER); } @NonNls private static final String GET_CLASS_METHOD = "getClass"; private PsiTypesUtil() { } public static Object getDefaultValue(PsiType type) { if (!(type instanceof PsiPrimitiveType)) return null; switch (type.getCanonicalText()) { case "boolean": return false; case "byte": return (byte)0; case "char": return '\0'; case "short": return (short)0; case "int": return 0; case "long": return 0L; case "float": return 0F; case "double": return 0D; default: return null; } } @NotNull public static String getDefaultValueOfType(PsiType type) { return getDefaultValueOfType(type, false); } @NotNull public static String getDefaultValueOfType(PsiType type, boolean customDefaultValues) { if (type instanceof PsiArrayType) { int count = type.getArrayDimensions() - 1; PsiType componentType = type.getDeepComponentType(); if (componentType instanceof PsiClassType) { final PsiClassType classType = (PsiClassType)componentType; if (classType.resolve() instanceof 
PsiTypeParameter) { return PsiKeyword.NULL; } } PsiType erasedComponentType = TypeConversionUtil.erasure(componentType); StringBuilder buffer = new StringBuilder(); buffer.append(PsiKeyword.NEW); buffer.append(" "); buffer.append(erasedComponentType.getCanonicalText()); buffer.append("[0]"); for (int i = 0; i < count; i++) { buffer.append("[]"); } return buffer.toString(); } if (type instanceof PsiPrimitiveType) { return PsiType.BOOLEAN.equals(type) ? PsiKeyword.FALSE : "0"; } if (customDefaultValues) { PsiType rawType = type instanceof PsiClassType ? ((PsiClassType)type).rawType() : null; if (rawType != null && rawType.equalsToText(CommonClassNames.JAVA_UTIL_OPTIONAL)) { return CommonClassNames.JAVA_UTIL_OPTIONAL + ".empty()"; } } return PsiKeyword.NULL; } /** * Returns the unboxed type name or parameter. * @param type boxed java type name * @return unboxed type name if available; same value otherwise */ @Contract("null -> null; !null -> !null") @Nullable public static String unboxIfPossible(@Nullable String type) { if (type == null) return null; final String s = ourUnboxedTypes.get(type); return s == null? type : s; } /** * Returns the boxed type name or parameter. * @param type primitive java type name * @return boxed type name if available; same value otherwise */ @Contract("null -> null; !null -> !null") @Nullable public static String boxIfPossible(@Nullable String type) { if (type == null) return null; final String s = ourBoxedTypes.get(type); return s == null ? type : s; } @Nullable public static PsiClass getPsiClass(@Nullable PsiType psiType) { return psiType instanceof PsiClassType? ((PsiClassType)psiType).resolve() : null; } @NotNull public static PsiClassType getClassType(@NotNull PsiClass psiClass) { return JavaPsiFacade.getElementFactory(psiClass.getProject()).createType(psiClass); } @Nullable public static PsiClassType getLowestUpperBoundClassType(@NotNull final PsiDisjunctionType type) { final PsiType lub = type.getLeastUpperBound(); if (lub instanceof PsiClassType) { return (PsiClassType)lub; } if (lub instanceof PsiIntersectionType) { for (PsiType subType : ((PsiIntersectionType)lub).getConjuncts()) { if (subType instanceof PsiClassType) { final PsiClass aClass = ((PsiClassType)subType).resolve(); if (aClass != null && !aClass.isInterface()) { return (PsiClassType)subType; } } } } return null; } public static PsiType patchMethodGetClassReturnType(@NotNull PsiMethodReferenceExpression methodExpression, @NotNull PsiMethod method) { if (isGetClass(method)) { final PsiType qualifierType = PsiMethodReferenceUtil.getQualifierType(methodExpression); return qualifierType != null ? createJavaLangClassType(methodExpression, qualifierType, true) : null; } return null; } public static PsiType patchMethodGetClassReturnType(@NotNull PsiExpression call, @NotNull PsiReferenceExpression methodExpression, @NotNull PsiMethod method, @NotNull Condition<? super IElementType> condition, @NotNull LanguageLevel languageLevel) { //JLS3 15.8.2 if (languageLevel.isAtLeast(LanguageLevel.JDK_1_5) && isGetClass(method)) { PsiExpression qualifier = methodExpression.getQualifierExpression(); PsiType qualifierType = null; final Project project = call.getProject(); if (qualifier != null) { qualifierType = TypeConversionUtil.erasure(qualifier.getType()); } else { PsiElement parent = call.getContext(); while (parent != null && condition.value(parent instanceof StubBasedPsiElement ? 
((StubBasedPsiElement)parent).getElementType() : parent.getNode().getElementType())) { parent = parent.getContext(); } if (parent != null) { qualifierType = JavaPsiFacade.getElementFactory(project).createType((PsiClass)parent); } } return createJavaLangClassType(methodExpression, qualifierType, true); } return null; } public static boolean isGetClass(@NotNull PsiMethod method) { if (GET_CLASS_METHOD.equals(method.getName())) { PsiClass aClass = method.getContainingClass(); return aClass != null && CommonClassNames.JAVA_LANG_OBJECT.equals(aClass.getQualifiedName()); } return false; } @Nullable public static PsiType createJavaLangClassType(@NotNull PsiElement context, @Nullable PsiType qualifierType, boolean captureTopLevelWildcards) { if (qualifierType != null) { PsiUtil.ensureValidType(qualifierType); JavaPsiFacade facade = JavaPsiFacade.getInstance(context.getProject()); PsiClass javaLangClass = facade.findClass(CommonClassNames.JAVA_LANG_CLASS, context.getResolveScope()); if (javaLangClass != null && javaLangClass.getTypeParameters().length == 1) { PsiSubstitutor substitutor = PsiSubstitutor.EMPTY. put(javaLangClass.getTypeParameters()[0], PsiWildcardType.createExtends(context.getManager(), qualifierType)); final PsiClassType classType = facade.getElementFactory().createType(javaLangClass, substitutor, PsiUtil.getLanguageLevel(context)); return captureTopLevelWildcards ? PsiUtil.captureToplevelWildcards(classType, context) : classType; } } return null; } /** * Return type explicitly declared in parent */ @Nullable public static PsiType getExpectedTypeByParent(@NotNull PsiElement element) { final PsiElement parent = PsiUtil.skipParenthesizedExprUp(element.getParent()); if (parent instanceof PsiVariable) { if (PsiUtil.checkSameExpression(element, ((PsiVariable)parent).getInitializer())) { PsiTypeElement typeElement = ((PsiVariable)parent).getTypeElement(); if (typeElement != null && typeElement.isInferredType()) { return null; } return ((PsiVariable)parent).getType(); } } else if (parent instanceof PsiAssignmentExpression) { if (PsiUtil.checkSameExpression(element, ((PsiAssignmentExpression)parent).getRExpression())) { PsiType type = ((PsiAssignmentExpression)parent).getLExpression().getType(); return !PsiType.NULL.equals(type) ? type : null; } } else if (parent instanceof PsiReturnStatement) { final PsiElement psiElement = PsiTreeUtil.getParentOfType(parent, PsiLambdaExpression.class, PsiMethod.class); if (psiElement instanceof PsiLambdaExpression) { return null; } else if (psiElement instanceof PsiMethod){ return ((PsiMethod)psiElement).getReturnType(); } } else if (PsiUtil.isCondition(element, parent)) { return PsiType.BOOLEAN; } else if (parent instanceof PsiArrayInitializerExpression) { final PsiElement gParent = parent.getParent(); if (gParent instanceof PsiNewExpression) { final PsiType type = ((PsiNewExpression)gParent).getType(); if (type instanceof PsiArrayType) { return ((PsiArrayType)type).getComponentType(); } } else if (gParent instanceof PsiVariable) { final PsiType type = ((PsiVariable)gParent).getType(); if (type instanceof PsiArrayType) { return ((PsiArrayType)type).getComponentType(); } } else if (gParent instanceof PsiArrayInitializerExpression) { final PsiType expectedTypeByParent = getExpectedTypeByParent(parent); return expectedTypeByParent instanceof PsiArrayType ? 
((PsiArrayType)expectedTypeByParent).getComponentType() : null; } } return null; } /** * Returns the return type for enclosing method or lambda * * @param element element inside method or lambda to determine the return type of * @return the return type or null if cannot be determined */ @Nullable public static PsiType getMethodReturnType(@NotNull PsiElement element) { final PsiElement methodOrLambda = PsiTreeUtil.getParentOfType(element, PsiMethod.class, PsiLambdaExpression.class); return methodOrLambda instanceof PsiMethod ? ((PsiMethod)methodOrLambda).getReturnType() : methodOrLambda instanceof PsiLambdaExpression ? LambdaUtil.getFunctionalInterfaceReturnType((PsiLambdaExpression)methodOrLambda) : null; } public static boolean compareTypes(PsiType leftType, PsiType rightType, boolean ignoreEllipsis) { if (ignoreEllipsis) { if (leftType instanceof PsiEllipsisType) { leftType = ((PsiEllipsisType)leftType).toArrayType(); } if (rightType instanceof PsiEllipsisType) { rightType = ((PsiEllipsisType)rightType).toArrayType(); } } return Comparing.equal(leftType, rightType); } /** * @deprecated not compliant to specification, use {@link PsiTypesUtil#isDenotableType(PsiType, PsiElement)} instead */ @Deprecated public static boolean isDenotableType(@Nullable PsiType type) { return !(type instanceof PsiWildcardType || type instanceof PsiCapturedWildcardType); } /** * @param context in which type should be checked * @return false if type is null or has no explicit canonical type representation (e. g. intersection type) */ public static boolean isDenotableType(@Nullable PsiType type, @NotNull PsiElement context) { if (type == null || type instanceof PsiWildcardType) return false; PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(context.getProject()); try { PsiType typeAfterReplacement = elementFactory.createTypeElementFromText(type.getCanonicalText(), context).getType(); return type.equals(typeAfterReplacement); } catch (IncorrectOperationException e) { return false; } } public static boolean hasUnresolvedComponents(@NotNull PsiType type) { return type.accept(new PsiTypeVisitor<Boolean>() { @Nullable @Override public Boolean visitClassType(@NotNull PsiClassType classType) { PsiClassType.ClassResolveResult resolveResult = classType.resolveGenerics(); final PsiClass psiClass = resolveResult.getElement(); if (psiClass == null) { return true; } PsiSubstitutor substitutor = resolveResult.getSubstitutor(); for (PsiTypeParameter param : PsiUtil.typeParametersIterable(psiClass)) { PsiType psiType = substitutor.substitute(param); if (psiType != null && psiType.accept(this)) { return true; } } return super.visitClassType(classType); } @Nullable @Override public Boolean visitArrayType(@NotNull PsiArrayType arrayType) { return arrayType.getComponentType().accept(this); } @NotNull @Override public Boolean visitWildcardType(@NotNull PsiWildcardType wildcardType) { final PsiType bound = wildcardType.getBound(); return bound != null && bound.accept(this); } @Override public Boolean visitType(@NotNull PsiType type) { return false; } }); } @NotNull public static PsiType getParameterType(PsiParameter @NotNull [] parameters, int i, boolean varargs) { final PsiParameter parameter = parameters[i < parameters.length ? 
i : parameters.length - 1]; PsiType parameterType = parameter.getType(); if (parameterType instanceof PsiEllipsisType && varargs) { parameterType = ((PsiEllipsisType)parameterType).getComponentType(); } if (!parameterType.isValid()) { PsiUtil.ensureValidType(parameterType, "Invalid type of parameter " + parameter + " of " + parameter.getClass()); } return parameterType; } public static PsiTypeParameter @NotNull [] filterUnusedTypeParameters(PsiTypeParameter @NotNull [] typeParameters, PsiType @NotNull ... types) { if (typeParameters.length == 0) return PsiTypeParameter.EMPTY_ARRAY; TypeParameterSearcher searcher = new TypeParameterSearcher(); for (PsiType type : types) { type.accept(searcher); } return searcher.getTypeParameters().toArray(PsiTypeParameter.EMPTY_ARRAY); } public static PsiTypeParameter @NotNull [] filterUnusedTypeParameters(@NotNull PsiType superReturnTypeInBaseClassType, PsiTypeParameter @NotNull [] typeParameters) { return filterUnusedTypeParameters(typeParameters, superReturnTypeInBaseClassType); } private static boolean isAccessibleAt(@NotNull PsiTypeParameter parameter, @NotNull PsiElement context) { PsiTypeParameterListOwner owner = parameter.getOwner(); if(owner instanceof PsiMethod) { return PsiTreeUtil.isAncestor(owner, context, false); } if(owner instanceof PsiClass) { return PsiTreeUtil.isAncestor(owner, context, false) && InheritanceUtil.hasEnclosingInstanceInScope((PsiClass)owner, context, false, false); } return false; } public static boolean allTypeParametersResolved(@NotNull PsiElement context, @NotNull PsiType targetType) { TypeParameterSearcher searcher = new TypeParameterSearcher(); targetType.accept(searcher); Set<PsiTypeParameter> parameters = searcher.getTypeParameters(); return parameters.stream().allMatch(parameter -> isAccessibleAt(parameter, context)); } @NotNull public static PsiType createArrayType(@NotNull PsiType newType, int arrayDim) { for(int i = 0; i < arrayDim; i++){ newType = newType.createArrayType(); } return newType; } /** * @return null if type can't be explicitly specified */ @Nullable public static PsiTypeElement replaceWithExplicitType(PsiTypeElement typeElement) { PsiType type = typeElement.getType(); if (!isDenotableType(type, typeElement)) { return null; } Project project = typeElement.getProject(); PsiTypeElement typeElementByExplicitType = JavaPsiFacade.getElementFactory(project).createTypeElement(type); PsiElement explicitTypeElement = typeElement.replace(typeElementByExplicitType); explicitTypeElement = JavaCodeStyleManager.getInstance(project).shortenClassReferences(explicitTypeElement); return (PsiTypeElement)CodeStyleManager.getInstance(project).reformat(explicitTypeElement); } public static PsiType getTypeByMethod(@NotNull PsiElement context, PsiExpressionList argumentList, PsiElement parentMethod, boolean varargs, PsiSubstitutor substitutor, boolean inferParent) { if (parentMethod instanceof PsiMethod) { final PsiParameter[] parameters = ((PsiMethod)parentMethod).getParameterList().getParameters(); if (parameters.length == 0) return null; final PsiExpression[] args = argumentList.getExpressions(); if (!((PsiMethod)parentMethod).isVarArgs() && parameters.length != args.length && !inferParent) return null; PsiElement arg = context; while (arg.getParent() instanceof PsiParenthesizedExpression) { arg = arg.getParent(); } final int i = ArrayUtilRt.find(args, arg); if (i < 0) return null; final PsiType parameterType = substitutor != null ? 
substitutor.substitute(getParameterType(parameters, i, varargs)) : null; final boolean isRaw = substitutor != null && PsiUtil.isRawSubstitutor((PsiMethod)parentMethod, substitutor); return isRaw ? TypeConversionUtil.erasure(parameterType) : parameterType; } return null; } /** * Checks if {@code type} mentions type parameters from the passed {@code Set} * Implicit type arguments of types based on inner classes of generic outer classes are explicitly checked */ public static boolean mentionsTypeParameters(@Nullable PsiType type, Set<PsiTypeParameter> typeParameters) { return mentionsTypeParametersOrUnboundedWildcard(type, typeParameters, false); } /** * Checks if {@code resolveResult} depicts unchecked method call */ public static boolean isUncheckedCall(JavaResolveResult resolveResult) { final PsiElement element = resolveResult.getElement(); if (element instanceof PsiMethod) { PsiMethod method = (PsiMethod)element; PsiSubstitutor substitutor = resolveResult.getSubstitutor(); if (PsiUtil.isRawSubstitutor(method, substitutor)) { Set<PsiTypeParameter> typeParameters = new HashSet<>(substitutor.getSubstitutionMap().keySet()); Arrays.stream(method.getTypeParameters()).forEach(typeParameters::remove); return Arrays.stream(method.getParameterList().getParameters()) .anyMatch(parameter -> mentionsTypeParametersOrUnboundedWildcard(parameter.getType(), typeParameters, true)); } } return false; } private static boolean mentionsTypeParametersOrUnboundedWildcard(@Nullable PsiType type, Set<PsiTypeParameter> typeParameters, boolean acceptUnboundedWildcard) { if (type == null) return false; return type.accept(new PsiTypeVisitor<Boolean>() { @Override public Boolean visitType(@NotNull PsiType type) { return false; } @Override public Boolean visitWildcardType(@NotNull PsiWildcardType wildcardType) { final PsiType bound = wildcardType.getBound(); if (bound != null) { return bound.accept(this); } return acceptUnboundedWildcard; } @Override public Boolean visitClassType(@NotNull PsiClassType classType) { PsiClassType.ClassResolveResult result = classType.resolveGenerics(); final PsiClass psiClass = result.getElement(); if (psiClass != null) { PsiSubstitutor substitutor = result.getSubstitutor(); for (PsiTypeParameter parameter : PsiUtil.typeParametersIterable(psiClass)) { PsiType type = substitutor.substitute(parameter); if (type != null && type.accept(this)) return true; } } return psiClass instanceof PsiTypeParameter && typeParameters.contains(psiClass); } @Override public Boolean visitIntersectionType(@NotNull PsiIntersectionType intersectionType) { for (PsiType conjunct : intersectionType.getConjuncts()) { if (conjunct.accept(this)) return true; } return false; } @Override public Boolean visitMethodReferenceType(@NotNull PsiMethodReferenceType methodReferenceType) { return false; } @Override public Boolean visitLambdaExpressionType(@NotNull PsiLambdaExpressionType lambdaExpressionType) { return false; } @Override public Boolean visitArrayType(@NotNull PsiArrayType arrayType) { return arrayType.getComponentType().accept(this); } }); } public static class TypeParameterSearcher extends PsiTypeVisitor<Boolean> { private final Set<PsiTypeParameter> myTypeParams = new HashSet<>(); @NotNull public Set<PsiTypeParameter> getTypeParameters() { return myTypeParams; } @Override public Boolean visitType(@NotNull final PsiType type) { return false; } @Override public Boolean visitArrayType(@NotNull final PsiArrayType arrayType) { return arrayType.getComponentType().accept(this); } @Override public Boolean 
visitClassType(@NotNull final PsiClassType classType) { PsiClassType.ClassResolveResult resolveResult = classType.resolveGenerics(); final PsiClass aClass = resolveResult.getElement(); if (aClass instanceof PsiTypeParameter) { myTypeParams.add((PsiTypeParameter)aClass); } if (aClass != null) { PsiSubstitutor substitutor = resolveResult.getSubstitutor(); for (final PsiTypeParameter parameter : PsiUtil.typeParametersIterable(aClass)) { PsiType psiType = substitutor.substitute(parameter); if (psiType != null) { psiType.accept(this); } } } return false; } @Override public Boolean visitWildcardType(@NotNull final PsiWildcardType wildcardType) { final PsiType bound = wildcardType.getBound(); if (bound != null) { bound.accept(this); } return false; } } }
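/*
 * Standalone sketch (an assumption for illustration, not part of the IntelliJ API above):
 * it mirrors the box/unbox lookup tables behind boxIfPossible()/unboxIfPossible() and the
 * textual defaults produced by getDefaultValueOfType(), using only JDK collections so it
 * can run without the PSI model. Class and method names here are invented.
 */
import java.util.HashMap;
import java.util.Map;

final class BoxingTableSketch {

    private static final Map<String, String> BOXED = new HashMap<>();   // primitive -> wrapper
    private static final Map<String, String> UNBOXED = new HashMap<>(); // wrapper -> primitive

    static {
        String[][] pairs = {
            {"boolean", "java.lang.Boolean"}, {"byte", "java.lang.Byte"},
            {"short", "java.lang.Short"},     {"int", "java.lang.Integer"},
            {"long", "java.lang.Long"},       {"float", "java.lang.Float"},
            {"double", "java.lang.Double"},   {"char", "java.lang.Character"}
        };
        for (String[] p : pairs) {
            BOXED.put(p[0], p[1]);
            UNBOXED.put(p[1], p[0]);
        }
    }

    // Same contract as boxIfPossible/unboxIfPossible: unknown names fall through unchanged.
    static String box(String typeName) {
        return BOXED.getOrDefault(typeName, typeName);
    }

    static String unbox(String typeName) {
        return UNBOXED.getOrDefault(typeName, typeName);
    }

    // Textual default value, analogous to getDefaultValueOfType for primitive vs. reference types.
    static String defaultValueText(String typeName) {
        if ("boolean".equals(typeName)) {
            return "false";
        }
        if (BOXED.containsKey(typeName)) {
            return "0"; // any other primitive
        }
        return "null";  // reference types
    }

    public static void main(String[] args) {
        System.out.println(box("int"));                 // java.lang.Integer
        System.out.println(unbox("java.lang.Double"));  // double
        System.out.println(defaultValueText("long"));   // 0
        System.out.println(defaultValueText("String")); // null
    }
}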
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.client; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.HttpResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.client.selector.ConnectionCountServerSelectorStrategy; import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.Druids; import io.druid.query.QueryInterruptedException; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.ReflectionQueryToolChestWarehouse; import io.druid.query.Result; import io.druid.query.timeboundary.TimeBoundaryQuery; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.Capture; import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.timeout.ReadTimeoutException; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.URL; import java.util.HashMap; import java.util.List; public class DirectDruidClientTest { @Test public void testRun() throws Exception { HttpClient httpClient = EasyMock.createMock(HttpClient.class); final URL url = new URL("http://foo/druid/v2/"); SettableFuture<InputStream> futureResult = SettableFuture.create(); Capture<Request> capturedRequest = EasyMock.newCapture(); EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(futureResult) .times(1); SettableFuture futureException = SettableFuture.create(); EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(futureException) .times(1); EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(SettableFuture.create()) .atLeastOnce(); EasyMock.replay(httpClient); final ServerSelector serverSelector = new 
ServerSelector( new DataSegment( "test", new Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, 0L ), new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()) ); DirectDruidClient client1 = new DirectDruidClient( new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "foo", new NoopServiceEmitter() ); DirectDruidClient client2 = new DirectDruidClient( new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "foo2", new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer( new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client1 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment()); QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer( new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client2 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment()); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); HashMap<String, List> context = Maps.newHashMap(); Sequence s1 = client1.run(query, context); Assert.assertTrue(capturedRequest.hasCaptured()); Assert.assertEquals(url, capturedRequest.getValue().getUrl()); Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod()); Assert.assertEquals(1, client1.getNumOpenConnections()); // simulate read timeout Sequence s2 = client1.run(query, context); Assert.assertEquals(2, client1.getNumOpenConnections()); futureException.setException(new ReadTimeoutException()); Assert.assertEquals(1, client1.getNumOpenConnections()); // subsequent connections should work Sequence s3 = client1.run(query, context); Sequence s4 = client1.run(query, context); Sequence s5 = client1.run(query, context); Assert.assertTrue(client1.getNumOpenConnections() == 4); // produce result for first connection futureResult.set(new ByteArrayInputStream("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]".getBytes())); List<Result> results = Sequences.toList(s1, Lists.<Result>newArrayList()); Assert.assertEquals(1, results.size()); Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); Assert.assertEquals(3, client1.getNumOpenConnections()); client2.run(query, context); client2.run(query, context); Assert.assertTrue(client2.getNumOpenConnections() == 2); Assert.assertTrue(serverSelector.pick() == queryableDruidServer2); EasyMock.verify(httpClient); } @Test public void testCancel() throws Exception { HttpClient httpClient = EasyMock.createStrictMock(HttpClient.class); Capture<Request> capturedRequest = EasyMock.newCapture(); ListenableFuture<Object> cancelledFuture = Futures.immediateCancelledFuture(); SettableFuture<Object> cancellationFuture = SettableFuture.create(); EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(cancelledFuture) .once(); EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(cancellationFuture) .once(); EasyMock.replay(httpClient); final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", new 
Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, 0L ), new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()) ); DirectDruidClient client1 = new DirectDruidClient( new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "foo", new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer1 = new QueryableDruidServer( new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0), client1 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment()); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); HashMap<String, List> context = Maps.newHashMap(); cancellationFuture.set(new StatusResponseHolder(HttpResponseStatus.OK, new StringBuilder("cancelled"))); Sequence results = client1.run(query, context); Assert.assertEquals(HttpMethod.DELETE, capturedRequest.getValue().getMethod()); Assert.assertEquals(0, client1.getNumOpenConnections()); QueryInterruptedException exception = null; try { Sequences.toList(results, Lists.newArrayList()); } catch (QueryInterruptedException e) { exception = e; } Assert.assertNotNull(exception); EasyMock.verify(httpClient); } @Test public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException { HttpClient httpClient = EasyMock.createMock(HttpClient.class); SettableFuture<Object> interruptionFuture = SettableFuture.create(); Capture<Request> capturedRequest = EasyMock.newCapture(); String hostName = "localhost:8080"; EasyMock.expect( httpClient.go( EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject() ) ) .andReturn(interruptionFuture) .anyTimes(); EasyMock.replay(httpClient); DataSegment dataSegment = new DataSegment( "test", new Interval("2013-01-01/2013-01-02"), new DateTime("2013-01-01").toString(), Maps.<String, Object>newHashMap(), Lists.<String>newArrayList(), Lists.<String>newArrayList(), NoneShardSpec.instance(), 0, 0L ); final ServerSelector serverSelector = new ServerSelector( dataSegment , new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()) ); DirectDruidClient client1 = new DirectDruidClient( new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, hostName, new NoopServiceEmitter() ); QueryableDruidServer queryableDruidServer = new QueryableDruidServer( new DruidServer("test1", hostName, 0, "historical", DruidServer.DEFAULT_TIER, 0), client1 ); serverSelector.addServerAndUpdateSegment(queryableDruidServer, dataSegment); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); HashMap<String, List> context = Maps.newHashMap(); interruptionFuture.set(new ByteArrayInputStream("{\"error\":\"testing\"}".getBytes())); Sequence results = client1.run(query, context); QueryInterruptedException actualException = null; try { Sequences.toList(results, Lists.newArrayList()); } catch (QueryInterruptedException e) { actualException = e; } Assert.assertNotNull(actualException); Assert.assertEquals(actualException.getMessage(), QueryInterruptedException.UNKNOWN_EXCEPTION); Assert.assertEquals(actualException.getCauseMessage(), "testing"); Assert.assertEquals(actualException.getHost(), hostName); EasyMock.verify(httpClient); } }